Merge remote-tracking branch 'upstream/master' into autoshield-poc-daa

This commit is contained in:
Kris Nuttycombe 2021-08-12 11:21:30 -06:00
commit b783c4d0e7
88 changed files with 13217 additions and 2265 deletions

35
.github/workflows/book.yml vendored Normal file
View File

@ -0,0 +1,35 @@
# Builds the workspace rustdocs on every push to master and publishes them,
# alongside the book, to GitHub Pages.
name: librustzcash documentation

on:
  push:
    branches:
      - master

jobs:
  deploy:
    runs-on: ubuntu-18.04
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
      - name: Build latest rustdocs
        uses: actions-rs/cargo@v1
        with:
          command: doc
          args: --no-deps --workspace --all-features
        env:
          # Unstable flags require the nightly toolchain selected above.
          RUSTDOCFLAGS: -Z unstable-options --enable-index-page --cfg docsrs
      - name: Move latest rustdocs into book
        run: |
          mkdir -p ./book/book/rustdoc
          mv ./target/doc ./book/book/rustdoc/latest
      - name: Deploy to GitHub Pages
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book/book

View File

@ -49,7 +49,6 @@ jobs:
strategy:
matrix:
target:
- wasm32-unknown-unknown
- wasm32-wasi
steps:

View File

@ -1,6 +1,7 @@
[workspace]
members = [
"components/equihash",
"components/zcash_address",
"components/zcash_note_encryption",
"zcash_client_backend",
"zcash_client_sqlite",
@ -14,3 +15,17 @@ members = [
lto = true
panic = 'abort'
codegen-units = 1
[patch.crates-io]
# To resolve the funty mess. Removing this probably blocks on a nom 7 release.
# - https://github.com/Geal/nom/issues/1302
# - https://github.com/Geal/nom/pull/1304
nom = { git = "https://github.com/myrrlyn/nom.git", rev = "d6b81f5303b0a347726e1f3f428751f376e7b771" }
# In development.
halo2 = { git = "https://github.com/zcash/halo2.git", rev = "27c4187673a9c6ade13fbdbd4f20955530c22d7f" }
orchard = { git = "https://github.com/zcash/orchard.git", rev = "8454f86d423edbf0b53a1d5d32df1c691f8b7188" }
zcash_note_encryption = { path = "components/zcash_note_encryption" }
# Unreleased
jubjub = { git = "https://github.com/zkcrypto/jubjub.git", rev = "96ab4162b83303378eae32a326b54d88b75bffc2" }

View File

@ -11,3 +11,6 @@ edition = "2018"
[dependencies]
blake2b_simd = "0.5"
byteorder = "1"
[lib]
bench = false

View File

@ -0,0 +1,23 @@
[package]
name = "zcash_address"
description = "Zcash address parsing and serialization"
version = "0.0.0"
authors = [
    "Jack Grigg <jack@electriccoin.co>",
]
homepage = "https://github.com/zcash/librustzcash"
repository = "https://github.com/zcash/librustzcash"
readme = "README.md"
license = "MIT OR Apache-2.0"
edition = "2018"

[dependencies]
# Bech32/Bech32m encodings (Sapling and unified addresses).
bech32 = "0.8"
blake2b_simd = "0.5"
# Base58Check encoding (Sprout and transparent addresses); the "check"
# feature enables the checksummed codec used by this crate.
bs58 = { version = "0.4", features = ["check"] }

[dev-dependencies]
proptest = "0.10.1"

[lib]
# Disable the default benchmark harness for the library target.
bench = false

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2021 Electric Coin Company
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,21 @@
# zcash_address
Zcash address parsing and serialization. This library allows its users to easily
recognize new Zcash address types, and to give good error messages for address
types they do not support.
## License
Licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally
submitted for inclusion in the work by you, as defined in the Apache-2.0
license, shall be dual licensed as above, without any additional terms or
conditions.

View File

@ -0,0 +1,7 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc e08469bc301313ef868b97a5c37d9a9746d9720c915a9127c89db25c3be778fd # shrinks to ua = Address([Sapling([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), P2pkh([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])])

View File

@ -0,0 +1,175 @@
use std::{error::Error, fmt};
use crate::{kind::*, AddressKind, Network, ZcashAddress};
/// An address type is not supported for conversion.
#[derive(Debug)]
pub struct UnsupportedAddress(&'static str);

impl fmt::Display for UnsupportedAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Renders e.g. "Zcash Sapling addresses are not supported".
        let UnsupportedAddress(kind) = self;
        write!(f, "Zcash {} addresses are not supported", kind)
    }
}

impl Error for UnsupportedAddress {}
/// A helper trait for converting a [`ZcashAddress`] into another type.
///
/// [`ZcashAddress`]: crate::ZcashAddress
///
/// # Examples
///
/// ```
/// use zcash_address::{FromAddress, Network, UnsupportedAddress, ZcashAddress};
///
/// #[derive(Debug)]
/// struct MySapling([u8; 43]);
///
/// // Implement the FromAddress trait, overriding whichever conversion methods match your
/// // requirements for the resulting type.
/// impl FromAddress for MySapling {
/// fn from_sapling(net: Network, data: [u8; 43]) -> Result<Self, UnsupportedAddress> {
/// Ok(MySapling(data))
/// }
/// }
///
/// // For a supported address type, the conversion works.
/// let addr: ZcashAddress =
/// "zs1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqpq6d8g"
/// .parse()
/// .unwrap();
/// assert!(addr.convert::<MySapling>().is_ok());
///
/// // For an unsupported address type, we get an error.
/// let addr: ZcashAddress = "t1Hsc1LR8yKnbbe3twRp88p6vFfC5t7DLbs".parse().unwrap();
/// assert_eq!(
/// addr.convert::<MySapling>().unwrap_err().to_string(),
/// "Zcash transparent P2PKH addresses are not supported",
/// );
/// ```
pub trait FromAddress: Sized {
    /// Converts a Sprout address payload into this type.
    ///
    /// The default implementation rejects the address; override it to add support.
    fn from_sprout(net: Network, data: sprout::Data) -> Result<Self, UnsupportedAddress> {
        // Discard the arguments to avoid unused-variable warnings in the default.
        let _ = (net, data);
        Err(UnsupportedAddress("Sprout"))
    }

    /// Converts a Sapling address payload into this type.
    ///
    /// The default implementation rejects the address; override it to add support.
    fn from_sapling(net: Network, data: sapling::Data) -> Result<Self, UnsupportedAddress> {
        let _ = (net, data);
        Err(UnsupportedAddress("Sapling"))
    }

    /// Converts a unified address into this type.
    ///
    /// The default implementation rejects the address; override it to add support.
    fn from_unified(net: Network, data: unified::Address) -> Result<Self, UnsupportedAddress> {
        let _ = (net, data);
        Err(UnsupportedAddress("Unified"))
    }

    /// Converts a transparent P2PKH address payload into this type.
    ///
    /// The default implementation rejects the address; override it to add support.
    fn from_transparent_p2pkh(net: Network, data: p2pkh::Data) -> Result<Self, UnsupportedAddress> {
        let _ = (net, data);
        Err(UnsupportedAddress("transparent P2PKH"))
    }

    /// Converts a transparent P2SH address payload into this type.
    ///
    /// The default implementation rejects the address; override it to add support.
    fn from_transparent_p2sh(net: Network, data: p2sh::Data) -> Result<Self, UnsupportedAddress> {
        let _ = (net, data);
        Err(UnsupportedAddress("transparent P2SH"))
    }
}
/// A helper trait for converting another type into a [`ZcashAddress`].
///
/// This trait is sealed and cannot be implemented for types outside this crate. Its
/// purpose is to move these conversion functions out of the main `ZcashAddress` API
/// documentation, as they are only required when creating addresses (rather than when
/// parsing addresses, which is a more common occurrence).
///
/// [`ZcashAddress`]: crate::ZcashAddress
///
/// # Examples
///
/// ```
/// use zcash_address::{ToAddress, Network, ZcashAddress};
///
/// #[derive(Debug)]
/// struct MySapling([u8; 43]);
///
/// impl MySapling {
/// /// Encodes this Sapling address for the given network.
/// fn encode(&self, net: Network) -> ZcashAddress {
/// ZcashAddress::from_sapling(net, self.0)
/// }
/// }
///
/// let addr = MySapling([0; 43]);
/// let encoded = addr.encode(Network::Main);
/// assert_eq!(
/// encoded.to_string(),
/// "zs1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqpq6d8g",
/// );
/// ```
pub trait ToAddress: private::Sealed {
    /// Constructs an address from a raw Sprout payload and network.
    fn from_sprout(net: Network, data: sprout::Data) -> Self;

    /// Constructs an address from a raw Sapling payload and network.
    fn from_sapling(net: Network, data: sapling::Data) -> Self;

    /// Constructs an address from unified-address data and network.
    fn from_unified(net: Network, data: unified::Address) -> Self;

    /// Constructs an address from a transparent P2PKH payload and network.
    fn from_transparent_p2pkh(net: Network, data: p2pkh::Data) -> Self;

    /// Constructs an address from a transparent P2SH payload and network.
    fn from_transparent_p2sh(net: Network, data: p2sh::Data) -> Self;
}
/// Collapses `Regtest` into `Test` for the address kinds that share a single
/// Base58Check prefix between testnet and regtest (Sprout and the transparent
/// kinds, per the `Display` impl in `encoding.rs`).
fn regtest_as_test(net: Network) -> Network {
    match net {
        Network::Regtest => Network::Test,
        other => other,
    }
}

impl ToAddress for ZcashAddress {
    fn from_sprout(net: Network, data: sprout::Data) -> Self {
        // Sprout addresses have no distinct regtest encoding.
        ZcashAddress {
            net: regtest_as_test(net),
            kind: AddressKind::Sprout(data),
        }
    }

    fn from_sapling(net: Network, data: sapling::Data) -> Self {
        ZcashAddress {
            net,
            kind: AddressKind::Sapling(data),
        }
    }

    fn from_unified(net: Network, data: unified::Address) -> Self {
        ZcashAddress {
            net,
            kind: AddressKind::Unified(data),
        }
    }

    fn from_transparent_p2pkh(net: Network, data: p2pkh::Data) -> Self {
        // Transparent addresses have no distinct regtest encoding.
        ZcashAddress {
            net: regtest_as_test(net),
            kind: AddressKind::P2pkh(data),
        }
    }

    fn from_transparent_p2sh(net: Network, data: p2sh::Data) -> Self {
        ZcashAddress {
            net: regtest_as_test(net),
            kind: AddressKind::P2sh(data),
        }
    }
}
mod private {
    use crate::ZcashAddress;

    /// Sealing trait: only types in this crate can implement `ToAddress`,
    /// because it requires `private::Sealed` as a supertrait.
    pub trait Sealed {}
    impl Sealed for ZcashAddress {}
}

View File

@ -0,0 +1,305 @@
use std::{convert::TryInto, error::Error, fmt, str::FromStr};
use bech32::{self, FromBase32, ToBase32, Variant};
use crate::{kind::*, AddressKind, Network, ZcashAddress};
/// An error while attempting to parse a string as a Zcash address.
#[derive(Debug, PartialEq)]
pub enum ParseError {
    /// The string is an invalid encoding.
    InvalidEncoding,
    /// The string is not a Zcash address.
    NotZcash,
    /// Errors specific to unified addresses.
    ///
    /// Note: `unified::ParseError::InvalidEncoding` is folded into
    /// [`ParseError::InvalidEncoding`] rather than this variant (see the
    /// `From` impl below).
    Unified(unified::ParseError),
}
impl From<unified::ParseError> for ParseError {
    fn from(e: unified::ParseError) -> Self {
        // A unified encoding failure is folded into the generic variant so that
        // callers see a single "invalid encoding" error; everything else is
        // wrapped verbatim.
        if let unified::ParseError::InvalidEncoding = e {
            Self::InvalidEncoding
        } else {
            Self::Unified(e)
        }
    }
}
impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Static messages are emitted directly, without format machinery.
            ParseError::InvalidEncoding => f.write_str("Invalid encoding"),
            ParseError::NotZcash => f.write_str("Not a Zcash address"),
            // Unified errors carry their own Display impl; defer to it.
            ParseError::Unified(e) => e.fmt(f),
        }
    }
}

impl Error for ParseError {}
impl FromStr for ZcashAddress {
    type Err = ParseError;

    /// Attempts to parse the given string as a Zcash address.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Remove leading and trailing whitespace, to handle copy-paste errors.
        let s = s.trim();

        // Most Zcash addresses use Bech32 or Bech32m, so try those first.
        match bech32::decode(s) {
            Ok((hrp, data, Variant::Bech32m)) => {
                // If we reached this point, the encoding is supposed to be valid Bech32m.
                let data =
                    Vec::<u8>::from_base32(&data).map_err(|_| ParseError::InvalidEncoding)?;

                let net = match hrp.as_str() {
                    unified::MAINNET => Network::Main,
                    unified::TESTNET => Network::Test,
                    unified::REGTEST => Network::Regtest,
                    // We will not define new Bech32m address encodings.
                    _ => {
                        return Err(ParseError::NotZcash);
                    }
                };

                return (hrp.as_str(), &data[..])
                    .try_into()
                    .map(AddressKind::Unified)
                    .map_err(|_| ParseError::InvalidEncoding)
                    .map(|kind| ZcashAddress { net, kind });
            }
            Ok((hrp, data, Variant::Bech32)) => {
                // If we reached this point, the encoding is supposed to be valid Bech32.
                let data =
                    Vec::<u8>::from_base32(&data).map_err(|_| ParseError::InvalidEncoding)?;

                let net = match hrp.as_str() {
                    sapling::MAINNET => Network::Main,
                    sapling::TESTNET => Network::Test,
                    sapling::REGTEST => Network::Regtest,
                    // We will not define new Bech32 address encodings.
                    _ => {
                        return Err(ParseError::NotZcash);
                    }
                };

                return data[..]
                    .try_into()
                    .map(AddressKind::Sapling)
                    .map_err(|_| ParseError::InvalidEncoding)
                    .map(|kind| ZcashAddress { net, kind });
            }
            Err(_) => (),
        }

        // The rest use Base58Check.
        if let Ok(decoded) = bs58::decode(s).with_check(None).into_vec() {
            // A valid payload carries a two-byte prefix; anything shorter would
            // make the `decoded[..2]` slices below panic on malformed input, so
            // reject it up front.
            if decoded.len() < 2 {
                return Err(ParseError::NotZcash);
            }

            let net = match decoded[..2].try_into().unwrap() {
                sprout::MAINNET | p2pkh::MAINNET | p2sh::MAINNET => Network::Main,
                sprout::TESTNET | p2pkh::TESTNET | p2sh::TESTNET => Network::Test,
                // We will not define new Base58Check address encodings.
                _ => return Err(ParseError::NotZcash),
            };

            return match decoded[..2].try_into().unwrap() {
                sprout::MAINNET | sprout::TESTNET => {
                    decoded[2..].try_into().map(AddressKind::Sprout)
                }
                p2pkh::MAINNET | p2pkh::TESTNET => decoded[2..].try_into().map(AddressKind::P2pkh),
                p2sh::MAINNET | p2sh::TESTNET => decoded[2..].try_into().map(AddressKind::P2sh),
                _ => unreachable!(),
            }
            .map_err(|_| ParseError::InvalidEncoding)
            .map(|kind| ZcashAddress { kind, net });
        };

        // If it's not valid Bech32 or Base58Check, it's not a Zcash address.
        Err(ParseError::NotZcash)
    }
}
/// Encodes `data` under the given human-readable part using Bech32m.
///
/// Panics if `hrp` is not a valid Bech32 HRP.
fn encode_bech32m(hrp: &str, data: &[u8]) -> String {
    bech32::encode(hrp, data.to_base32(), Variant::Bech32m).expect("hrp is invalid")
}

/// Encodes `data` under the given human-readable part using Bech32.
///
/// Panics if `hrp` is not a valid Bech32 HRP.
fn encode_bech32(hrp: &str, data: &[u8]) -> String {
    bech32::encode(hrp, data.to_base32(), Variant::Bech32).expect("hrp is invalid")
}
/// Prepends the two-byte network prefix to `data` and encodes the result
/// with Base58Check.
fn encode_b58(prefix: [u8; 2], data: &[u8]) -> String {
    let payload: Vec<u8> = prefix.iter().chain(data.iter()).copied().collect();
    bs58::encode(payload).with_check().into_string()
}
impl fmt::Display for ZcashAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Select the encoding scheme and network prefix/HRP from the address kind.
        let encoded = match &self.kind {
            AddressKind::Sprout(data) => encode_b58(
                match self.net {
                    Network::Main => sprout::MAINNET,
                    // Sprout has no distinct regtest prefix; regtest uses the
                    // testnet one.
                    Network::Test | Network::Regtest => sprout::TESTNET,
                },
                data,
            ),
            AddressKind::Sapling(data) => encode_bech32(
                match self.net {
                    Network::Main => sapling::MAINNET,
                    Network::Test => sapling::TESTNET,
                    Network::Regtest => sapling::REGTEST,
                },
                data,
            ),
            AddressKind::Unified(data) => {
                let hrp = match self.net {
                    Network::Main => unified::MAINNET,
                    Network::Test => unified::TESTNET,
                    Network::Regtest => unified::REGTEST,
                };
                // The HRP is also an input to the unified serialization itself.
                encode_bech32m(hrp, &data.to_bytes(hrp))
            }
            AddressKind::P2pkh(data) => encode_b58(
                match self.net {
                    Network::Main => p2pkh::MAINNET,
                    // Transparent addresses share the testnet prefix on regtest.
                    Network::Test | Network::Regtest => p2pkh::TESTNET,
                },
                data,
            ),
            AddressKind::P2sh(data) => encode_b58(
                match self.net {
                    Network::Main => p2sh::MAINNET,
                    Network::Test | Network::Regtest => p2sh::TESTNET,
                },
                data,
            ),
        };
        write!(f, "{}", encoded)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::kind::unified;

    /// Round-trip helper: `decoded` must display as `encoded`, and `encoded`
    /// must parse back to `decoded`.
    fn encoding(encoded: &str, decoded: ZcashAddress) {
        assert_eq!(decoded.to_string(), encoded);
        assert_eq!(encoded.parse(), Ok(decoded));
    }

    #[test]
    fn sprout() {
        encoding(
            "zc8E5gYid86n4bo2Usdq1cpr7PpfoJGzttwBHEEgGhGkLUg7SPPVFNB2AkRFXZ7usfphup5426dt1buMmY3fkYeRrQGLa8y",
            ZcashAddress { net: Network::Main, kind: AddressKind::Sprout([0; 64]) },
        );
        encoding(
            "ztJ1EWLKcGwF2S4NA17pAJVdco8Sdkz4AQPxt1cLTEfNuyNswJJc2BbBqYrsRZsp31xbVZwhF7c7a2L9jsF3p3ZwRWpqqyS",
            ZcashAddress { net: Network::Test, kind: AddressKind::Sprout([0; 64]) },
        );
    }

    #[test]
    fn sapling() {
        encoding(
            "zs1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqpq6d8g",
            ZcashAddress {
                net: Network::Main,
                kind: AddressKind::Sapling([0; 43]),
            },
        );
        encoding(
            "ztestsapling1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqfhgwqu",
            ZcashAddress {
                net: Network::Test,
                kind: AddressKind::Sapling([0; 43]),
            },
        );
        encoding(
            "zregtestsapling1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqknpr3m",
            ZcashAddress {
                net: Network::Regtest,
                kind: AddressKind::Sapling([0; 43]),
            },
        );
    }

    #[test]
    fn unified() {
        encoding(
            "u175h4qsgd8gujkevz283ka89ul6r2kr25xvachlt5w5srewdwcjacdtm3ku06jazzwk2klezj3kfy2jc9p65l5fgvjhekmnd4myk2m7xn",
            ZcashAddress {
                net: Network::Main,
                kind: AddressKind::Unified(unified::Address(vec![unified::Receiver::Sapling([0; 43])])),
            },
        );
        encoding(
            "utest193cmy6pcjrw6cg8rqcxgq6z2095a2mc0hqu0g0gvnlf83em0szx23qtv9722s6qkssz80try4tynp73u9gee3zskye0ztzdz0snrxw7n",
            ZcashAddress {
                net: Network::Test,
                kind: AddressKind::Unified(unified::Address(vec![unified::Receiver::Sapling([0; 43])])),
            },
        );
        encoding(
            "uregtest1dl4mka5saz8xwnf0pttr637jx6su0nejfhzcz3metcmqyzdgktsmm09ese6ew794xqcyp6476nuspvdvx2xk6gn2euvu7fdmrvwl87zx",
            ZcashAddress {
                net: Network::Regtest,
                kind: AddressKind::Unified(unified::Address(vec![unified::Receiver::Sapling([0; 43])])),
            },
        );
    }

    #[test]
    fn transparent() {
        encoding(
            "t1Hsc1LR8yKnbbe3twRp88p6vFfC5t7DLbs",
            ZcashAddress {
                net: Network::Main,
                kind: AddressKind::P2pkh([0; 20]),
            },
        );
        encoding(
            "tm9iMLAuYMzJ6jtFLcA7rzUmfreGuKvr7Ma",
            ZcashAddress {
                net: Network::Test,
                kind: AddressKind::P2pkh([0; 20]),
            },
        );
        encoding(
            "t3JZcvsuaXE6ygokL4XUiZSTrQBUoPYFnXJ",
            ZcashAddress {
                net: Network::Main,
                kind: AddressKind::P2sh([0; 20]),
            },
        );
        encoding(
            "t26YoyZ1iPgiMEWL4zGUm74eVWfhyDMXzY2",
            ZcashAddress {
                net: Network::Test,
                kind: AddressKind::P2sh([0; 20]),
            },
        );
    }

    /// Parsing trims surrounding whitespace, but embedded junk must still fail.
    #[test]
    fn whitespace() {
        assert_eq!(
            " t1Hsc1LR8yKnbbe3twRp88p6vFfC5t7DLbs".parse(),
            Ok(ZcashAddress {
                net: Network::Main,
                kind: AddressKind::P2pkh([0; 20])
            }),
        );
        assert_eq!(
            "t1Hsc1LR8yKnbbe3twRp88p6vFfC5t7DLbs ".parse(),
            Ok(ZcashAddress {
                net: Network::Main,
                kind: AddressKind::P2pkh([0; 20])
            }),
        );
        assert_eq!(
            "something t1Hsc1LR8yKnbbe3twRp88p6vFfC5t7DLbs".parse::<ZcashAddress>(),
            Err(ParseError::NotZcash),
        );
    }
}

View File

@ -0,0 +1,7 @@
// Unified addresses are part of the crate's public API; the remaining kinds
// are only used internally for parsing and encoding.
pub mod unified;

pub(crate) mod sapling;
pub(crate) mod sprout;

pub(crate) mod p2pkh;
pub(crate) mod p2sh;

View File

@ -0,0 +1,7 @@
/// The prefix for a Base58Check-encoded mainnet transparent P2PKH address.
pub(crate) const MAINNET: [u8; 2] = [0x1c, 0xb8];

/// The prefix for a Base58Check-encoded testnet transparent P2PKH address.
pub(crate) const TESTNET: [u8; 2] = [0x1d, 0x25];

/// The 20-byte payload of a transparent P2PKH address.
// NOTE(review): presumably the HASH160 of the public key — confirm against the
// protocol specification.
pub(crate) type Data = [u8; 20];

View File

@ -0,0 +1,7 @@
/// The prefix for a Base58Check-encoded mainnet transparent P2SH address.
pub(crate) const MAINNET: [u8; 2] = [0x1c, 0xbd];

/// The prefix for a Base58Check-encoded testnet transparent P2SH address.
pub(crate) const TESTNET: [u8; 2] = [0x1c, 0xba];

/// The 20-byte payload of a transparent P2SH address.
// NOTE(review): presumably the HASH160 of the script — confirm against the
// protocol specification.
pub(crate) type Data = [u8; 20];

View File

@ -0,0 +1,22 @@
/// The HRP for a Bech32-encoded mainnet Sapling address.
///
/// Defined in the [Zcash Protocol Specification section 5.6.4][saplingpaymentaddrencoding].
///
/// [saplingpaymentaddrencoding]: https://zips.z.cash/protocol/protocol.pdf#saplingpaymentaddrencoding
pub(crate) const MAINNET: &str = "zs";

/// The HRP for a Bech32-encoded testnet Sapling address.
///
/// Defined in the [Zcash Protocol Specification section 5.6.4][saplingpaymentaddrencoding].
///
/// [saplingpaymentaddrencoding]: https://zips.z.cash/protocol/protocol.pdf#saplingpaymentaddrencoding
pub(crate) const TESTNET: &str = "ztestsapling";

/// The HRP for a Bech32-encoded regtest Sapling address.
///
/// It is defined in [the `zcashd` codebase].
///
/// [the `zcashd` codebase]: https://github.com/zcash/zcash/blob/128d863fb8be39ee294fda397c1ce3ba3b889cb2/src/chainparams.cpp#L493
pub(crate) const REGTEST: &str = "zregtestsapling";

/// The 43-byte raw encoding of a Sapling payment address.
// NOTE(review): presumably the 11-byte diversifier followed by the 32-byte
// diversified transmission key — confirm against the protocol specification.
pub(crate) type Data = [u8; 43];

View File

@ -0,0 +1,15 @@
/// The prefix for a Base58Check-encoded mainnet Sprout address.
///
/// Defined in the [Zcash Protocol Specification section 5.6.3][sproutpaymentaddrencoding].
///
/// [sproutpaymentaddrencoding]: https://zips.z.cash/protocol/protocol.pdf#sproutpaymentaddrencoding
pub(crate) const MAINNET: [u8; 2] = [0x16, 0x9a];

/// The prefix for a Base58Check-encoded testnet Sprout address.
///
/// Defined in the [Zcash Protocol Specification section 5.6.3][sproutpaymentaddrencoding].
///
/// [sproutpaymentaddrencoding]: https://zips.z.cash/protocol/protocol.pdf#sproutpaymentaddrencoding
pub(crate) const TESTNET: [u8; 2] = [0x16, 0xb6];

/// The 64-byte raw encoding of a Sprout payment address.
// NOTE(review): presumably a_pk ‖ pk_enc per the protocol specification — confirm.
pub(crate) type Data = [u8; 64];

View File

@ -0,0 +1,507 @@
use std::cmp;
use std::collections::HashSet;
use std::convert::{TryFrom, TryInto};
use std::error::Error;
use std::fmt;
use std::iter;
use crate::kind;
// F4Jumble permutation used by the unified-address encoding (ZIP 316);
// implementation lives in `f4jumble.rs`.
mod f4jumble;

/// The HRP for a Bech32m-encoded mainnet Unified Address.
///
/// Defined in [ZIP 316][zip-0316].
///
/// [zip-0316]: https://zips.z.cash/zip-0316
pub(crate) const MAINNET: &str = "u";

/// The HRP for a Bech32m-encoded testnet Unified Address.
///
/// Defined in [ZIP 316][zip-0316].
///
/// [zip-0316]: https://zips.z.cash/zip-0316
pub(crate) const TESTNET: &str = "utest";

/// The HRP for a Bech32m-encoded regtest Unified Address.
pub(crate) const REGTEST: &str = "uregtest";

// NOTE(review): presumably the 16-byte padding applied to the HRP during the
// F4Jumble-based encoding per ZIP 316 — usage is outside this chunk; confirm.
const PADDING_LEN: usize = 16;
/// The typecode of a Unified Address receiver.
///
/// The numeric values are given by the `From<u8>`/`From<Typecode>` impls below.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Typecode {
    /// Transparent P2PKH (typecode 0x00).
    P2pkh,
    /// Transparent P2SH (typecode 0x01).
    P2sh,
    /// Sapling (typecode 0x02).
    Sapling,
    /// Orchard (typecode 0x03).
    Orchard,
    /// A typecode this parser does not recognize, carried verbatim.
    Unknown(u8),
}
impl Ord for Typecode {
fn cmp(&self, other: &Self) -> cmp::Ordering {
match (self, other) {
// Trivial equality checks.
(Self::Orchard, Self::Orchard)
| (Self::Sapling, Self::Sapling)
| (Self::P2sh, Self::P2sh)
| (Self::P2pkh, Self::P2pkh) => cmp::Ordering::Equal,
// We don't know for certain the preference order of unknown receivers, but it
// is likely that the higher typecode has higher preference. The exact order
// doesn't really matter, as unknown receivers have lower preference than
// known receivers.
(Self::Unknown(a), Self::Unknown(b)) => b.cmp(a),
// For the remaining cases, we rely on `match` always choosing the first arm
// with a matching pattern. Patterns below are listed in priority order:
(Self::Orchard, _) => cmp::Ordering::Less,
(_, Self::Orchard) => cmp::Ordering::Greater,
(Self::Sapling, _) => cmp::Ordering::Less,
(_, Self::Sapling) => cmp::Ordering::Greater,
(Self::P2sh, _) => cmp::Ordering::Less,
(_, Self::P2sh) => cmp::Ordering::Greater,
(Self::P2pkh, _) => cmp::Ordering::Less,
(_, Self::P2pkh) => cmp::Ordering::Greater,
}
}
}
impl PartialOrd for Typecode {
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
Some(self.cmp(other))
}
}
impl From<u8> for Typecode {
fn from(typecode: u8) -> Self {
match typecode {
0x00 => Typecode::P2pkh,
0x01 => Typecode::P2sh,
0x02 => Typecode::Sapling,
0x03 => Typecode::Orchard,
_ => Typecode::Unknown(typecode),
}
}
}
impl From<Typecode> for u8 {
fn from(t: Typecode) -> Self {
match t {
Typecode::P2pkh => 0x00,
Typecode::P2sh => 0x01,
Typecode::Sapling => 0x02,
Typecode::Orchard => 0x03,
Typecode::Unknown(typecode) => typecode,
}
}
}
impl Typecode {
fn is_transparent(&self) -> bool {
// Unknown typecodes are treated as not transparent for the purpose of disallowing
// only-transparent UAs, which can be represented with existing address encodings.
matches!(self, Typecode::P2pkh | Typecode::P2sh)
}
}
/// An error while attempting to parse a string as a Zcash address.
#[derive(Debug, PartialEq)]
pub enum ParseError {
    /// The unified address contains both P2PKH and P2SH receivers.
    // NOTE(review): "P2phk" is a typo for "P2pkh", but renaming the public
    // variant would break downstream matches, so the name is kept.
    BothP2phkAndP2sh,
    /// The unified address contains a duplicated typecode.
    DuplicateTypecode(Typecode),
    /// The string is an invalid encoding.
    InvalidEncoding,
    /// The unified address only contains transparent receivers.
    OnlyTransparent,
}

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ParseError::DuplicateTypecode(tc) => {
                write!(f, "Duplicate typecode {}", u8::from(*tc))
            }
            ParseError::BothP2phkAndP2sh => {
                f.write_str("UA contains both P2PKH and P2SH receivers")
            }
            ParseError::InvalidEncoding => f.write_str("Invalid encoding"),
            ParseError::OnlyTransparent => {
                f.write_str("UA only contains transparent receivers")
            }
        }
    }
}

impl Error for ParseError {}
/// The set of known Receivers for Unified Addresses.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Receiver {
    /// A raw Orchard receiver (43 bytes).
    Orchard([u8; 43]),
    /// A raw Sapling payment address (43 bytes).
    Sapling(kind::sapling::Data),
    /// A transparent P2PKH payload (20 bytes).
    P2pkh(kind::p2pkh::Data),
    /// A transparent P2SH payload (20 bytes).
    P2sh(kind::p2sh::Data),
    /// A receiver with an unrecognized typecode, carried as raw bytes so it
    /// round-trips through re-encoding.
    Unknown { typecode: u8, data: Vec<u8> },
}
impl cmp::Ord for Receiver {
    /// Receivers sort by typecode preference first, falling back to a
    /// bytewise comparison of the payload when the typecodes are equal.
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.typecode()
            .cmp(&other.typecode())
            .then_with(|| self.addr().cmp(other.addr()))
    }
}

impl cmp::PartialOrd for Receiver {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl TryFrom<(u8, &[u8])> for Receiver {
    type Error = ParseError;

    // Parses a single `(typecode, payload)` pair from the raw UA encoding.
    // The fixed-size variants use `<&[u8] as TryInto<[u8; N]>>`, so any
    // payload-length mismatch surfaces here.
    fn try_from((typecode, addr): (u8, &[u8])) -> Result<Self, Self::Error> {
        match typecode.into() {
            Typecode::P2pkh => addr.try_into().map(Receiver::P2pkh),
            Typecode::P2sh => addr.try_into().map(Receiver::P2sh),
            Typecode::Sapling => addr.try_into().map(Receiver::Sapling),
            Typecode::Orchard => addr.try_into().map(Receiver::Orchard),
            // Unknown typecodes accept any payload length.
            Typecode::Unknown(_) => Ok(Receiver::Unknown {
                typecode,
                data: addr.to_vec(),
            }),
        }
        // All conversion failures are reported uniformly as an invalid
        // encoding; the caller has no use for the length details.
        .map_err(|_| ParseError::InvalidEncoding)
    }
}
impl Receiver {
    /// Returns the typecode corresponding to this receiver's kind.
    fn typecode(&self) -> Typecode {
        match self {
            Receiver::P2pkh(_) => Typecode::P2pkh,
            Receiver::P2sh(_) => Typecode::P2sh,
            Receiver::Sapling(_) => Typecode::Sapling,
            Receiver::Orchard(_) => Typecode::Orchard,
            Receiver::Unknown { typecode, .. } => Typecode::Unknown(*typecode),
        }
    }

    /// Returns the raw payload bytes of this receiver.
    fn addr(&self) -> &[u8] {
        match self {
            Receiver::P2pkh(data) => data,
            Receiver::P2sh(data) => data,
            Receiver::Sapling(data) => data,
            Receiver::Orchard(data) => data,
            Receiver::Unknown { data, .. } => data,
        }
    }
}
/// A Unified Address.
///
/// The inner `Vec` preserves the order in which the receivers were parsed or
/// constructed; use [`Address::receivers`] for the preference-sorted view.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Address(pub(crate) Vec<Receiver>);
impl TryFrom<(&str, &[u8])> for Address {
    type Error = ParseError;

    // Parses a Unified Address from its F4Jumbled raw encoding `buf`, given
    // the human-readable part `hrp` it was decoded with.
    fn try_from((hrp, buf): (&str, &[u8])) -> Result<Self, Self::Error> {
        // Undo the F4Jumble. This also enforces the valid F4Jumble length
        // range (48..=16448), so `encoded.len() >= PADDING_LEN` holds below
        // and the `split_at` cannot underflow.
        let encoded = f4jumble::f4jumble_inv(buf).ok_or(ParseError::InvalidEncoding)?;
        // Validate and strip trailing padding bytes. The padding is the HRP
        // zero-extended to PADDING_LEN bytes, which binds the address bytes
        // to the network they were encoded for.
        if hrp.len() > 16 {
            return Err(ParseError::InvalidEncoding);
        }
        let mut expected_padding = [0; PADDING_LEN];
        expected_padding[0..hrp.len()].copy_from_slice(hrp.as_bytes());
        let encoded = match encoded.split_at(encoded.len() - PADDING_LEN) {
            (encoded, tail) if tail == &expected_padding => Ok(encoded),
            _ => Err(ParseError::InvalidEncoding),
        }?;
        // Walk the remaining bytes, consuming one receiver per iteration;
        // `scan` carries the not-yet-parsed tail between iterations.
        iter::repeat(())
            .scan(encoded, |encoded, _| match encoded {
                // Base case: we've parsed the full encoding.
                [] => None,
                // The raw encoding of a Unified Address is a concatenation of:
                // - typecode: byte
                // - length: byte
                // - addr: byte[length]
                [typecode, length, data @ ..] if data.len() >= *length as usize => {
                    let (addr, rest) = data.split_at(*length as usize);
                    *encoded = rest;
                    Some(Receiver::try_from((*typecode, addr)))
                }
                // The encoding is truncated.
                _ => Some(Err(ParseError::InvalidEncoding)),
            })
            .collect::<Result<_, _>>()
            // Defer to `TryFrom<Vec<Receiver>>` for the semantic checks
            // (duplicate typecodes, transparent-only, P2PKH+P2SH).
            .and_then(|receivers: Vec<Receiver>| receivers.try_into())
    }
}
impl TryFrom<Vec<Receiver>> for Address {
    type Error = ParseError;

    /// Validates the Unified Address invariants over a set of receivers: no
    /// duplicated typecodes, at most one of P2PKH/P2SH, and at least one
    /// non-transparent receiver.
    fn try_from(receivers: Vec<Receiver>) -> Result<Self, Self::Error> {
        let mut seen = HashSet::with_capacity(receivers.len());
        for receiver in &receivers {
            let t = receiver.typecode();
            // A UA may contain at most one receiver per typecode; `insert`
            // returns false when the typecode was already present.
            if !seen.insert(t) {
                return Err(ParseError::DuplicateTypecode(t));
            }
            // The two transparent receiver kinds are mutually exclusive.
            let p2pkh_clash = t == Typecode::P2pkh && seen.contains(&Typecode::P2sh);
            let p2sh_clash = t == Typecode::P2sh && seen.contains(&Typecode::P2pkh);
            if p2pkh_clash || p2sh_clash {
                return Err(ParseError::BothP2phkAndP2sh);
            }
        }
        // Note: an empty receiver set also lands here, since `all` is
        // vacuously true over an empty iterator.
        if seen.iter().all(Typecode::is_transparent) {
            Err(ParseError::OnlyTransparent)
        } else {
            // All checks pass!
            Ok(Address(receivers))
        }
    }
}
impl Address {
    /// Returns the raw encoding of this Unified Address.
    ///
    /// The encoding is the concatenation of a `(typecode, length, data)`
    /// triple per receiver, followed by the HRP zero-extended to
    /// `PADDING_LEN` bytes, with F4Jumble applied to the whole.
    ///
    /// Panics if the total length falls outside the range F4Jumble accepts
    /// (only possible for receiver sets too small to form a valid UA).
    pub(crate) fn to_bytes(&self, hrp: &str) -> Vec<u8> {
        assert!(hrp.len() <= PADDING_LEN);
        let mut raw = Vec::new();
        for receiver in &self.0 {
            let addr = receiver.addr();
            // Holds by construction: every receiver payload fits in one
            // length byte.
            assert!(addr.len() < 256);
            raw.push(receiver.typecode().into());
            raw.push(addr.len() as u8);
            raw.extend_from_slice(addr);
        }
        // Embed the HRP in the padding so the encoding commits to the
        // network it was produced for.
        raw.extend_from_slice(hrp.as_bytes());
        raw.resize(raw.len() + (PADDING_LEN - hrp.len()), 0);
        f4jumble::f4jumble(&raw).unwrap()
    }

    /// Returns the receivers contained within this address, sorted in
    /// preference order.
    pub fn receivers(&self) -> Vec<Receiver> {
        let mut sorted = self.0.clone();
        // Unstable sorting is fine, because all receivers are guaranteed by
        // construction to have distinct typecodes.
        sorted.sort_unstable_by(|a, b| a.typecode().cmp(&b.typecode()));
        sorted
    }

    /// Returns the receivers contained within this address, in the order
    /// they were parsed from the string encoding.
    ///
    /// This API is for advanced usage; in most cases you should use
    /// [`Address::receivers`].
    pub fn receivers_as_parsed(&self) -> &[Receiver] {
        &self.0
    }
}
#[cfg(test)]
mod tests {
    use std::convert::TryFrom;
    use proptest::{
        array::{uniform11, uniform20, uniform32},
        prelude::*,
    };
    use super::{Address, ParseError, Receiver, Typecode, MAINNET, REGTEST, TESTNET};

    // Strategy for a uniformly-random 43-byte array (the size of a raw
    // Sapling or Orchard receiver), assembled from an 11 + 32 byte split
    // because proptest only ships fixed-size `uniform*` arrays up to 32.
    prop_compose! {
        fn uniform43()(a in uniform11(0u8..), b in uniform32(0u8..)) -> [u8; 43] {
            let mut c = [0; 43];
            c[..11].copy_from_slice(&a);
            c[11..].copy_from_slice(&b);
            c
        }
    }

    // Strategy producing one shielded receiver (Sapling or Orchard).
    fn arb_shielded_receiver() -> BoxedStrategy<Receiver> {
        prop_oneof![
            uniform43().prop_map(Receiver::Sapling),
            uniform43().prop_map(Receiver::Orchard),
        ]
        .boxed()
    }

    // Strategy producing one transparent receiver (P2PKH or P2SH).
    fn arb_transparent_receiver() -> BoxedStrategy<Receiver> {
        prop_oneof![
            uniform20(0u8..).prop_map(Receiver::P2pkh),
            uniform20(0u8..).prop_map(Receiver::P2sh),
        ]
        .boxed()
    }

    // Strategy producing a valid UA: at least one shielded receiver, with an
    // optional transparent receiver.
    prop_compose! {
        fn arb_unified_address()(
            shielded in prop::collection::hash_set(arb_shielded_receiver(), 1..2),
            transparent in prop::option::of(arb_transparent_receiver()),
        ) -> Address {
            Address(shielded.into_iter().chain(transparent).collect())
        }
    }

    proptest! {
        // Encoding a valid UA and re-parsing it must yield the same address.
        #[test]
        fn ua_roundtrip(
            hrp in prop_oneof![MAINNET, TESTNET, REGTEST],
            ua in arb_unified_address(),
        ) {
            let bytes = ua.to_bytes(&hrp);
            let decoded = Address::try_from((hrp.as_str(), &bytes[..]));
            prop_assert_eq!(decoded, Ok(ua));
        }
    }

    #[test]
    fn padding() {
        // The test cases below use `Address(vec![Receiver::Orchard([1; 43])])` as base.
        // Invalid padding ([0xff; 16] instead of [b'u', 0x00, 0x00, 0x00...])
        let invalid_padding = [
            0xe6, 0x59, 0xd1, 0xed, 0xf7, 0x4b, 0xe3, 0x5e, 0x5a, 0x54, 0x0e, 0x41, 0x5d, 0x2f,
            0x0c, 0x0d, 0x33, 0x42, 0xbd, 0xbe, 0x9f, 0x82, 0x62, 0x01, 0xc1, 0x1b, 0xd4, 0x1e,
            0x42, 0x47, 0x86, 0x23, 0x05, 0x4b, 0x98, 0xd7, 0x76, 0x86, 0xa5, 0xe3, 0x1b, 0xd3,
            0x03, 0xca, 0x24, 0x44, 0x8e, 0x72, 0xc1, 0x4a, 0xc6, 0xbf, 0x3f, 0x2b, 0xce, 0xa7,
            0x7b, 0x28, 0x69, 0xc9, 0x84,
        ];
        assert_eq!(
            Address::try_from((MAINNET, &invalid_padding[..])),
            Err(ParseError::InvalidEncoding)
        );
        // Short padding (padded to 15 bytes instead of 16)
        let truncated_padding = [
            0x9a, 0x56, 0x12, 0xa3, 0x43, 0x45, 0xe0, 0x82, 0x6c, 0xac, 0x24, 0x8b, 0x3b, 0x45,
            0x72, 0x9a, 0x53, 0xd5, 0xf8, 0xda, 0xec, 0x07, 0x7c, 0xba, 0x9f, 0xa8, 0xd2, 0x97,
            0x5b, 0xda, 0x73, 0x1b, 0xd2, 0xd1, 0x32, 0x6b, 0x7b, 0x36, 0xdd, 0x57, 0x84, 0x2a,
            0xa0, 0x21, 0x23, 0x89, 0x73, 0x85, 0xe1, 0x4b, 0x3e, 0x95, 0xb7, 0xd4, 0x67, 0xbc,
            0x4b, 0x31, 0xee, 0x5a,
        ];
        assert_eq!(
            Address::try_from((MAINNET, &truncated_padding[..])),
            Err(ParseError::InvalidEncoding)
        );
    }

    #[test]
    fn truncated() {
        // The test cases below start from an encoding of
        // `Address(vec![Receiver::Orchard([1; 43]), Receiver::Sapling([2; 43])])`
        // with the receiver data truncated, but valid padding.
        // - Missing the last data byte of the Sapling receiver.
        let truncated_sapling_data = [
            0xaa, 0xb0, 0x6e, 0x7b, 0x26, 0x7a, 0x22, 0x17, 0x39, 0xfa, 0x07, 0x69, 0xe9, 0x32,
            0x2b, 0xac, 0x8c, 0x9e, 0x5e, 0x8a, 0xd9, 0x24, 0x06, 0x5a, 0x13, 0x79, 0x3a, 0x8d,
            0xb4, 0x52, 0xfa, 0x18, 0x4e, 0x33, 0x4d, 0x8c, 0x17, 0x77, 0x4d, 0x63, 0x69, 0x34,
            0x22, 0x70, 0x3a, 0xea, 0x30, 0x82, 0x5a, 0x6b, 0x37, 0xd1, 0x0d, 0xbe, 0x20, 0xab,
            0x82, 0x86, 0x98, 0x34, 0x6a, 0xd8, 0x45, 0x40, 0xd0, 0x25, 0x60, 0xbf, 0x1e, 0xb6,
            0xeb, 0x06, 0x85, 0x70, 0x4c, 0x42, 0xbc, 0x19, 0x14, 0xef, 0x7a, 0x05, 0xa0, 0x71,
            0xb2, 0x63, 0x80, 0xbb, 0xdc, 0x12, 0x08, 0x48, 0x28, 0x8f, 0x1c, 0x9e, 0xc3, 0x42,
            0xc6, 0x5e, 0x68, 0xa2, 0x78, 0x6c, 0x9e,
        ];
        assert_eq!(
            Address::try_from((MAINNET, &truncated_sapling_data[..])),
            Err(ParseError::InvalidEncoding)
        );
        // - Truncated after the typecode of the Sapling receiver.
        let truncated_after_sapling_typecode = [
            0x87, 0x7a, 0xdf, 0x79, 0x6b, 0xe3, 0xb3, 0x40, 0xef, 0xe4, 0x5d, 0xc2, 0x91, 0xa2,
            0x81, 0xfc, 0x7d, 0x76, 0xbb, 0xb0, 0x58, 0x98, 0x53, 0x59, 0xd3, 0x3f, 0xbc, 0x4b,
            0x86, 0x59, 0x66, 0x62, 0x75, 0x92, 0xba, 0xcc, 0x31, 0x1e, 0x60, 0x02, 0x3b, 0xd8,
            0x4c, 0xdf, 0x36, 0xa1, 0xac, 0x82, 0x57, 0xed, 0x0c, 0x98, 0x49, 0x8f, 0x49, 0x7e,
            0xe6, 0x70, 0x36, 0x5b, 0x7b, 0x9e,
        ];
        assert_eq!(
            Address::try_from((MAINNET, &truncated_after_sapling_typecode[..])),
            Err(ParseError::InvalidEncoding)
        );
    }

    #[test]
    fn duplicate_typecode() {
        // Construct and serialize an invalid UA.
        let ua = Address(vec![Receiver::Sapling([1; 43]), Receiver::Sapling([2; 43])]);
        let encoded = ua.to_bytes(MAINNET);
        assert_eq!(
            Address::try_from((MAINNET, &encoded[..])),
            Err(ParseError::DuplicateTypecode(Typecode::Sapling))
        );
    }

    #[test]
    fn p2pkh_and_p2sh() {
        // Construct and serialize an invalid UA.
        let ua = Address(vec![Receiver::P2pkh([0; 20]), Receiver::P2sh([0; 20])]);
        let encoded = ua.to_bytes(MAINNET);
        assert_eq!(
            Address::try_from((MAINNET, &encoded[..])),
            Err(ParseError::BothP2phkAndP2sh)
        );
    }

    #[test]
    fn only_transparent() {
        // Encoding of `Address(vec![Receiver::P2pkh([0; 20])])`.
        let encoded = vec![
            0xf0, 0x9e, 0x9d, 0x6e, 0xf5, 0xa6, 0xac, 0x16, 0x50, 0xf0, 0xdb, 0xe1, 0x2c, 0xa5,
            0x36, 0x22, 0xa2, 0x04, 0x89, 0x86, 0xe9, 0x6a, 0x9b, 0xf3, 0xff, 0x6d, 0x2f, 0xe6,
            0xea, 0xdb, 0xc5, 0x20, 0x62, 0xf9, 0x6f, 0xa9, 0x86, 0xcc,
        ];
        // We can't actually exercise this error, because at present the only transparent
        // receivers we can use are P2PKH and P2SH (which cannot be used together), and
        // with only one of them we don't have sufficient data for F4Jumble (so we hit a
        // different error).
        assert_eq!(
            Address::try_from((MAINNET, &encoded[..])),
            Err(ParseError::InvalidEncoding)
        );
    }

    #[test]
    fn receivers_are_sorted() {
        // Construct a UA with receivers in an unsorted order.
        let ua = Address(vec![
            Receiver::P2pkh([0; 20]),
            Receiver::Orchard([0; 43]),
            Receiver::Unknown {
                typecode: 0xff,
                data: vec![],
            },
            Receiver::Sapling([0; 43]),
        ]);
        // `Address::receivers` sorts the receivers in priority order.
        assert_eq!(
            ua.receivers(),
            vec![
                Receiver::Orchard([0; 43]),
                Receiver::Sapling([0; 43]),
                Receiver::P2pkh([0; 20]),
                Receiver::Unknown {
                    typecode: 0xff,
                    data: vec![],
                },
            ]
        )
    }
}

View File

@ -0,0 +1,155 @@
use blake2b_simd::{Params as Blake2bParams, OUTBYTES};
use std::cmp::min;
use std::ops::RangeInclusive;
#[cfg(test)]
mod test_vectors;
/// The range of message lengths (in bytes) for which F4Jumble is defined.
const VALID_LENGTH: RangeInclusive<usize> = 48..=16448;

/// Expands to the 16-byte BLAKE2b personalization for round function H:
/// the ASCII bytes of `"UA_F4Jumble_H_"` followed by the round index `$i`
/// and a zero byte (see the `h_pers` test below).
macro_rules! H_PERS {
    ( $i:expr ) => {
        [
            85, 65, 95, 70, 52, 74, 117, 109, 98, 108, 101, 95, 72, 95, $i, 0,
        ]
    };
}

/// Expands to the 16-byte BLAKE2b personalization for round function G:
/// the ASCII bytes of `"UA_F4Jumble_G_"` followed by the round index `$i`
/// and the output-block counter `$j` (see the `g_pers` test below).
macro_rules! G_PERS {
    ( $i:expr, $j:expr ) => {
        [
            85, 65, 95, 70, 52, 74, 117, 109, 98, 108, 101, 95, 71, 95, $i, $j,
        ]
    };
}
/// The left/right split lengths for a message undergoing F4Jumble, together
/// with the round functions parameterized by them.
struct Hashes {
    /// Length of the "left" piece: `min(OUTBYTES, message_length / 2)`.
    l_l: usize,
    /// Length of the "right" piece: the remainder of the message.
    l_r: usize,
}

impl Hashes {
    /// Computes the split lengths for a message of `message_length` bytes.
    fn new(message_length: usize) -> Self {
        let l_l = min(OUTBYTES, message_length / 2);
        let l_r = message_length - l_l;
        Hashes { l_l, l_r }
    }

    /// Round function H_i: a single BLAKE2b hash of `u`, truncated to `l_l`
    /// bytes and personalized with the round index `i`.
    fn h(&self, i: u8, u: &[u8]) -> Vec<u8> {
        Blake2bParams::new()
            .hash_length(self.l_l)
            .personal(&H_PERS!(i))
            .hash(&u)
            .as_ref()
            .to_vec()
    }

    /// Round function G_i: an expanding stream built by concatenating
    /// full-width BLAKE2b hashes of `u` (personalized with the round index
    /// `i` and a block counter `j`), truncated to `l_r` bytes.
    fn g(&self, i: u8, u: &[u8]) -> Vec<u8> {
        (0..ceildiv(self.l_r, OUTBYTES))
            .flat_map(|j| {
                Blake2bParams::new()
                    .hash_length(OUTBYTES)
                    .personal(&G_PERS!(i, j as u8))
                    .hash(u)
                    .as_ref()
                    .to_vec()
                    .into_iter()
            })
            .take(self.l_r)
            .collect()
    }
}
/// XORs two byte slices pairwise; the result is truncated to the length of
/// the shorter input.
fn xor(a: &[u8], b: &[u8]) -> Vec<u8> {
    a.iter().zip(b).map(|(x, y)| x ^ y).collect()
}
/// Returns `num / den`, rounded up.
///
/// Computed as quotient plus a remainder check rather than the usual
/// `(num + den - 1) / den`, so that the intermediate sum cannot overflow
/// when `num` is close to `usize::MAX`.
fn ceildiv(num: usize, den: usize) -> usize {
    num / den + usize::from(num % den != 0)
}
/// Applies the F4Jumble transformation to `a`, returning `None` if `a.len()`
/// is outside [`VALID_LENGTH`].
///
/// The message is split into a left piece of `l_l` bytes and a right piece
/// of `l_r` bytes, which are mixed in four rounds alternating the expanding
/// round function G and the compressing round function H (a Feistel-like
/// structure); the output is the concatenation of the final two pieces.
#[allow(clippy::many_single_char_names)]
pub fn f4jumble(a: &[u8]) -> Option<Vec<u8>> {
    if VALID_LENGTH.contains(&a.len()) {
        let hashes = Hashes::new(a.len());
        let (a, b) = a.split_at(hashes.l_l);
        // x = b XOR G_0(a); y = a XOR H_0(x); d = x XOR G_1(y); c = y XOR H_1(d)
        let x = xor(b, &hashes.g(0, a));
        let y = xor(a, &hashes.h(0, &x));
        let d = xor(&x, &hashes.g(1, &y));
        let mut c = xor(&y, &hashes.h(1, &d));
        c.extend(d);
        Some(c)
    } else {
        None
    }
}
/// Inverts [`f4jumble`] by applying the same round functions in the reverse
/// order, returning `None` if `c.len()` is outside [`VALID_LENGTH`].
#[allow(clippy::many_single_char_names)]
pub fn f4jumble_inv(c: &[u8]) -> Option<Vec<u8>> {
    if VALID_LENGTH.contains(&c.len()) {
        let hashes = Hashes::new(c.len());
        let (c, d) = c.split_at(hashes.l_l);
        // Undo the forward rounds: y, then x, then the original a and b.
        let y = xor(c, &hashes.h(1, d));
        let x = xor(d, &hashes.g(1, &y));
        let mut a = xor(&y, &hashes.h(0, &x));
        let b = xor(&x, &hashes.g(0, &a));
        a.extend(b);
        Some(a)
    } else {
        None
    }
}
#[cfg(test)]
mod tests {
    use proptest::collection::vec;
    use proptest::prelude::*;
    use super::{f4jumble, f4jumble_inv, test_vectors::test_vectors, VALID_LENGTH};

    // Pin the personalization macros against their intended ASCII form.
    #[test]
    fn h_pers() {
        assert_eq!(&H_PERS!(7), b"UA_F4Jumble_H_\x07\x00");
    }

    #[test]
    fn g_pers() {
        assert_eq!(&G_PERS!(7, 13), b"UA_F4Jumble_G_\x07\x0d");
    }

    proptest! {
        // F4Jumble must be length-preserving and invertible over the entire
        // valid length range.
        #[test]
        fn f4jumble_roundtrip(msg in vec(any::<u8>(), VALID_LENGTH)) {
            let jumbled = f4jumble(&msg).unwrap();
            let jumbled_len = jumbled.len();
            prop_assert_eq!(
                msg.len(), jumbled_len,
                "Jumbled length {} was not equal to message length {}",
                jumbled_len, msg.len()
            );
            let unjumbled = f4jumble_inv(&jumbled).unwrap();
            prop_assert_eq!(
                jumbled_len, unjumbled.len(),
                "Unjumbled length {} was not equal to jumbled length {}",
                unjumbled.len(), jumbled_len
            );
            prop_assert_eq!(msg, unjumbled, "Unjumbled message did not match original message.");
        }
    }

    // Known-answer tests against the fixed vectors in `test_vectors.rs`,
    // exercised in both directions.
    #[test]
    fn f4jumble_check_vectors() {
        for v in test_vectors() {
            let jumbled = f4jumble(&v.normal).unwrap();
            assert_eq!(jumbled, v.jumbled);
            let unjumbled = f4jumble_inv(&v.jumbled).unwrap();
            assert_eq!(unjumbled, v.normal);
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,93 @@
mod convert;
mod encoding;
mod kind;
#[cfg(test)]
mod test_vectors;
pub use convert::{FromAddress, ToAddress, UnsupportedAddress};
pub use encoding::ParseError;
pub use kind::unified;
/// A Zcash address.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ZcashAddress {
    /// The network this address was encoded for.
    net: Network,
    /// The kind-specific payload of the address.
    kind: AddressKind,
}
/// The Zcash network for which an address is encoded.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Network {
    /// Zcash Mainnet.
    Main,
    /// Zcash Testnet.
    Test,
    /// Private integration / regression testing, used in `zcashd`.
    ///
    /// For some address types there is no distinction between test and regtest
    /// encodings; those will always be parsed as `Network::Test`.
    Regtest,
}
/// Known kinds of Zcash addresses.
///
/// This enum is private; callers consume the payload via
/// [`ZcashAddress::convert`] instead of matching on it directly.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    /// A legacy Sprout shielded payment address.
    Sprout(kind::sprout::Data),
    /// A Sapling shielded payment address.
    Sapling(kind::sapling::Data),
    /// A ZIP 316 Unified Address.
    Unified(unified::Address),
    /// A transparent pay-to-public-key-hash payload.
    P2pkh(kind::p2pkh::Data),
    /// A transparent pay-to-script-hash payload.
    P2sh(kind::p2sh::Data),
}
impl ZcashAddress {
    /// Attempts to parse the given string as a Zcash address.
    ///
    /// This simply calls [`s.parse()`], leveraging the [`FromStr` implementation].
    ///
    /// [`s.parse()`]: std::primitive::str::parse
    /// [`FromStr` implementation]: ZcashAddress#impl-FromStr
    ///
    /// # Errors
    ///
    /// - If the parser can detect that the string _must_ contain an address encoding used
    ///   by Zcash, [`ParseError::InvalidEncoding`] will be returned if any subsequent
    ///   part of that encoding is invalid.
    ///
    /// - In all other cases, [`ParseError::NotZcash`] will be returned on failure.
    ///
    /// # Examples
    ///
    /// ```
    /// use zcash_address::ZcashAddress;
    ///
    /// let encoded = "zs1z7rejlpsa98s2rrrfkwmaxu53e4ue0ulcrw0h4x5g8jl04tak0d3mm47vdtahatqrlkngh9sly";
    /// let addr = ZcashAddress::try_from_encoded(&encoded);
    /// assert_eq!(encoded.parse(), addr);
    /// ```
    pub fn try_from_encoded(s: &str) -> Result<Self, ParseError> {
        s.parse()
    }

    /// Converts this address into another type.
    ///
    /// `convert` can convert into any type that implements the [`FromAddress`] trait.
    /// This enables `ZcashAddress` to be used as a common parsing and serialization
    /// interface for Zcash addresses, while delegating operations on those addresses
    /// (such as constructing transactions) to downstream crates.
    ///
    /// If you want to get the encoded string for this address, use the [`Display`]
    /// implementation instead via [`address.to_string()`].
    ///
    /// [`Display`]: std::fmt::Display
    /// [`address.to_string()`]: std::string::ToString
    pub fn convert<T: FromAddress>(self) -> Result<T, UnsupportedAddress> {
        // Dispatch on the address kind, handing the network and the
        // kind-specific payload to the target type's constructor hooks.
        match self.kind {
            AddressKind::Sprout(data) => T::from_sprout(self.net, data),
            AddressKind::Sapling(data) => T::from_sapling(self.net, data),
            AddressKind::Unified(data) => T::from_unified(self.net, data),
            AddressKind::P2pkh(data) => T::from_transparent_p2pkh(self.net, data),
            AddressKind::P2sh(data) => T::from_transparent_p2sh(self.net, data),
        }
    }
}

View File

@ -0,0 +1,333 @@
use std::iter;
use crate::{
unified::{self, Receiver},
Network, ToAddress, ZcashAddress,
};
#[test]
fn unified() {
    // One expected case: the receivers present in a UA, together with the
    // ASCII bytes of the Bech32m string encoding it should round-trip
    // through. At most one of the transparent fields is set, and at least
    // one shielded field is set (asserted in the loop below).
    struct TestVector {
        p2pkh_bytes: Option<[u8; 20]>,
        p2sh_bytes: Option<[u8; 20]>,
        sapling_raw_addr: Option<[u8; 43]>,
        orchard_raw_addr: Option<[u8; 43]>,
        unified_addr: Vec<u8>,
    }
    // From https://github.com/zcash-hackworks/zcash-test-vectors/blob/master/unified_address.py
    let test_vectors = vec![
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: Some([
                0x7a, 0x8f, 0x73, 0x9a, 0x2d, 0x9e, 0x94, 0x5b, 0x0c, 0xe1, 0x52, 0xa8, 0x04, 0x9e,
                0x29, 0x4c, 0x4d, 0x6e, 0x66, 0xb1,
            ]),
            sapling_raw_addr: None,
            orchard_raw_addr: Some([
                0xdc, 0xb1, 0xd2, 0xa3, 0x77, 0x62, 0x14, 0x8d, 0xb4, 0xce, 0xe3, 0xbb, 0xf1, 0x9f,
                0xb1, 0xec, 0x05, 0x89, 0x18, 0x94, 0xb1, 0x38, 0x01, 0xc6, 0x22, 0xba, 0x6a, 0x90,
                0xfa, 0xf1, 0x11, 0x9f, 0x82, 0x24, 0xae, 0x39, 0x85, 0xc6, 0xab, 0xd3, 0xb7, 0xbb,
                0xae,
            ]),
            unified_addr: vec![
                0x75, 0x31, 0x65, 0x32, 0x38, 0x66, 0x38, 0x78, 0x7a, 0x6e, 0x65, 0x6d, 0x67, 0x65,
                0x74, 0x79, 0x78, 0x72, 0x64, 0x7a, 0x6b, 0x66, 0x67, 0x6a, 0x75, 0x67, 0x73, 0x66,
                0x78, 0x39, 0x64, 0x6b, 0x71, 0x32, 0x74, 0x68, 0x6e, 0x65, 0x7a, 0x65, 0x39, 0x6c,
                0x33, 0x34, 0x74, 0x70, 0x76, 0x74, 0x70, 0x6d, 0x65, 0x6a, 0x65, 0x67, 0x74, 0x64,
                0x35, 0x67, 0x68, 0x6b, 0x77, 0x68, 0x67, 0x6a, 0x72, 0x68, 0x73, 0x64, 0x66, 0x64,
                0x6e, 0x74, 0x33, 0x6b, 0x34, 0x73, 0x66, 0x32, 0x75, 0x79, 0x39, 0x65, 0x61, 0x38,
                0x63, 0x74, 0x61, 0x75, 0x6d, 0x7a, 0x70, 0x75, 0x64, 0x6c, 0x65, 0x7a, 0x35, 0x66,
                0x67, 0x75, 0x6c, 0x77, 0x71, 0x6b, 0x70, 0x75, 0x35, 0x66, 0x76, 0x6a, 0x77, 0x79,
                0x63, 0x74, 0x74, 0x7a, 0x79, 0x64, 0x6c, 0x38, 0x36, 0x64, 0x37, 0x34, 0x34, 0x61,
                0x34, 0x30, 0x61, 0x71, 0x75, 0x74, 0x7a, 0x68, 0x7a, 0x76, 0x74, 0x79, 0x7a, 0x6e,
                0x68,
            ],
        },
        TestVector {
            p2pkh_bytes: Some([
                0xb3, 0x53, 0x42, 0x01, 0xcf, 0xb1, 0xcd, 0x8d, 0xbf, 0x69, 0xb8, 0x25, 0x0c, 0x18,
                0xef, 0x41, 0x29, 0x4c, 0xa9, 0x79,
            ]),
            p2sh_bytes: None,
            sapling_raw_addr: Some([
                0x90, 0x2b, 0x65, 0x65, 0xa1, 0xc4, 0x4e, 0x7e, 0x7a, 0x08, 0x05, 0x71, 0xaf, 0x1d,
                0xd7, 0x74, 0x69, 0x7c, 0xc1, 0x26, 0xf1, 0xfc, 0x04, 0x35, 0xd3, 0xcd, 0xbf, 0x86,
                0x87, 0x83, 0xe9, 0xfb, 0x46, 0x20, 0xdf, 0x4b, 0xf1, 0x75, 0xcb, 0xf2, 0xc3, 0xe3,
                0x6f,
            ]),
            orchard_raw_addr: Some([
                0x05, 0xf6, 0x12, 0x73, 0xa7, 0x20, 0x12, 0x95, 0x33, 0x2f, 0xee, 0x45, 0x79, 0x47,
                0x45, 0x34, 0x80, 0x9a, 0x0a, 0xeb, 0x81, 0x7a, 0x2b, 0xc0, 0x59, 0x41, 0x66, 0xad,
                0x7a, 0x46, 0x20, 0x67, 0x71, 0x25, 0x33, 0xb6, 0xee, 0xc0, 0xfa, 0x2d, 0x1b, 0xe9,
                0x9f,
            ]),
            unified_addr: vec![
                0x75, 0x31, 0x33, 0x71, 0x73, 0x32, 0x78, 0x79, 0x6b, 0x66, 0x73, 0x74, 0x64, 0x34,
                0x33, 0x32, 0x77, 0x70, 0x76, 0x36, 0x63, 0x68, 0x36, 0x73, 0x36, 0x63, 0x6b, 0x68,
                0x34, 0x73, 0x34, 0x68, 0x33, 0x73, 0x63, 0x72, 0x74, 0x64, 0x35, 0x68, 0x6b, 0x61,
                0x79, 0x36, 0x72, 0x77, 0x36, 0x6a, 0x6e, 0x36, 0x67, 0x65, 0x6e, 0x37, 0x34, 0x72,
                0x6d, 0x76, 0x63, 0x61, 0x36, 0x35, 0x77, 0x75, 0x73, 0x67, 0x71, 0x65, 0x36, 0x37,
                0x71, 0x6d, 0x6b, 0x6b, 0x66, 0x7a, 0x79, 0x68, 0x6c, 0x6b, 0x77, 0x67, 0x38, 0x30,
                0x71, 0x79, 0x6a, 0x33, 0x72, 0x63, 0x63, 0x61, 0x6e, 0x68, 0x35, 0x66, 0x67, 0x37,
                0x7a, 0x73, 0x39, 0x30, 0x32, 0x79, 0x32, 0x34, 0x67, 0x6e, 0x32, 0x71, 0x79, 0x66,
                0x36, 0x70, 0x6b, 0x63, 0x63, 0x63, 0x68, 0x67, 0x30, 0x35, 0x6a, 0x7a, 0x70, 0x33,
                0x79, 0x72, 0x66, 0x6e, 0x34, 0x70, 0x75, 0x38, 0x61, 0x74, 0x65, 0x38, 0x67, 0x37,
                0x71, 0x39, 0x72, 0x39, 0x73, 0x74, 0x65, 0x76, 0x78, 0x32, 0x38, 0x34, 0x34, 0x35,
                0x72, 0x35, 0x77, 0x6e, 0x33, 0x78, 0x33, 0x35, 0x70, 0x66, 0x37, 0x71, 0x71, 0x33,
                0x32, 0x34, 0x6e, 0x74, 0x68, 0x78, 0x74, 0x7a, 0x63, 0x77, 0x78, 0x65, 0x76, 0x79,
                0x66, 0x61, 0x34, 0x6c, 0x32, 0x6d, 0x32, 0x6d, 0x32, 0x78, 0x73, 0x37, 0x65, 0x64,
                0x76, 0x65, 0x70, 0x39, 0x66, 0x74, 0x61, 0x34, 0x76, 0x74, 0x77, 0x76, 0x68, 0x6b,
                0x61, 0x78, 0x78,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: Some([
                0xe8, 0xc7, 0x20, 0x3d, 0x99, 0x6a, 0xf7, 0xd4, 0x77, 0x08, 0x37, 0x56, 0xd5, 0x9a,
                0xf8, 0x0d, 0x06, 0xa7, 0x45, 0xf4,
            ]),
            sapling_raw_addr: None,
            orchard_raw_addr: Some([
                0x4e, 0xa7, 0xd6, 0xb3, 0xdf, 0xa3, 0x38, 0x19, 0x2a, 0xf0, 0x6c, 0xbb, 0xf4, 0x7a,
                0xd4, 0x05, 0x71, 0x5b, 0xc7, 0x83, 0x2b, 0xed, 0xb1, 0x46, 0x62, 0x17, 0xdc, 0x0d,
                0x93, 0x31, 0x4d, 0xe9, 0xf3, 0xc2, 0x5e, 0xec, 0x89, 0xf9, 0xa2, 0x1b, 0xfe, 0x0e,
                0x93,
            ]),
            unified_addr: vec![
                0x75, 0x31, 0x72, 0x70, 0x77, 0x6b, 0x64, 0x35, 0x78, 0x37, 0x74, 0x65, 0x34, 0x65,
                0x7a, 0x63, 0x6d, 0x6c, 0x71, 0x61, 0x72, 0x67, 0x6b, 0x66, 0x33, 0x63, 0x72, 0x73,
                0x70, 0x66, 0x71, 0x6b, 0x68, 0x6d, 0x78, 0x77, 0x66, 0x73, 0x71, 0x70, 0x71, 0x30,
                0x32, 0x34, 0x6a, 0x61, 0x73, 0x73, 0x32, 0x67, 0x37, 0x34, 0x65, 0x34, 0x72, 0x33,
                0x36, 0x7a, 0x70, 0x35, 0x30, 0x71, 0x73, 0x73, 0x33, 0x76, 0x68, 0x63, 0x74, 0x37,
                0x32, 0x73, 0x61, 0x63, 0x68, 0x75, 0x73, 0x30, 0x6b, 0x37, 0x30, 0x35, 0x78, 0x79,
                0x67, 0x68, 0x75, 0x6c, 0x72, 0x6a, 0x78, 0x63, 0x64, 0x77, 0x77, 0x64, 0x61, 0x71,
                0x30, 0x71, 0x6a, 0x6e, 0x6e, 0x73, 0x73, 0x66, 0x32, 0x66, 0x76, 0x61, 0x6b, 0x7a,
                0x7a, 0x38, 0x66, 0x34, 0x36, 0x70, 0x77, 0x38, 0x72, 0x63, 0x61, 0x39, 0x33, 0x36,
                0x66, 0x65, 0x79, 0x65, 0x66, 0x36, 0x6d, 0x6c, 0x36, 0x65, 0x71, 0x37, 0x77, 0x6c,
                0x6c,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: None,
            sapling_raw_addr: Some([
                0x02, 0xf1, 0x53, 0x6b, 0x62, 0x2c, 0x01, 0x34, 0x67, 0x42, 0xd8, 0xf9, 0x0e, 0x9d,
                0x4f, 0xf3, 0x91, 0x37, 0xf1, 0xbe, 0xbe, 0x6e, 0x23, 0xad, 0x99, 0x71, 0x77, 0x6b,
                0x33, 0x72, 0x70, 0x24, 0x94, 0xcc, 0x08, 0x95, 0x1e, 0xef, 0x03, 0x2b, 0x35, 0x35,
                0x0f,
            ]),
            orchard_raw_addr: None,
            unified_addr: vec![
                0x75, 0x31, 0x39, 0x64, 0x68, 0x78, 0x6d, 0x38, 0x38, 0x67, 0x76, 0x63, 0x77, 0x39,
                0x74, 0x77, 0x74, 0x76, 0x7a, 0x6b, 0x75, 0x37, 0x63, 0x7a, 0x37, 0x6a, 0x75, 0x33,
                0x72, 0x76, 0x70, 0x75, 0x35, 0x6a, 0x76, 0x68, 0x32, 0x63, 0x71, 0x38, 0x67, 0x6d,
                0x38, 0x71, 0x64, 0x65, 0x76, 0x6e, 0x6c, 0x34, 0x65, 0x73, 0x65, 0x79, 0x38, 0x33,
                0x66, 0x7a, 0x72, 0x39, 0x79, 0x6d, 0x66, 0x61, 0x33, 0x67, 0x6a, 0x6d, 0x6d, 0x6b,
                0x34, 0x72, 0x78, 0x6a, 0x32, 0x79, 0x70, 0x38, 0x38, 0x67, 0x74, 0x78, 0x64, 0x6b,
                0x71, 0x67, 0x6e, 0x6a, 0x36, 0x6e, 0x6e, 0x71, 0x79, 0x74, 0x66, 0x75, 0x68, 0x30,
                0x38, 0x75, 0x79, 0x63, 0x35, 0x66, 0x75, 0x72,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: Some([
                0x18, 0x3e, 0x31, 0xd4, 0x9f, 0x25, 0xc9, 0xa1, 0x38, 0xf4, 0x9b, 0x1a, 0x53, 0x7e,
                0xdc, 0xf0, 0x4b, 0xe3, 0x4a, 0x98,
            ]),
            sapling_raw_addr: Some([
                0x32, 0x46, 0xb5, 0x9a, 0x5b, 0x49, 0x2d, 0xab, 0x18, 0x55, 0xcc, 0x17, 0x6b, 0xdd,
                0xfa, 0x28, 0x41, 0x8f, 0x11, 0xf9, 0x7f, 0x7b, 0x36, 0x1c, 0xc3, 0xe8, 0x83, 0x4b,
                0x2c, 0x30, 0xd2, 0xa1, 0x71, 0x7d, 0xf3, 0x23, 0xef, 0x98, 0xea, 0x7d, 0xe7, 0x1d,
                0x2e,
            ]),
            orchard_raw_addr: Some([
                0xab, 0x6d, 0x26, 0x25, 0x2c, 0x52, 0x15, 0x47, 0x04, 0x9d, 0xe2, 0x08, 0x28, 0x3d,
                0x96, 0x27, 0x8b, 0xb2, 0x21, 0xa6, 0x87, 0x4c, 0xb5, 0xa8, 0x6a, 0xf1, 0xd3, 0xf8,
                0xb3, 0xdb, 0x3f, 0xbe, 0xe3, 0xdb, 0xef, 0xed, 0xcb, 0x2c, 0x71, 0xe3, 0xca, 0x1e,
                0xad,
            ]),
            unified_addr: vec![
                0x75, 0x31, 0x35, 0x63, 0x32, 0x79, 0x72, 0x66, 0x6a, 0x77, 0x74, 0x74, 0x36, 0x36,
                0x33, 0x6e, 0x71, 0x30, 0x68, 0x70, 0x71, 0x73, 0x68, 0x36, 0x78, 0x74, 0x6a, 0x79,
                0x32, 0x38, 0x38, 0x7a, 0x74, 0x76, 0x61, 0x63, 0x75, 0x61, 0x6c, 0x37, 0x33, 0x6d,
                0x66, 0x66, 0x75, 0x72, 0x33, 0x38, 0x33, 0x6d, 0x74, 0x65, 0x70, 0x63, 0x39, 0x68,
                0x72, 0x78, 0x74, 0x33, 0x74, 0x70, 0x34, 0x65, 0x61, 0x35, 0x68, 0x37, 0x38, 0x30,
                0x70, 0x67, 0x33, 0x68, 0x6b, 0x35, 0x75, 0x6a, 0x32, 0x67, 0x75, 0x39, 0x37, 0x70,
                0x32, 0x6c, 0x75, 0x6d, 0x39, 0x71, 0x6a, 0x68, 0x36, 0x32, 0x78, 0x72, 0x61, 0x37,
                0x63, 0x33, 0x68, 0x79, 0x36, 0x6e, 0x64, 0x71, 0x32, 0x78, 0x77, 0x68, 0x67, 0x37,
                0x65, 0x71, 0x38, 0x71, 0x37, 0x36, 0x76, 0x66, 0x32, 0x38, 0x63, 0x33, 0x32, 0x64,
                0x37, 0x76, 0x6d, 0x6c, 0x33, 0x7a, 0x32, 0x39, 0x34, 0x76, 0x74, 0x38, 0x65, 0x63,
                0x74, 0x63, 0x6c, 0x77, 0x36, 0x72, 0x30, 0x70, 0x32, 0x33, 0x6e, 0x64, 0x70, 0x35,
                0x78, 0x64, 0x7a, 0x66, 0x65, 0x78, 0x63, 0x39, 0x71, 0x77, 0x30, 0x65, 0x77, 0x75,
                0x72, 0x79, 0x6e, 0x6a, 0x35, 0x32, 0x35, 0x73, 0x6b, 0x78, 0x37, 0x6d, 0x30, 0x6a,
                0x75, 0x6b, 0x63, 0x6a, 0x72, 0x65, 0x61, 0x6b, 0x6a, 0x32, 0x61, 0x70, 0x33, 0x32,
                0x37, 0x6b, 0x6a, 0x71, 0x72, 0x76, 0x75, 0x67, 0x65, 0x64, 0x35, 0x66, 0x79, 0x30,
                0x73, 0x7a, 0x79,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: None,
            sapling_raw_addr: Some([
                0x97, 0x0d, 0xc3, 0x45, 0x0d, 0x34, 0x55, 0x41, 0x41, 0xd3, 0x56, 0xcb, 0x54, 0x80,
                0x56, 0x27, 0x9c, 0x57, 0x70, 0x8f, 0xa7, 0x3b, 0xd1, 0x6f, 0xfe, 0x9a, 0x2e, 0x24,
                0xea, 0x69, 0x48, 0x98, 0xa7, 0xb8, 0xaf, 0x1b, 0x0f, 0xf9, 0x25, 0x85, 0xd0, 0x26,
                0x23,
            ]),
            orchard_raw_addr: Some([
                0x04, 0x14, 0xbb, 0x62, 0xb8, 0x61, 0x49, 0xee, 0x73, 0x18, 0x51, 0xf2, 0x7d, 0x53,
                0x2a, 0xc0, 0x36, 0x11, 0x69, 0xda, 0x46, 0xe6, 0xd5, 0x3d, 0x19, 0xd3, 0xdf, 0xd0,
                0x7a, 0x5b, 0xae, 0x22, 0x96, 0x99, 0x22, 0xd8, 0xd0, 0xaf, 0x7d, 0xc1, 0xe1, 0x3b,
                0xae,
            ]),
            unified_addr: vec![
                0x75, 0x31, 0x7a, 0x64, 0x67, 0x39, 0x7a, 0x37, 0x6c, 0x77, 0x32, 0x71, 0x30, 0x37,
                0x35, 0x79, 0x77, 0x39, 0x6e, 0x30, 0x6a, 0x37, 0x66, 0x68, 0x38, 0x72, 0x79, 0x75,
                0x6a, 0x73, 0x72, 0x6a, 0x72, 0x72, 0x37, 0x34, 0x7a, 0x71, 0x78, 0x39, 0x39, 0x33,
                0x74, 0x63, 0x73, 0x68, 0x74, 0x66, 0x6d, 0x6d, 0x32, 0x71, 0x78, 0x6b, 0x74, 0x76,
                0x33, 0x72, 0x30, 0x34, 0x38, 0x65, 0x37, 0x74, 0x72, 0x34, 0x71, 0x35, 0x6a, 0x38,
                0x73, 0x66, 0x65, 0x70, 0x33, 0x67, 0x39, 0x6e, 0x76, 0x38, 0x77, 0x70, 0x78, 0x66,
                0x39, 0x76, 0x74, 0x72, 0x33, 0x6a, 0x7a, 0x6e, 0x6d, 0x6d, 0x35, 0x36, 0x7a, 0x37,
                0x75, 0x70, 0x6a, 0x65, 0x30, 0x35, 0x6b, 0x7a, 0x76, 0x6d, 0x75, 0x72, 0x6d, 0x70,
                0x63, 0x71, 0x70, 0x30, 0x34, 0x63, 0x30, 0x33, 0x37, 0x7a, 0x32, 0x64, 0x68, 0x33,
                0x35, 0x63, 0x64, 0x38, 0x32, 0x32, 0x35, 0x78, 0x74, 0x33, 0x6b, 0x34, 0x37, 0x6e,
                0x7a, 0x66, 0x68, 0x32, 0x79, 0x74, 0x73, 0x76, 0x30, 0x37, 0x6d, 0x33, 0x74, 0x38,
                0x67, 0x64, 0x33, 0x33, 0x70, 0x35, 0x72, 0x32, 0x76, 0x37, 0x6b, 0x66, 0x65, 0x67,
                0x39, 0x34, 0x35, 0x71, 0x37, 0x6a, 0x63, 0x30, 0x67, 0x68,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: Some([
                0x09, 0x8b, 0x79, 0x53, 0x5e, 0x79, 0x0f, 0xe5, 0x3e, 0x29, 0xfe, 0xf2, 0xb3, 0x76,
                0x66, 0x97, 0xac, 0x32, 0xb4, 0xf4,
            ]),
            sapling_raw_addr: Some([
                0xa8, 0xa8, 0x79, 0x7c, 0x1b, 0xa6, 0x9f, 0x78, 0x67, 0x2a, 0xff, 0xa6, 0x5b, 0x94,
                0x39, 0x75, 0x02, 0x69, 0x31, 0xea, 0x62, 0x84, 0x31, 0xf0, 0x99, 0x1e, 0x74, 0x48,
                0x72, 0xac, 0x9f, 0x36, 0x94, 0x6f, 0x5d, 0xcd, 0x68, 0x51, 0xa0, 0xb5, 0xaf, 0x29,
                0xcf,
            ]),
            orchard_raw_addr: Some([
                0x67, 0x8a, 0xb0, 0x07, 0x9b, 0xea, 0x28, 0xbf, 0x16, 0x5c, 0x1a, 0xb9, 0x76, 0xa2,
                0xa5, 0x8c, 0x18, 0xa7, 0x81, 0x1c, 0xa2, 0xad, 0x0a, 0xd6, 0x49, 0xe8, 0x76, 0x27,
                0x3d, 0x04, 0x32, 0x5d, 0xa6, 0xca, 0x53, 0xcd, 0xb8, 0x3c, 0x11, 0x1e, 0x8e, 0x43,
                0x94,
            ]),
            unified_addr: vec![
                0x75, 0x31, 0x79, 0x6a, 0x77, 0x33, 0x36, 0x6d, 0x7a, 0x77, 0x7a, 0x34, 0x6a, 0x64,
                0x38, 0x79, 0x74, 0x6e, 0x72, 0x64, 0x36, 0x67, 0x6b, 0x68, 0x37, 0x61, 0x79, 0x67,
                0x6d, 0x65, 0x6a, 0x32, 0x72, 0x74, 0x6c, 0x71, 0x67, 0x33, 0x76, 0x6a, 0x39, 0x79,
                0x65, 0x65, 0x68, 0x66, 0x64, 0x6b, 0x32, 0x33, 0x35, 0x6a, 0x65, 0x73, 0x36, 0x72,
                0x71, 0x70, 0x66, 0x6a, 0x6c, 0x77, 0x71, 0x67, 0x33, 0x37, 0x76, 0x34, 0x6d, 0x65,
                0x35, 0x75, 0x67, 0x6a, 0x6c, 0x7a, 0x72, 0x64, 0x6e, 0x68, 0x67, 0x6c, 0x38, 0x38,
                0x72, 0x72, 0x6c, 0x65, 0x67, 0x38, 0x33, 0x78, 0x38, 0x61, 0x30, 0x71, 0x6d, 0x77,
                0x79, 0x7a, 0x79, 0x36, 0x39, 0x38, 0x72, 0x38, 0x76, 0x75, 0x63, 0x6e, 0x34, 0x68,
                0x77, 0x61, 0x67, 0x72, 0x36, 0x71, 0x63, 0x65, 0x7a, 0x73, 0x73, 0x66, 0x36, 0x63,
                0x68, 0x30, 0x74, 0x37, 0x34, 0x66, 0x63, 0x30, 0x68, 0x76, 0x6e, 0x36, 0x6d, 0x37,
                0x37, 0x37, 0x68, 0x70, 0x37, 0x35, 0x37, 0x61, 0x76, 0x66, 0x32, 0x63, 0x6b, 0x7a,
                0x63, 0x33, 0x32, 0x39, 0x35, 0x70, 0x33, 0x32, 0x34, 0x30, 0x71, 0x6c, 0x76, 0x75,
                0x70, 0x79, 0x64, 0x65, 0x6c, 0x37, 0x33, 0x6c, 0x37, 0x71, 0x63, 0x74, 0x6e, 0x34,
                0x30, 0x63, 0x79, 0x6b, 0x35, 0x75, 0x36, 0x73, 0x65, 0x73, 0x61, 0x77, 0x72, 0x36,
                0x7a, 0x6d, 0x6b, 0x74, 0x6c, 0x6a, 0x66, 0x78, 0x68, 0x71, 0x63, 0x63, 0x37, 0x65,
                0x77, 0x34, 0x68,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: None,
            sapling_raw_addr: Some([
                0x35, 0x09, 0xc9, 0xe0, 0x69, 0xe8, 0x9f, 0xe5, 0x01, 0xd9, 0x76, 0x22, 0xc2, 0x83,
                0xac, 0x98, 0x92, 0x3d, 0xa2, 0xd7, 0xe6, 0xeb, 0x34, 0x6b, 0x4b, 0xaf, 0xa6, 0x78,
                0x65, 0xe1, 0xe6, 0xda, 0xe7, 0xcf, 0x21, 0x3b, 0x1e, 0xa3, 0x64, 0x8d, 0xc0, 0x9b,
                0x48,
            ]),
            orchard_raw_addr: None,
            unified_addr: vec![
                0x75, 0x31, 0x71, 0x78, 0x6b, 0x79, 0x6d, 0x6b, 0x68, 0x64, 0x7a, 0x63, 0x72, 0x72,
                0x7a, 0x6c, 0x79, 0x34, 0x66, 0x72, 0x71, 0x75, 0x63, 0x64, 0x6b, 0x66, 0x73, 0x6e,
                0x6a, 0x37, 0x77, 0x76, 0x77, 0x6c, 0x71, 0x65, 0x75, 0x30, 0x6b, 0x34, 0x66, 0x33,
                0x74, 0x72, 0x70, 0x6e, 0x65, 0x7a, 0x34, 0x76, 0x77, 0x7a, 0x75, 0x68, 0x6b, 0x64,
                0x61, 0x6a, 0x6b, 0x35, 0x65, 0x37, 0x36, 0x34, 0x37, 0x34, 0x6d, 0x6a, 0x34, 0x39,
                0x6b, 0x75, 0x65, 0x39, 0x78, 0x39, 0x68, 0x66, 0x77, 0x65, 0x66, 0x38, 0x6d, 0x34,
                0x37, 0x73, 0x63, 0x63, 0x38, 0x61, 0x70, 0x39, 0x33, 0x68, 0x6e, 0x67, 0x73, 0x65,
                0x75, 0x67, 0x33, 0x38, 0x64, 0x70, 0x63, 0x71,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: Some([
                0x30, 0xd0, 0x69, 0x89, 0x6c, 0xff, 0x30, 0xeb, 0x41, 0x4f, 0x72, 0x7b, 0x89, 0xe0,
                0x01, 0xaf, 0xa2, 0xfb, 0x8d, 0xc3,
            ]),
            sapling_raw_addr: Some([
                0x55, 0xbc, 0x46, 0xae, 0xa6, 0xf6, 0x0c, 0x1d, 0x61, 0x91, 0x56, 0x40, 0x02, 0x9b,
                0x2a, 0xf6, 0x33, 0x4d, 0x7d, 0x27, 0xe1, 0xc4, 0x7a, 0x24, 0x8a, 0xb4, 0x7c, 0x9f,
                0xbe, 0x5d, 0x2d, 0x7b, 0xb5, 0x81, 0x87, 0x39, 0xf0, 0x62, 0xe3, 0x71, 0x36, 0x65,
                0x4c,
            ]),
            orchard_raw_addr: None,
            unified_addr: vec![
                0x75, 0x31, 0x64, 0x33, 0x72, 0x6b, 0x71, 0x77, 0x7a, 0x39, 0x6b, 0x37, 0x34, 0x6b,
                0x74, 0x75, 0x76, 0x6c, 0x74, 0x6e, 0x32, 0x74, 0x6d, 0x74, 0x6a, 0x36, 0x67, 0x6b,
                0x6c, 0x67, 0x39, 0x33, 0x68, 0x77, 0x73, 0x39, 0x37, 0x79, 0x74, 0x73, 0x33, 0x7a,
                0x75, 0x36, 0x30, 0x38, 0x76, 0x6b, 0x76, 0x67, 0x33, 0x63, 0x32, 0x79, 0x73, 0x68,
                0x6d, 0x6d, 0x33, 0x30, 0x71, 0x36, 0x68, 0x38, 0x6b, 0x64, 0x34, 0x72, 0x74, 0x33,
                0x73, 0x30, 0x76, 0x70, 0x34, 0x75, 0x61, 0x6a, 0x77, 0x66, 0x68, 0x39, 0x61, 0x37,
                0x35, 0x65, 0x38, 0x61, 0x38, 0x37, 0x30, 0x6e, 0x39, 0x34, 0x6c, 0x6b, 0x70, 0x65,
                0x78, 0x35, 0x61, 0x73, 0x37, 0x71, 0x6e, 0x77, 0x33, 0x34, 0x36, 0x34, 0x6e, 0x72,
                0x61, 0x74, 0x75, 0x63, 0x35, 0x6e, 0x35, 0x79, 0x35, 0x75, 0x76, 0x6e, 0x7a, 0x76,
                0x66, 0x32, 0x6d, 0x34, 0x66, 0x65, 0x6a, 0x68, 0x7a, 0x65, 0x76, 0x68, 0x6e, 0x30,
                0x6d,
            ],
        },
        TestVector {
            p2pkh_bytes: None,
            p2sh_bytes: None,
            sapling_raw_addr: Some([
                0x5c, 0x26, 0xa8, 0x11, 0x77, 0x29, 0x33, 0x4a, 0x95, 0x7c, 0xa7, 0x94, 0x1d, 0x47,
                0xb2, 0xce, 0x70, 0x40, 0xe8, 0x44, 0xfa, 0x98, 0x82, 0xc2, 0x5b, 0xfd, 0x2f, 0xcf,
                0x51, 0xfa, 0x8a, 0xb2, 0x13, 0x76, 0xf5, 0x30, 0x0d, 0x01, 0x23, 0xf5, 0x70, 0x3e,
                0x9e,
            ]),
            orchard_raw_addr: None,
            unified_addr: vec![
                0x75, 0x31, 0x61, 0x66, 0x61, 0x74, 0x30, 0x64, 0x75, 0x74, 0x61, 0x73, 0x79, 0x66,
                0x6b, 0x73, 0x61, 0x61, 0x76, 0x6d, 0x34, 0x72, 0x6c, 0x33, 0x61, 0x78, 0x71, 0x6e,
                0x75, 0x78, 0x6c, 0x77, 0x74, 0x37, 0x36, 0x35, 0x73, 0x39, 0x34, 0x76, 0x39, 0x7a,
                0x71, 0x6a, 0x74, 0x30, 0x39, 0x7a, 0x79, 0x36, 0x33, 0x64, 0x6c, 0x35, 0x6d, 0x6d,
                0x79, 0x70, 0x39, 0x78, 0x35, 0x63, 0x6b, 0x76, 0x72, 0x75, 0x6c, 0x6d, 0x64, 0x71,
                0x34, 0x63, 0x6b, 0x6a, 0x33, 0x72, 0x72, 0x76, 0x37, 0x66, 0x39, 0x6c, 0x37, 0x75,
                0x6e, 0x78, 0x66, 0x6c, 0x30, 0x64, 0x38, 0x72, 0x36, 0x35, 0x38, 0x32, 0x34, 0x74,
                0x67, 0x73, 0x67, 0x72, 0x76, 0x67, 0x30, 0x71,
            ],
        },
    ];
    for tv in test_vectors {
        // Double-check test vectors match requirements:
        // - Only one of P2PKH and P2SH.
        assert!(tv.p2pkh_bytes.is_none() || tv.p2sh_bytes.is_none());
        // - At least one shielded receiver.
        assert!(tv.sapling_raw_addr.is_some() || tv.orchard_raw_addr.is_some());
        // `unified_addr` holds the ASCII bytes of the Bech32m string.
        let addr_string = String::from_utf8(tv.unified_addr).unwrap();
        // Assemble the expected receiver set (in preference order).
        let receivers = iter::empty()
            .chain(tv.orchard_raw_addr.map(Receiver::Orchard))
            .chain(tv.sapling_raw_addr.map(Receiver::Sapling))
            .chain(tv.p2sh_bytes.map(Receiver::P2sh))
            .chain(tv.p2pkh_bytes.map(Receiver::P2pkh))
            .collect();
        let expected_addr = ZcashAddress::from_unified(Network::Main, unified::Address(receivers));
        // Test parsing
        let addr: ZcashAddress = addr_string.parse().unwrap();
        assert_eq!(addr, expected_addr);
        // Test serialization
        assert_eq!(expected_addr.to_string(), addr_string);
    }
}

View File

@ -15,11 +15,14 @@ edition = "2018"
blake2b_simd = "0.5"
byteorder = "1"
crypto_api_chachapoly = "0.4"
ff = "0.8"
group = "0.8"
rand_core = "0.5.1"
ff = "0.10"
group = "0.10"
rand_core = "0.6"
subtle = "2.2.3"
[dev-dependencies]
zcash_primitives = { version = "0.5", path = "../../zcash_primitives" }
jubjub = "0.5.1"
jubjub = "0.7"
[lib]
bench = false

View File

@ -0,0 +1,69 @@
//! APIs for batch trial decryption.
use std::iter;
use crate::{
try_compact_note_decryption_inner, try_note_decryption_inner, Domain, EphemeralKeyBytes,
ShieldedOutput,
};
/// Trial decryption of a batch of notes with a set of recipients.
///
/// Each output in `outputs` is tried against every incoming viewing key in `ivks`.
/// The result vector is in ivk-major order: all outputs attempted with `ivks[0]`
/// first, then all outputs with `ivks[1]`, and so on. Each entry is `Some` only if
/// decryption and note validation succeeded for that (ivk, output) pair.
///
/// This is the batched version of [`zcash_note_encryption::try_note_decryption`].
pub fn try_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
    ivks: &[D::IncomingViewingKey],
    outputs: &[(D, Output)],
) -> Vec<Option<(D::Note, D::Recipient, D::Memo)>> {
    // Delegate to the shared batching core, plugging in the full-note
    // (ciphertext + memo) decryption routine.
    batch_note_decryption(ivks, outputs, try_note_decryption_inner)
}
/// Trial decryption of a batch of notes for light clients with a set of recipients.
///
/// Each output in `outputs` is tried against every incoming viewing key in `ivks`;
/// results are returned in ivk-major order, one entry per (ivk, output) pair. Unlike
/// the full variant, no memo is recovered — only the note and recipient address.
///
/// This is the batched version of [`zcash_note_encryption::try_compact_note_decryption`].
pub fn try_compact_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
    ivks: &[D::IncomingViewingKey],
    outputs: &[(D, Output)],
) -> Vec<Option<(D::Note, D::Recipient)>> {
    // Delegate to the shared batching core, plugging in the compact (52-byte
    // plaintext, no memo) decryption routine.
    batch_note_decryption(ivks, outputs, try_compact_note_decryption_inner)
}
/// Shared implementation of batched trial decryption.
///
/// `decrypt_inner` is the per-pair decryption routine (full-note or compact). It is
/// only invoked for (ivk, output) pairs whose symmetric key was successfully derived;
/// pairs whose ephemeral key failed to parse yield `None` without calling it.
///
/// The returned vector has one entry per (ivk, output) combination, in ivk-major
/// order (all outputs for `ivks[0]`, then all outputs for `ivks[1]`, ...).
fn batch_note_decryption<D: Domain, Output: ShieldedOutput<D>, F, FR>(
    ivks: &[D::IncomingViewingKey],
    outputs: &[(D, Output)],
    decrypt_inner: F,
) -> Vec<Option<FR>>
where
    F: Fn(&D, &D::IncomingViewingKey, &EphemeralKeyBytes, &Output, D::SymmetricKey) -> Option<FR>,
{
    // Fetch the ephemeral keys for each output and batch-parse them.
    // `batch_epk` yields, per output, the parse result (`None` if the encoded
    // point was rejected) alongside the original encoded bytes.
    let ephemeral_keys = D::batch_epk(outputs.iter().map(|(_, output)| output.ephemeral_key()));

    // Derive the shared secrets for all combinations of (ivk, output).
    // The scalar multiplications cannot benefit from batching.
    //
    // INVARIANT: this iteration order (ivk-major, outputs in order within each
    // ivk) must match the matrix reconstruction below, so that the `keys`
    // vector produced by `batch_kdf` lines up element-for-element with the
    // (ivk, output) pairs it is zipped against.
    let items = ivks.iter().flat_map(|ivk| {
        ephemeral_keys.iter().map(move |(epk, ephemeral_key)| {
            (
                // `None` here records that `D::epk` rejected this ephemeral key;
                // `batch_kdf` is documented to propagate that as a `None` key.
                epk.as_ref().map(|epk| D::ka_agree_dec(ivk, epk)),
                ephemeral_key,
            )
        })
    });

    // Run the batch-KDF to obtain the symmetric keys from the shared secrets.
    let keys = D::batch_kdf(items);

    // Finish the trial decryption!
    ivks.iter()
        .flat_map(|ivk| {
            // Reconstruct the matrix of (ivk, output) combinations.
            // `repeat(ivk)` is truncated by `zip` to the number of outputs, so
            // each ivk is paired with every (ephemeral_key, output) in order.
            iter::repeat(ivk)
                .zip(ephemeral_keys.iter())
                .zip(outputs.iter())
        })
        .zip(keys)
        .map(|(((ivk, (_, ephemeral_key)), (domain, output)), key)| {
            // The `and_then` propagates any potential rejection from `D::epk`.
            key.and_then(|key| decrypt_inner(domain, ivk, ephemeral_key, output, key))
        })
        .collect()
}

View File

@ -5,9 +5,10 @@
use crypto_api_chachapoly::{ChaCha20Ietf, ChachaPolyIetf};
use rand_core::RngCore;
use std::convert::TryFrom;
use subtle::{Choice, ConstantTimeEq};
pub mod batch;
pub const COMPACT_NOTE_SIZE: usize = 1 + // version
11 + // diversifier
8 + // value
@ -34,6 +35,7 @@ impl AsRef<[u8]> for OutgoingCipherKey {
}
}
#[derive(Clone, Debug)]
pub struct EphemeralKeyBytes(pub [u8; 32]);
impl AsRef<[u8]> for EphemeralKeyBytes {
@ -64,7 +66,7 @@ pub enum NoteValidity {
}
pub trait Domain {
type EphemeralSecretKey;
type EphemeralSecretKey: ConstantTimeEq;
type EphemeralPublicKey;
type SharedSecret;
type SymmetricKey: AsRef<[u8]>;
@ -75,7 +77,7 @@ pub trait Domain {
type OutgoingViewingKey;
type ValueCommitment;
type ExtractedCommitment;
type ExtractedCommitmentBytes: Eq + TryFrom<Self::ExtractedCommitment>;
type ExtractedCommitmentBytes: Eq + for<'a> From<&'a Self::ExtractedCommitment>;
type Memo;
fn derive_esk(note: &Self::Note) -> Option<Self::EphemeralSecretKey>;
@ -99,6 +101,19 @@ pub trait Domain {
fn kdf(secret: Self::SharedSecret, ephemeral_key: &EphemeralKeyBytes) -> Self::SymmetricKey;
/// Computes `Self::kdf` on a batch of items.
///
/// For each item in the batch, if the shared secret is `None`, this returns `None` at
/// that position.
fn batch_kdf<'a>(
items: impl Iterator<Item = (Option<Self::SharedSecret>, &'a EphemeralKeyBytes)>,
) -> Vec<Option<Self::SymmetricKey>> {
// Default implementation: do the non-batched thing.
items
.map(|(secret, ephemeral_key)| secret.map(|secret| Self::kdf(secret, ephemeral_key)))
.collect()
}
// for right now, we just need `recipient` to get `d`; in the future when we
// can get that from a Sapling note, the recipient parameter will be able
// to be removed.
@ -111,7 +126,7 @@ pub trait Domain {
fn derive_ock(
ovk: &Self::OutgoingViewingKey,
cv: &Self::ValueCommitment,
cmstar: &Self::ExtractedCommitment,
cmstar_bytes: &Self::ExtractedCommitmentBytes,
ephemeral_key: &EphemeralKeyBytes,
) -> OutgoingCipherKey;
@ -122,6 +137,24 @@ pub trait Domain {
fn epk_bytes(epk: &Self::EphemeralPublicKey) -> EphemeralKeyBytes;
fn epk(ephemeral_key: &EphemeralKeyBytes) -> Option<Self::EphemeralPublicKey>;
/// Computes `Self::epk` on a batch of ephemeral keys.
///
/// This is useful for protocols where the underlying curve requires an inversion to
/// parse an encoded point.
///
/// For usability, this returns tuples of the ephemeral keys and the result of parsing
/// them.
fn batch_epk(
ephemeral_keys: impl Iterator<Item = EphemeralKeyBytes>,
) -> Vec<(Option<Self::EphemeralPublicKey>, EphemeralKeyBytes)> {
// Default implementation: do the non-batched thing.
ephemeral_keys
.map(|ephemeral_key| (Self::epk(&ephemeral_key), ephemeral_key))
.collect()
}
fn check_epk_bytes<F: Fn(&Self::EphemeralSecretKey) -> NoteValidity>(
note: &Self::Note,
check: F,
@ -139,7 +172,7 @@ pub trait Domain {
&self,
pk_d: &Self::DiversifiedTransmissionKey,
esk: &Self::EphemeralSecretKey,
epk: &Self::EphemeralPublicKey,
ephemeral_key: &EphemeralKeyBytes,
plaintext: &[u8],
) -> Option<(Self::Note, Self::Recipient)>;
@ -149,14 +182,14 @@ pub trait Domain {
fn extract_memo(&self, plaintext: &[u8]) -> Self::Memo;
fn extract_pk_d(
out_plaintext: &[u8; OUT_CIPHERTEXT_SIZE],
out_plaintext: &[u8; OUT_PLAINTEXT_SIZE],
) -> Option<Self::DiversifiedTransmissionKey>;
fn extract_esk(out_plaintext: &[u8; OUT_CIPHERTEXT_SIZE]) -> Option<Self::EphemeralSecretKey>;
fn extract_esk(out_plaintext: &[u8; OUT_PLAINTEXT_SIZE]) -> Option<Self::EphemeralSecretKey>;
}
pub trait ShieldedOutput<D: Domain> {
fn epk(&self) -> &D::EphemeralPublicKey;
fn ephemeral_key(&self) -> EphemeralKeyBytes;
fn cmstar_bytes(&self) -> D::ExtractedCommitmentBytes;
fn enc_ciphertext(&self) -> &[u8];
}
@ -292,7 +325,7 @@ impl<D: Domain> NoteEncryption<D> {
rng: &mut R,
) -> [u8; OUT_CIPHERTEXT_SIZE] {
let (ock, input) = if let Some(ovk) = &self.ovk {
let ock = D::derive_ock(ovk, &cv, &cmstar, &D::epk_bytes(&self.epk));
let ock = D::derive_ock(ovk, &cv, &cmstar.into(), &D::epk_bytes(&self.epk));
let input = D::outgoing_plaintext_bytes(&self.note, &self.esk);
(ock, input)
@ -322,21 +355,33 @@ impl<D: Domain> NoteEncryption<D> {
/// Trial decryption of the full note plaintext by the recipient.
///
/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ivk`.
/// If successful, the corresponding Sapling note and memo are returned, along with the
/// `PaymentAddress` to which the note was sent.
/// If successful, the corresponding note and memo are returned, along with the address to
/// which the note was sent.
///
/// Implements section 4.19.2 of the
/// [Zcash Protocol Specification](https://zips.z.cash/protocol/nu5.pdf#decryptivk)
/// [Zcash Protocol Specification](https://zips.z.cash/protocol/nu5.pdf#decryptivk).
pub fn try_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
domain: &D,
ivk: &D::IncomingViewingKey,
output: &Output,
) -> Option<(D::Note, D::Recipient, D::Memo)> {
let ephemeral_key = output.ephemeral_key();
let epk = D::epk(&ephemeral_key)?;
let shared_secret = D::ka_agree_dec(ivk, &epk);
let key = D::kdf(shared_secret, &ephemeral_key);
try_note_decryption_inner(domain, ivk, &ephemeral_key, output, key)
}
fn try_note_decryption_inner<D: Domain, Output: ShieldedOutput<D>>(
domain: &D,
ivk: &D::IncomingViewingKey,
ephemeral_key: &EphemeralKeyBytes,
output: &Output,
key: D::SymmetricKey,
) -> Option<(D::Note, D::Recipient, D::Memo)> {
assert_eq!(output.enc_ciphertext().len(), ENC_CIPHERTEXT_SIZE);
let shared_secret = D::ka_agree_dec(ivk, output.epk());
let key = D::kdf(shared_secret, &D::epk_bytes(output.epk()));
let mut plaintext = [0; ENC_CIPHERTEXT_SIZE];
assert_eq!(
ChachaPolyIetf::aead_cipher()
@ -354,7 +399,7 @@ pub fn try_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
let (note, to) = parse_note_plaintext_without_memo_ivk(
domain,
ivk,
output.epk(),
ephemeral_key,
&output.cmstar_bytes(),
&plaintext,
)?;
@ -366,13 +411,13 @@ pub fn try_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
fn parse_note_plaintext_without_memo_ivk<D: Domain>(
domain: &D,
ivk: &D::IncomingViewingKey,
epk: &D::EphemeralPublicKey,
ephemeral_key: &EphemeralKeyBytes,
cmstar_bytes: &D::ExtractedCommitmentBytes,
plaintext: &[u8],
) -> Option<(D::Note, D::Recipient)> {
let (note, to) = domain.parse_note_plaintext_without_memo_ivk(ivk, &plaintext)?;
if let NoteValidity::Valid = check_note_validity::<D>(&note, epk, cmstar_bytes) {
if let NoteValidity::Valid = check_note_validity::<D>(&note, ephemeral_key, cmstar_bytes) {
Some((note, to))
} else {
None
@ -381,16 +426,13 @@ fn parse_note_plaintext_without_memo_ivk<D: Domain>(
fn check_note_validity<D: Domain>(
note: &D::Note,
epk: &D::EphemeralPublicKey,
ephemeral_key: &EphemeralKeyBytes,
cmstar_bytes: &D::ExtractedCommitmentBytes,
) -> NoteValidity {
if D::ExtractedCommitmentBytes::try_from(D::cmstar(&note))
.map_or(false, |cs| &cs == cmstar_bytes)
{
let epk_bytes = D::epk_bytes(epk);
if &D::ExtractedCommitmentBytes::from(&D::cmstar(&note)) == cmstar_bytes {
D::check_epk_bytes(&note, |derived_esk| {
if D::epk_bytes(&D::ka_derive_public(&note, &derived_esk))
.ct_eq(&epk_bytes)
.ct_eq(&ephemeral_key)
.into()
{
NoteValidity::Valid
@ -407,8 +449,8 @@ fn check_note_validity<D: Domain>(
/// Trial decryption of the compact note plaintext by the recipient for light clients.
///
/// Attempts to decrypt and validate the first 52 bytes of `enc_ciphertext` using the
/// given `ivk`. If successful, the corresponding Sapling note is returned, along with the
/// `PaymentAddress` to which the note was sent.
/// given `ivk`. If successful, the corresponding note is returned, along with the address
/// to which the note was sent.
///
/// Implements the procedure specified in [`ZIP 307`].
///
@ -418,10 +460,23 @@ pub fn try_compact_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
ivk: &D::IncomingViewingKey,
output: &Output,
) -> Option<(D::Note, D::Recipient)> {
assert_eq!(output.enc_ciphertext().len(), COMPACT_NOTE_SIZE);
let ephemeral_key = output.ephemeral_key();
let shared_secret = D::ka_agree_dec(&ivk, output.epk());
let key = D::kdf(shared_secret, &D::epk_bytes(output.epk()));
let epk = D::epk(&ephemeral_key)?;
let shared_secret = D::ka_agree_dec(&ivk, &epk);
let key = D::kdf(shared_secret, &ephemeral_key);
try_compact_note_decryption_inner(domain, ivk, &ephemeral_key, output, key)
}
fn try_compact_note_decryption_inner<D: Domain, Output: ShieldedOutput<D>>(
domain: &D,
ivk: &D::IncomingViewingKey,
ephemeral_key: &EphemeralKeyBytes,
output: &Output,
key: D::SymmetricKey,
) -> Option<(D::Note, D::Recipient)> {
assert_eq!(output.enc_ciphertext().len(), COMPACT_NOTE_SIZE);
// Start from block 1 to skip over Poly1305 keying output
let mut plaintext = [0; COMPACT_NOTE_SIZE];
@ -431,21 +486,41 @@ pub fn try_compact_note_decryption<D: Domain, Output: ShieldedOutput<D>>(
parse_note_plaintext_without_memo_ivk(
domain,
ivk,
output.epk(),
ephemeral_key,
&output.cmstar_bytes(),
&plaintext,
)
}
/// Recovery of the full note plaintext by the sender.
///
/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ovk`.
/// If successful, the corresponding note and memo are returned, along with the address to
/// which the note was sent.
///
/// Implements [Zcash Protocol Specification section 4.19.3][decryptovk].
///
/// [decryptovk]: https://zips.z.cash/protocol/nu5.pdf#decryptovk
pub fn try_output_recovery_with_ovk<D: Domain, Output: ShieldedOutput<D>>(
domain: &D,
ovk: &D::OutgoingViewingKey,
output: &Output,
cv: &D::ValueCommitment,
out_ciphertext: &[u8],
) -> Option<(D::Note, D::Recipient, D::Memo)> {
let ock = D::derive_ock(ovk, &cv, &output.cmstar_bytes(), &output.ephemeral_key());
try_output_recovery_with_ock(domain, &ock, output, out_ciphertext)
}
/// Recovery of the full note plaintext by the sender.
///
/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ock`.
/// If successful, the corresponding Sapling note and memo are returned, along with the
/// `PaymentAddress` to which the note was sent.
/// If successful, the corresponding note and memo are returned, along with the address to
/// which the note was sent.
///
/// Implements part of section 4.19.3 of the
/// [Zcash Protocol Specification](https://zips.z.cash/protocol/nu5.pdf#decryptovk)
/// For decryption using a Full Viewing Key see [`try_sapling_output_recovery`].
/// [Zcash Protocol Specification](https://zips.z.cash/protocol/nu5.pdf#decryptovk).
/// For decryption using a Full Viewing Key see [`try_output_recovery_with_ovk`].
pub fn try_output_recovery_with_ock<D: Domain, Output: ShieldedOutput<D>>(
domain: &D,
ock: &OutgoingCipherKey,
@ -455,7 +530,7 @@ pub fn try_output_recovery_with_ock<D: Domain, Output: ShieldedOutput<D>>(
assert_eq!(output.enc_ciphertext().len(), ENC_CIPHERTEXT_SIZE);
assert_eq!(out_ciphertext.len(), OUT_CIPHERTEXT_SIZE);
let mut op = [0; OUT_CIPHERTEXT_SIZE];
let mut op = [0; OUT_PLAINTEXT_SIZE];
assert_eq!(
ChachaPolyIetf::aead_cipher()
.open_to(&mut op, &out_ciphertext, &[], ock.as_ref(), &[0u8; 12])
@ -466,11 +541,12 @@ pub fn try_output_recovery_with_ock<D: Domain, Output: ShieldedOutput<D>>(
let pk_d = D::extract_pk_d(&op)?;
let esk = D::extract_esk(&op)?;
let ephemeral_key = output.ephemeral_key();
let shared_secret = D::ka_agree_enc(&esk, &pk_d);
// The small-order point check at the point of output parsing rejects
// non-canonical encodings, so reencoding here for the KDF should
// be okay.
let key = D::kdf(shared_secret, &D::epk_bytes(output.epk()));
let key = D::kdf(shared_secret, &ephemeral_key);
let mut plaintext = [0; ENC_CIPHERTEXT_SIZE];
assert_eq!(
@ -487,11 +563,19 @@ pub fn try_output_recovery_with_ock<D: Domain, Output: ShieldedOutput<D>>(
);
let (note, to) =
domain.parse_note_plaintext_without_memo_ovk(&pk_d, &esk, output.epk(), &plaintext)?;
domain.parse_note_plaintext_without_memo_ovk(&pk_d, &esk, &ephemeral_key, &plaintext)?;
let memo = domain.extract_memo(&plaintext);
// ZIP 212: Check that the esk provided to this function is consistent with the esk we
// can derive from the note.
if let Some(derived_esk) = D::derive_esk(&note) {
if (!derived_esk.ct_eq(&esk)).into() {
return None;
}
}
if let NoteValidity::Valid =
check_note_validity::<D>(&note, output.epk(), &output.cmstar_bytes())
check_note_validity::<D>(&note, &ephemeral_key, &output.cmstar_bytes())
{
Some((note, to, memo))
} else {

View File

@ -8,6 +8,12 @@ and this library adheres to Rust's notion of
## [Unreleased]
### Changed
- MSRV is now 1.51.0.
- `epk: jubjub::ExtendedPoint` has been replaced by
`ephemeral_key: zcash_note_encryption::EphemeralKeyBytes` in various places:
- `zcash_client_backend::wallet::WalletShieldedOutput`: the `epk` field has
been replaced by `ephemeral_key`.
- `zcash_client_backend::proto::compact_formats::CompactOutput`: the `epk`
method has been replaced by `ephemeral_key`.
- Renamed the following in `zcash_client_backend::data_api` to use lower-case
abbreviations (matching Rust naming conventions):
- `error::Error::InvalidExtSK` to `Error::InvalidExtSk`

View File

@ -14,20 +14,20 @@ edition = "2018"
[dependencies]
bech32 = "0.8"
bls12_381 = "0.3.1"
bls12_381 = "0.5"
bs58 = { version = "0.4", features = ["check"] }
base64 = "0.13"
ff = "0.8"
group = "0.8"
ff = "0.10"
group = "0.10"
hex = "0.4"
hdwallet = { version = "0.3.0", optional = true }
jubjub = "0.5.1"
jubjub = "0.7"
log = "0.4"
nom = "6.1"
percent-encoding = "2.1.0"
proptest = { version = "0.10.1", optional = true }
proptest = { version = "1.0.0", optional = true }
protobuf = "2.20"
rand_core = "0.5.1"
rand_core = "0.6"
ripemd160 = { version = "0.9.1", optional = true }
secp256k1 = { version = "0.19", optional = true }
sha2 = { version = "0.9", optional = true }
@ -41,8 +41,7 @@ protobuf-codegen-pure = "2.20"
[dev-dependencies]
gumdrop = "0.8"
rand_core = "0.5.1"
rand_xorshift = "0.2"
rand_xorshift = "0.3"
tempfile = "3.1.0"
zcash_client_sqlite = { version = "0.3", path = "../zcash_client_sqlite" }
zcash_proofs = { version = "0.5", path = "../zcash_proofs" }
@ -51,5 +50,8 @@ zcash_proofs = { version = "0.5", path = "../zcash_proofs" }
transparent-inputs = ["ripemd160", "hdwallet", "sha2", "secp256k1"]
test-dependencies = ["proptest", "zcash_primitives/test-dependencies", "hdwallet", "sha2"]
[lib]
bench = false
[badges]
maintenance = { status = "actively-developed" }

View File

@ -8,6 +8,7 @@ use crate::encoding::{
};
/// An address that funds can be sent to.
// TODO: rename to ParsedAddress
#[derive(Debug, PartialEq, Clone)]
pub enum RecipientAddress {
Shielded(PaymentAddress),

View File

@ -452,14 +452,14 @@ pub mod testing {
_received_tx: &DecryptedTransaction,
_nullifiers: &[(AccountId, Nullifier)],
) -> Result<Self::TxRef, Self::Error> {
Ok(TxId([0u8; 32]))
Ok(TxId::from_bytes([0u8; 32]))
}
fn store_sent_tx(
&mut self,
_sent_tx: &SentTransaction,
) -> Result<Self::TxRef, Self::Error> {
Ok(TxId([0u8; 32]))
Ok(TxId::from_bytes([0u8; 32]))
}
fn rewind_to_height(&mut self, _block_height: BlockHeight) -> Result<(), Self::Error> {

View File

@ -24,6 +24,9 @@ pub enum ChainInvalid {
#[derive(Debug)]
pub enum Error<NoteId> {
/// The amount specified exceeds the allowed range.
InvalidAmount,
/// Unable to create a new spend because the wallet balance is not sufficient.
/// The first argument is the amount available, the second is the amount needed
/// to construct a valid transaction.
@ -80,6 +83,10 @@ impl ChainInvalid {
impl<N: fmt::Display> fmt::Display for Error<N> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self {
Error::InvalidAmount => write!(
f,
"The value lies outside the valid range of Zcash amounts."
),
Error::InsufficientBalance(have, need) => write!(
f,
"Insufficient balance (have {}, need {} including fee)",

View File

@ -1,6 +1,6 @@
use std::fmt::Debug;
use zcash_primitives::{
consensus::{self, BranchId, NetworkUpgrade},
consensus::{self, NetworkUpgrade},
memo::MemoBytes,
sapling::prover::TxProver,
transaction::{
@ -58,7 +58,7 @@ where
let sapling_outputs = decrypt_transaction(params, height, tx, &extfvks);
if !(sapling_outputs.is_empty() && tx.vout.is_empty()) {
if !(sapling_outputs.is_empty() && tx.transparent_bundle().iter().all(|b| b.vout.is_empty())) {
let nullifiers = data.get_all_nullifiers()?;
data.store_decrypted_tx(
&DecryptedTransaction {
@ -224,12 +224,22 @@ where
.get_target_and_anchor_heights()
.and_then(|x| x.ok_or_else(|| Error::ScanRequired.into()))?;
let target_value = request.payments.iter().map(|p| p.amount).sum::<Amount>() + DEFAULT_FEE;
let value = request
.payments
.iter()
.map(|p| p.amount)
.sum::<Option<Amount>>()
.ok_or_else(|| E::from(Error::InvalidAmount))?;
let target_value = (value + DEFAULT_FEE).ok_or_else(|| E::from(Error::InvalidAmount))?;
let spendable_notes =
wallet_db.select_unspent_sapling_notes(account, target_value, anchor_height)?;
// Confirm we were able to select sufficient value
let selected_value = spendable_notes.iter().map(|n| n.note_value).sum();
let selected_value = spendable_notes
.iter()
.map(|n| n.note_value)
.sum::<Option<_>>()
.ok_or_else(|| E::from(Error::InvalidAmount))?;
if selected_value < target_value {
return Err(E::from(Error::InsufficientBalance(
selected_value,
@ -238,7 +248,6 @@ where
}
// Create the transaction
let consensus_branch_id = BranchId::for_height(params, height);
let mut builder = Builder::new(params.clone(), height);
for selected in spendable_notes {
let from = extfvk
@ -280,9 +289,7 @@ where
}?
}
let (tx, tx_metadata) = builder
.build(consensus_branch_id, &prover)
.map_err(Error::Builder)?;
let (tx, tx_metadata) = builder.build(&prover).map_err(Error::Builder)?;
let sent_outputs = request.payments.iter().enumerate().map(|(i, payment)| {
let idx = match &payment.recipient_address {
@ -291,10 +298,13 @@ where
tx_metadata.output_index(i).expect("An output should exist in the transaction for each shielded payment."),
RecipientAddress::Transparent(addr) => {
let script = addr.script();
tx.vout
.iter()
.enumerate()
.find(|(_, tx_out)| tx_out.script_pubkey == script)
tx.transparent_bundle()
.and_then(|b| {
b.vout
.iter()
.enumerate()
.find(|(_, tx_out)| tx_out.script_pubkey == script)
})
.map(|(index, _)| index)
.expect("An output should exist in the transaction for each transparent payment.")
}
@ -351,14 +361,18 @@ where
// get UTXOs from DB
let utxos = wallet_db.get_unspent_transparent_utxos(&taddr, latest_anchor - confirmations)?;
let total_amount = utxos.iter().map(|utxo| utxo.value).sum::<Amount>();
let total_amount = utxos
.iter()
.map(|utxo| utxo.value)
.sum::<Option<Amount>>()
.ok_or_else(|| E::from(Error::InvalidAmount))?;
let fee = DEFAULT_FEE;
if fee >= total_amount {
return Err(E::from(Error::InsufficientBalance(total_amount, fee)));
}
let amount_to_shield = total_amount - fee;
let amount_to_shield = (total_amount - fee).ok_or_else(|| E::from(Error::InvalidAmount))?;
let mut builder = Builder::new(params.clone(), latest_scanned_height);
@ -384,11 +398,7 @@ where
.add_sapling_output(Some(ovk), z_address.clone(), amount_to_shield, memo.clone())
.map_err(Error::Builder)?;
let consensus_branch_id = BranchId::for_height(params, latest_anchor);
let (tx, tx_metadata) = builder
.build(consensus_branch_id, &prover)
.map_err(Error::Builder)?;
let (tx, tx_metadata) = builder.build(&prover).map_err(Error::Builder)?;
let output_index = tx_metadata.output_index(0).expect(
"No sapling note was created in autoshielding transaction. This is a programming error.",
);

View File

@ -44,27 +44,30 @@ pub fn decrypt_transaction<P: consensus::Parameters>(
) -> Vec<DecryptedOutput> {
let mut decrypted = vec![];
for (account, extfvk) in extfvks.iter() {
let ivk = extfvk.fvk.vk.ivk();
let ovk = extfvk.fvk.ovk;
if let Some(bundle) = tx.sapling_bundle() {
for (account, extfvk) in extfvks.iter() {
let ivk = extfvk.fvk.vk.ivk();
let ovk = extfvk.fvk.ovk;
for (index, output) in tx.shielded_outputs.iter().enumerate() {
let ((note, to, memo), outgoing) =
match try_sapling_note_decryption(params, height, &ivk, output) {
Some(ret) => (ret, false),
None => match try_sapling_output_recovery(params, height, &ovk, output) {
Some(ret) => (ret, true),
None => continue,
},
};
decrypted.push(DecryptedOutput {
index,
note,
account: *account,
to,
memo,
outgoing,
})
for (index, output) in bundle.shielded_outputs.iter().enumerate() {
let ((note, to, memo), outgoing) =
match try_sapling_note_decryption(params, height, &ivk, output) {
Some(ret) => (ret, false),
None => match try_sapling_output_recovery(params, height, &ovk, output) {
Some(ret) => (ret, true),
None => continue,
},
};
decrypted.push(DecryptedOutput {
index,
note,
account: *account,
to,
memo,
outgoing,
})
}
}
}

View File

@ -1,17 +1,19 @@
//! Generated code for handling light client protobuf structs.
use ff::PrimeField;
use group::GroupEncoding;
use std::convert::{TryFrom, TryInto};
use zcash_primitives::{
block::{BlockHash, BlockHeader},
consensus::BlockHeight,
sapling::Nullifier,
transaction::components::sapling::{CompactOutputDescription, OutputDescription},
transaction::{
components::sapling::{self, CompactOutputDescription, OutputDescription},
TxId,
},
};
use zcash_note_encryption::COMPACT_NOTE_SIZE;
use zcash_note_encryption::{EphemeralKeyBytes, COMPACT_NOTE_SIZE};
pub mod compact_formats;
@ -74,6 +76,15 @@ impl compact_formats::CompactBlock {
}
}
impl compact_formats::CompactTx {
/// Returns the transaction Id
pub fn txid(&self) -> TxId {
let mut hash = [0u8; 32];
hash.copy_from_slice(&self.hash);
TxId::from_bytes(hash)
}
}
impl compact_formats::CompactOutput {
/// Returns the note commitment for this output.
///
@ -91,21 +102,19 @@ impl compact_formats::CompactOutput {
/// A convenience method that parses [`CompactOutput.epk`].
///
/// [`CompactOutput.epk`]: #structfield.epk
pub fn epk(&self) -> Result<jubjub::ExtendedPoint, ()> {
let p = jubjub::ExtendedPoint::from_bytes(&self.epk[..].try_into().map_err(|_| ())?);
if p.is_some().into() {
Ok(p.unwrap())
} else {
Err(())
}
pub fn ephemeral_key(&self) -> Result<EphemeralKeyBytes, ()> {
self.epk[..]
.try_into()
.map(EphemeralKeyBytes)
.map_err(|_| ())
}
}
impl From<OutputDescription> for compact_formats::CompactOutput {
fn from(out: OutputDescription) -> compact_formats::CompactOutput {
impl<A: sapling::Authorization> From<OutputDescription<A>> for compact_formats::CompactOutput {
fn from(out: OutputDescription<A>) -> compact_formats::CompactOutput {
let mut result = compact_formats::CompactOutput::new();
result.set_cmu(out.cmu.to_repr().to_vec());
result.set_epk(out.ephemeral_key.to_bytes().to_vec());
result.set_epk(out.ephemeral_key.as_ref().to_vec());
result.set_ciphertext(out.enc_ciphertext[..COMPACT_NOTE_SIZE].to_vec());
result
}
@ -117,7 +126,7 @@ impl TryFrom<compact_formats::CompactOutput> for CompactOutputDescription {
fn try_from(value: compact_formats::CompactOutput) -> Result<Self, Self::Error> {
Ok(CompactOutputDescription {
cmu: value.cmu()?,
epk: value.epk()?,
ephemeral_key: value.ephemeral_key()?,
enc_ciphertext: value.ciphertext,
})
}

View File

@ -3,6 +3,7 @@
use subtle::{Choice, ConditionallySelectable};
use zcash_note_encryption::EphemeralKeyBytes;
use zcash_primitives::{
merkle_tree::IncrementalWitness,
sapling::{
@ -68,7 +69,7 @@ pub struct WalletShieldedSpend {
pub struct WalletShieldedOutput<N> {
pub index: usize,
pub cmu: bls12_381::Scalar,
pub epk: jubjub::ExtendedPoint,
pub ephemeral_key: EphemeralKeyBytes,
pub account: AccountId,
pub note: Note,
pub to: PaymentAddress,

View File

@ -12,7 +12,7 @@ use zcash_primitives::{
note_encryption::{try_sapling_compact_note_decryption, SaplingDomain},
Node, Note, Nullifier, PaymentAddress, SaplingIvk,
},
transaction::{components::sapling::CompactOutputDescription, TxId},
transaction::components::sapling::CompactOutputDescription,
zip32::ExtendedFullViewingKey,
};
@ -32,7 +32,8 @@ use crate::wallet::{AccountId, WalletShieldedOutput, WalletShieldedSpend, Wallet
fn scan_output<P: consensus::Parameters, K: ScanningKey>(
params: &P,
height: BlockHeight,
(index, output): (usize, CompactOutput),
index: usize,
output: CompactOutput,
vks: &[(&AccountId, &K)],
spent_from_accounts: &HashSet<AccountId>,
tree: &mut CommitmentTree<Node>,
@ -75,7 +76,7 @@ fn scan_output<P: consensus::Parameters, K: ScanningKey>(
return Some(WalletShieldedOutput {
index,
cmu: output.cmu,
epk: output.epk,
ephemeral_key: output.ephemeral_key,
account: **account,
note,
to,
@ -198,6 +199,8 @@ pub fn scan_block<P: consensus::Parameters, K: ScanningKey>(
let block_height = block.height();
for tx in block.vtx.into_iter() {
let txid = tx.txid();
let index = tx.index as usize;
let num_spends = tx.spends.len();
let num_outputs = tx.outputs.len();
@ -249,7 +252,7 @@ pub fn scan_block<P: consensus::Parameters, K: ScanningKey>(
})
.collect();
for to_scan in tx.outputs.into_iter().enumerate() {
for (idx, c_out) in tx.outputs.into_iter().enumerate() {
// Grab mutable references to new witnesses from previous outputs
// in this transaction so that we can update them. Scoped so we
// don't hold mutable references to shielded_outputs for too long.
@ -261,7 +264,8 @@ pub fn scan_block<P: consensus::Parameters, K: ScanningKey>(
if let Some(output) = scan_output(
params,
block_height,
to_scan,
idx,
c_out,
vks,
&spent_from_accounts,
tree,
@ -275,11 +279,9 @@ pub fn scan_block<P: consensus::Parameters, K: ScanningKey>(
}
if !(shielded_spends.is_empty() && shielded_outputs.is_empty()) {
let mut txid = TxId([0u8; 32]);
txid.0.copy_from_slice(&tx.hash);
wtxs.push(WalletTx {
txid,
index: tx.index as usize,
index,
num_spends,
num_outputs,
shielded_spends,

View File

@ -15,11 +15,11 @@ edition = "2018"
[dependencies]
bech32 = "0.8"
bs58 = { version = "0.4", features = ["check"] }
ff = "0.8"
group = "0.8"
jubjub = "0.5.1"
ff = "0.10"
group = "0.10"
jubjub = "0.7"
protobuf = "2.20"
rand_core = "0.5.1"
rand_core = "0.6"
rusqlite = { version = "0.24", features = ["bundled", "time"] }
secp256k1 = { version = "0.19" }
time = "0.2"
@ -27,10 +27,12 @@ zcash_client_backend = { version = "0.5", path = "../zcash_client_backend", feat
zcash_primitives = { version = "0.5", path = "../zcash_primitives", features = ["transparent-inputs"] }
[dev-dependencies]
rand_core = "0.5.1"
tempfile = "3"
zcash_proofs = { version = "0.5", path = "../zcash_proofs" }
[features]
mainnet = []
test-dependencies = ["zcash_client_backend/test-dependencies"]
[lib]
bench = false

View File

@ -344,13 +344,19 @@ mod tests {
scan_cached_blocks(&tests::network(), &db_cache, &mut db_write, None).unwrap();
// Account balance should reflect both received notes
assert_eq!(get_balance(&db_data, AccountId(0)).unwrap(), value + value2);
assert_eq!(
get_balance(&db_data, AccountId(0)).unwrap(),
(value + value2).unwrap()
);
// "Rewind" to height of last scanned block
rewind_to_height(&db_data, sapling_activation_height() + 1).unwrap();
// Account balance should be unaltered
assert_eq!(get_balance(&db_data, AccountId(0)).unwrap(), value + value2);
assert_eq!(
get_balance(&db_data, AccountId(0)).unwrap(),
(value + value2).unwrap()
);
// Rewind so that one block is dropped
rewind_to_height(&db_data, sapling_activation_height()).unwrap();
@ -362,7 +368,10 @@ mod tests {
scan_cached_blocks(&tests::network(), &db_cache, &mut db_write, None).unwrap();
// Account balance should again reflect both received notes
assert_eq!(get_balance(&db_data, AccountId(0)).unwrap(), value + value2);
assert_eq!(
get_balance(&db_data, AccountId(0)).unwrap(),
(value + value2).unwrap()
);
}
#[test]
@ -467,7 +476,10 @@ mod tests {
scan_cached_blocks(&tests::network(), &db_cache, &mut db_write, None).unwrap();
// Account balance should reflect both received notes
assert_eq!(get_balance(&db_data, AccountId(0)).unwrap(), value + value2);
assert_eq!(
get_balance(&db_data, AccountId(0)).unwrap(),
(value + value2).unwrap()
);
}
#[test]
@ -523,6 +535,9 @@ mod tests {
scan_cached_blocks(&tests::network(), &db_cache, &mut db_write, None).unwrap();
// Account balance should equal the change
assert_eq!(get_balance(&db_data, AccountId(0)).unwrap(), value - value2);
assert_eq!(
get_balance(&db_data, AccountId(0)).unwrap(),
(value - value2).unwrap()
);
}
}

View File

@ -563,16 +563,18 @@ impl<'a, P: consensus::Parameters> WalletWrite for DataConnStmtCache<'a, P> {
// if we have some transparent outputs yet no shielded outputs, then this is t2t and we
// can safely ignore it otherwise, this is z2t and it might have originated from our
// wallet
if !d_tx.tx.vout.is_empty() {
if !d_tx.tx.transparent_bundle().iter().any(|b| b.vout.is_empty()) {
// store received z->t transactions in the same way they would be stored by
// create_spend_to_address If there are any of our shielded inputs, we interpret this
// as our z->t tx and store the vouts as our sent notes.
// FIXME this is a weird heuristic that is bound to trip us up somewhere.
if let Some((account_id, _)) = nullifiers.iter().find(|(_, nf)|
d_tx.tx.shielded_spends.iter().any(|input| *nf == input.nullifier)
if let Some((account_id, _)) = nullifiers.iter().find(
|(_, nf)|
d_tx.tx.sapling_bundle().iter().flat_map(|b| b.shielded_spends.iter())
.any(|input| *nf == input.nullifier)
) {
for (output_index, txout) in d_tx.tx.vout.iter().enumerate() {
for (output_index, txout) in d_tx.tx.transparent_bundle().iter().flat_map(|b| b.vout.iter()).enumerate() {
wallet::put_sent_utxo(
up,
tx_ref,
@ -601,8 +603,10 @@ impl<'a, P: consensus::Parameters> WalletWrite for DataConnStmtCache<'a, P> {
//
// Assumes that create_spend_to_address() will never be called in parallel, which is a
// reasonable assumption for a light client such as a mobile phone.
for spend in &sent_tx.tx.shielded_spends {
wallet::mark_sapling_note_spent(up, tx_ref, &spend.nullifier)?;
if let Some(bundle) = sent_tx.tx.sapling_bundle() {
for spend in &bundle.shielded_spends {
wallet::mark_sapling_note_spent(up, tx_ref, &spend.nullifier)?;
}
}
for utxo_outpoint in &sent_tx.utxos_spent {
@ -855,7 +859,7 @@ mod tests {
let note = Note {
g_d: change_addr.diversifier().g_d().unwrap(),
pk_d: *change_addr.pk_d(),
value: (in_value - value).into(),
value: (in_value - value).unwrap().into(),
rseed,
};
let encryptor = sapling_note_encryption::<_, Network>(

View File

@ -14,7 +14,7 @@ use std::convert::TryFrom;
use zcash_primitives::{
block::BlockHash,
consensus::{self, BlockHeight, NetworkUpgrade},
consensus::{self, BlockHeight, BranchId, NetworkUpgrade, Parameters},
memo::{Memo, MemoBytes},
merkle_tree::{CommitmentTree, IncrementalWitness},
sapling::{Node, Note, Nullifier, PaymentAddress},
@ -311,15 +311,25 @@ pub fn get_received_memo<P>(wdb: &WalletDb<P>, id_note: i64) -> Result<Memo, Sql
.map_err(SqliteClientError::from)
}
pub fn get_transaction<P>(wdb: &WalletDb<P>, id_tx: i64) -> Result<Transaction, SqliteClientError> {
let tx_bytes: Vec<_> = wdb.conn.query_row(
"SELECT raw FROM transactions
pub fn get_transaction<P: Parameters>(
wdb: &WalletDb<P>,
id_tx: i64,
) -> Result<Transaction, SqliteClientError> {
let (tx_bytes, block_height): (Vec<_>, BlockHeight) = wdb.conn.query_row(
"SELECT raw, block FROM transactions
WHERE id_tx = ?",
&[id_tx],
|row| row.get(0),
|row| {
let h: u32 = row.get(1)?;
Ok((row.get(0)?, BlockHeight::from(h)))
},
)?;
Transaction::read(&tx_bytes[..]).map_err(SqliteClientError::from)
Transaction::read(
&tx_bytes[..],
BranchId::for_height(&wdb.params, block_height),
)
.map_err(SqliteClientError::from)
}
/// Returns the memo for a sent note.
@ -408,7 +418,7 @@ pub fn block_height_extrema<P>(
///
/// let data_file = NamedTempFile::new().unwrap();
/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();
/// let height = get_tx_height(&db, TxId([0u8; 32]));
/// let height = get_tx_height(&db, TxId::from_bytes([0u8; 32]));
/// ```
pub fn get_tx_height<P>(
wdb: &WalletDb<P>,
@ -417,7 +427,7 @@ pub fn get_tx_height<P>(
wdb.conn
.query_row(
"SELECT block FROM transactions WHERE txid = ?",
&[txid.0.to_vec()],
&[txid.as_ref().to_vec()],
|row| row.get(0).map(u32::into),
)
.optional()
@ -772,7 +782,7 @@ pub fn put_tx_meta<'a, P, N>(
tx: &WalletTx<N>,
height: BlockHeight,
) -> Result<i64, SqliteClientError> {
let txid = tx.txid.0.to_vec();
let txid = tx.txid.as_ref().to_vec();
if stmts
.stmt_update_tx_meta
.execute(params![u32::from(height), (tx.index as i64), txid])?
@ -799,21 +809,21 @@ pub fn put_tx_data<'a, P>(
tx: &Transaction,
created_at: Option<time::OffsetDateTime>,
) -> Result<i64, SqliteClientError> {
let txid = tx.txid().0.to_vec();
let txid = tx.txid().as_ref().to_vec();
let mut raw_tx = vec![];
tx.write(&mut raw_tx)?;
if stmts
.stmt_update_tx_data
.execute(params![u32::from(tx.expiry_height), raw_tx, txid,])?
.execute(params![u32::from(tx.expiry_height()), raw_tx, txid,])?
== 0
{
// It isn't there, so insert our transaction into the database.
stmts.stmt_insert_tx_data.execute(params![
txid,
created_at,
u32::from(tx.expiry_height),
u32::from(tx.expiry_height()),
raw_tx
])?;

View File

@ -68,9 +68,9 @@ pub fn get_unspent_sapling_notes<P>(
"SELECT diversifier, value, rcm, witness
FROM received_notes
INNER JOIN transactions ON transactions.id_tx = received_notes.tx
INNER JOIN sapling_witnesses ON sapling_witnesses.note = received_notes.id_note
WHERE account = :account
AND spent IS NULL
INNER JOIN sapling_witnesses ON sapling_witnesses.note = received_notes.id_note
WHERE account = :account
AND spent IS NULL
AND transactions.block <= :anchor_height
AND sapling_witnesses.block = :anchor_height",
)?;
@ -153,7 +153,7 @@ mod tests {
use zcash_primitives::{
block::BlockHash,
consensus::BlockHeight,
consensus::{BlockHeight, BranchId},
legacy::TransparentAddress,
sapling::{note_encryption::try_sapling_output_recovery, prover::TxProver},
transaction::{components::Amount, Transaction},
@ -367,7 +367,10 @@ mod tests {
// Verified balance does not include the second note
let (_, anchor_height2) = (&db_data).get_target_and_anchor_heights().unwrap().unwrap();
assert_eq!(get_balance(&db_data, AccountId(0)).unwrap(), value + value);
assert_eq!(
get_balance(&db_data, AccountId(0)).unwrap(),
(value + value).unwrap()
);
assert_eq!(
get_balance_at(&db_data, AccountId(0), anchor_height2).unwrap(),
value
@ -627,7 +630,7 @@ mod tests {
|row| row.get(0),
)
.unwrap();
let tx = Transaction::read(&raw_tx[..]).unwrap();
let tx = Transaction::read(&raw_tx[..], BranchId::Canopy).unwrap();
// Fetch the output index from the database
let output_index: i64 = db_write
@ -641,7 +644,7 @@ mod tests {
)
.unwrap();
let output = &tx.shielded_outputs[output_index as usize];
let output = &tx.sapling_bundle().unwrap().shielded_outputs[output_index as usize];
try_sapling_output_recovery(
&network,

View File

@ -10,10 +10,18 @@ edition = "2018"
[dependencies]
blake2b_simd = "0.5"
zcash_primitives = { version = "0.5", path = "../zcash_primitives", features = ["zfuture"] }
zcash_primitives = { version = "0.5", path = "../zcash_primitives", features = ["zfuture" ] }
[dev-dependencies]
ff = "0.8"
jubjub = "0.5.1"
rand_core = "0.5.1"
ff = "0.10"
jubjub = "0.7"
orchard = "0.0"
rand_core = "0.6"
zcash_proofs = { version = "0.5", path = "../zcash_proofs" }
secp256k1 = { version = "0.20", features = ["rand", "bitcoin_hashes"] }
[features]
transparent-inputs = []
[lib]
bench = false

View File

@ -2,8 +2,10 @@
use std::convert::TryFrom;
use zcash_primitives::consensus::{BlockHeight, BranchId};
use zcash_primitives::extensions::transparent::{Error, Extension, Precondition, Witness};
use zcash_primitives::transaction::{components::TzeOut, Transaction};
use zcash_primitives::extensions::transparent::{
AuthData, Error, Extension, Precondition, Witness,
};
use zcash_primitives::transaction::{components::tze::TzeOut, Transaction};
use crate::transparent::demo;
@ -67,7 +69,7 @@ pub trait Epoch {
fn verify<'a>(
&self,
precondition: &Precondition,
witness: &Witness,
witness: &Witness<AuthData>,
ctx: &Context<'a>,
) -> Result<(), Error<Self::Error>>;
}
@ -76,15 +78,18 @@ pub trait Epoch {
/// by the context.
impl<'a> demo::Context for Context<'a> {
fn is_tze_only(&self) -> bool {
self.tx.vin.is_empty()
&& self.tx.vout.is_empty()
&& self.tx.shielded_spends.is_empty()
&& self.tx.shielded_outputs.is_empty()
&& self.tx.joinsplits.is_empty()
self.tx.transparent_bundle().is_none()
&& self.tx.sapling_bundle().is_none()
&& self.tx.sprout_bundle().is_none()
&& self.tx.orchard_bundle().is_none()
}
fn tx_tze_outputs(&self) -> &[TzeOut] {
&self.tx.tze_outputs
if let Some(bundle) = self.tx.tze_bundle() {
&bundle.vout
} else {
&[]
}
}
}
@ -98,7 +103,7 @@ impl Epoch for EpochVTest {
fn verify<'a>(
&self,
precondition: &Precondition,
witness: &Witness,
witness: &Witness<AuthData>,
ctx: &Context<'a>,
) -> Result<(), Error<Self::Error>> {
let ext_id = ExtensionId::try_from(precondition.extension_id)

View File

@ -21,12 +21,16 @@
use std::convert::TryFrom;
use std::convert::TryInto;
use std::fmt;
use std::ops::{Deref, DerefMut};
use blake2b_simd::Params;
use zcash_primitives::{
extensions::transparent::{Extension, ExtensionTxBuilder, FromPayload, ToPayload},
transaction::components::{amount::Amount, TzeOut, TzeOutPoint},
transaction::components::{
amount::Amount,
tze::{OutPoint, TzeOut},
},
};
/// Types and constants used for Mode 0 (open a channel)
@ -336,6 +340,20 @@ pub struct DemoBuilder<B> {
pub extension_id: u32,
}
impl<B> Deref for DemoBuilder<B> {
type Target = B;
fn deref(&self) -> &Self::Target {
&self.txn_builder
}
}
impl<B> DerefMut for DemoBuilder<B> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.txn_builder
}
}
/// Errors that can occur in construction of transactions using `DemoBuilder`.
#[derive(Debug)]
pub enum DemoBuildError<E> {
@ -356,7 +374,7 @@ pub enum DemoBuildError<E> {
/// Convenience methods for use with [`zcash_primitives::transaction::builder::Builder`]
/// for constructing transactions that utilize the features of the demo extension.
impl<'a, B: ExtensionTxBuilder<'a>> DemoBuilder<&mut B> {
impl<'a, B: ExtensionTxBuilder<'a>> DemoBuilder<B> {
/// Add a channel-opening precondition to the outputs of the transaction under
/// construction.
pub fn demo_open(
@ -374,7 +392,7 @@ impl<'a, B: ExtensionTxBuilder<'a>> DemoBuilder<&mut B> {
/// precondition to the transaction under construction.
pub fn demo_transfer_to_close(
&mut self,
prevout: (TzeOutPoint, TzeOut),
prevout: (OutPoint, TzeOut),
transfer_amount: Amount,
preimage_1: [u8; 32],
hash_2: [u8; 32],
@ -416,7 +434,7 @@ impl<'a, B: ExtensionTxBuilder<'a>> DemoBuilder<&mut B> {
/// Add a channel-closing witness to the transaction under construction.
pub fn demo_close(
&mut self,
prevout: (TzeOutPoint, TzeOut),
prevout: (OutPoint, TzeOut),
preimage_2: [u8; 32],
) -> Result<(), DemoBuildError<B::BuildError>> {
let hash_2 = {
@ -467,25 +485,65 @@ mod tests {
use zcash_proofs::prover::LocalTxProver;
use zcash_primitives::{
consensus::{BranchId, H0, TEST_NETWORK},
consensus::{BlockHeight, BranchId, NetworkUpgrade, Parameters},
constants,
extensions::transparent::{self as tze, Extension, FromPayload, ToPayload},
legacy::TransparentAddress,
merkle_tree::{CommitmentTree, IncrementalWitness},
sapling::Node,
sapling::Rseed,
sapling::{Node, Rseed},
transaction::{
builder::Builder,
components::{
amount::{Amount, DEFAULT_FEE},
TzeIn, TzeOut, TzeOutPoint,
tze::{Authorized, Bundle, OutPoint, TzeIn, TzeOut},
},
Transaction, TransactionData,
Transaction, TransactionData, TxVersion,
},
zip32::ExtendedSpendingKey,
};
use super::{close, hash_1, open, Context, DemoBuilder, Precondition, Program, Witness};
#[derive(PartialEq, Copy, Clone, Debug)]
struct FutureNetwork;
impl Parameters for FutureNetwork {
fn activation_height(&self, nu: NetworkUpgrade) -> Option<BlockHeight> {
match nu {
NetworkUpgrade::Overwinter => Some(BlockHeight::from_u32(207_500)),
NetworkUpgrade::Sapling => Some(BlockHeight::from_u32(280_000)),
NetworkUpgrade::Blossom => Some(BlockHeight::from_u32(584_000)),
NetworkUpgrade::Heartwood => Some(BlockHeight::from_u32(903_800)),
NetworkUpgrade::Canopy => Some(BlockHeight::from_u32(1_028_500)),
NetworkUpgrade::Nu5 => Some(BlockHeight::from_u32(1_200_000)),
NetworkUpgrade::ZFuture => Some(BlockHeight::from_u32(1_400_000)),
}
}
fn coin_type(&self) -> u32 {
constants::testnet::COIN_TYPE
}
fn hrp_sapling_extended_spending_key(&self) -> &str {
constants::testnet::HRP_SAPLING_EXTENDED_SPENDING_KEY
}
fn hrp_sapling_extended_full_viewing_key(&self) -> &str {
constants::testnet::HRP_SAPLING_EXTENDED_FULL_VIEWING_KEY
}
fn hrp_sapling_payment_address(&self) -> &str {
constants::testnet::HRP_SAPLING_PAYMENT_ADDRESS
}
fn b58_pubkey_address_prefix(&self) -> [u8; 2] {
constants::testnet::B58_PUBKEY_ADDRESS_PREFIX
}
fn b58_script_address_prefix(&self) -> [u8; 2] {
constants::testnet::B58_SCRIPT_ADDRESS_PREFIX
}
}
fn demo_hashes(preimage_1: &[u8; 32], preimage_2: &[u8; 32]) -> ([u8; 32], [u8; 32]) {
let hash_2 = {
let mut hash = [0; 32];
@ -563,15 +621,24 @@ mod tests {
/// by the context.
impl<'a> Context for Ctx<'a> {
fn is_tze_only(&self) -> bool {
self.tx.vin.is_empty()
&& self.tx.vout.is_empty()
&& self.tx.shielded_spends.is_empty()
&& self.tx.shielded_outputs.is_empty()
&& self.tx.joinsplits.is_empty()
self.tx.transparent_bundle().is_none()
&& self.tx.sapling_bundle().is_none()
&& self.tx.sprout_bundle().is_none()
&& self.tx.orchard_bundle().is_none()
}
fn tx_tze_outputs(&self) -> &[TzeOut] {
&self.tx.tze_outputs
match self.tx.tze_bundle() {
Some(b) => &b.vout,
None => &[],
}
}
}
fn demo_builder<'a>(height: BlockHeight) -> DemoBuilder<Builder<'a, FutureNetwork, OsRng>> {
DemoBuilder {
txn_builder: Builder::new(FutureNetwork, height),
extension_id: 0,
}
}
@ -616,47 +683,89 @@ mod tests {
precondition: tze::Precondition::from(0, &Precondition::open(hash_1)),
};
let mut mtx_a = TransactionData::zfuture();
mtx_a.tze_outputs.push(out_a);
let tx_a = mtx_a.freeze().unwrap();
let tx_a = TransactionData::from_parts(
TxVersion::ZFuture,
BranchId::ZFuture,
0,
0u32.into(),
None,
None,
None,
None,
Some(Bundle {
vin: vec![],
vout: vec![out_a],
authorization: Authorized,
}),
)
.freeze()
.unwrap();
//
// Transfer
//
let in_b = TzeIn {
prevout: TzeOutPoint::new(tx_a.txid().0, 0),
prevout: OutPoint::new(tx_a.txid(), 0),
witness: tze::Witness::from(0, &Witness::open(preimage_1)),
};
let out_b = TzeOut {
value: Amount::from_u64(1).unwrap(),
precondition: tze::Precondition::from(0, &Precondition::close(hash_2)),
};
let mut mtx_b = TransactionData::zfuture();
mtx_b.tze_inputs.push(in_b);
mtx_b.tze_outputs.push(out_b);
let tx_b = mtx_b.freeze().unwrap();
let tx_b = TransactionData::from_parts(
TxVersion::ZFuture,
BranchId::ZFuture,
0,
0u32.into(),
None,
None,
None,
None,
Some(Bundle {
vin: vec![in_b],
vout: vec![out_b],
authorization: Authorized,
}),
)
.freeze()
.unwrap();
//
// Closing transaction
//
let in_c = TzeIn {
prevout: TzeOutPoint::new(tx_b.txid().0, 0),
prevout: OutPoint::new(tx_b.txid(), 0),
witness: tze::Witness::from(0, &Witness::close(preimage_2)),
};
let mut mtx_c = TransactionData::zfuture();
mtx_c.tze_inputs.push(in_c);
let tx_c = mtx_c.freeze().unwrap();
let tx_c = TransactionData::from_parts(
TxVersion::ZFuture,
BranchId::ZFuture,
0,
0u32.into(),
None,
None,
None,
None,
Some(Bundle {
vin: vec![in_c],
vout: vec![],
authorization: Authorized,
}),
)
.freeze()
.unwrap();
// Verify tx_b
{
let ctx = Ctx { tx: &tx_b };
assert_eq!(
Program.verify(
&tx_a.tze_outputs[0].precondition,
&tx_b.tze_inputs[0].witness,
&tx_a.tze_bundle().unwrap().vout[0].precondition,
&tx_b.tze_bundle().unwrap().vin[0].witness,
&ctx
),
Ok(())
@ -668,8 +777,8 @@ mod tests {
let ctx = Ctx { tx: &tx_c };
assert_eq!(
Program.verify(
&tx_b.tze_outputs[0].precondition,
&tx_c.tze_inputs[0].witness,
&tx_b.tze_bundle().unwrap().vout[0].precondition,
&tx_c.tze_bundle().unwrap().vin[0].witness,
&ctx
),
Ok(())
@ -682,6 +791,10 @@ mod tests {
let preimage_1 = [1; 32];
let preimage_2 = [2; 32];
let tx_height = FutureNetwork
.activation_height(NetworkUpgrade::ZFuture)
.unwrap();
// Only run the test if we have the prover parameters.
let prover = match LocalTxProver::with_default_location() {
Some(prover) => prover,
@ -693,7 +806,6 @@ mod tests {
//
let mut rng = OsRng;
let mut builder_a = Builder::new_with_rng_zfuture(TEST_NETWORK, H0, rng);
// create some inputs to spend
let extsk = ExtendedSpendingKey::master(&[]);
@ -708,95 +820,78 @@ mod tests {
tree.append(cm1).unwrap();
let witness1 = IncrementalWitness::from_tree(&tree);
let mut builder_a = demo_builder(tx_height);
builder_a
.add_sapling_spend(extsk, *to.diversifier(), note1, witness1.path().unwrap())
.unwrap();
let mut db_a = DemoBuilder {
txn_builder: &mut builder_a,
extension_id: 0,
};
let value = Amount::from_u64(100000).unwrap();
let (h1, h2) = demo_hashes(&preimage_1, &preimage_2);
db_a.demo_open(value, h1)
builder_a
.demo_open(value, h1)
.map_err(|e| format!("open failure: {:?}", e))
.unwrap();
let (tx_a, _) = builder_a
.build(BranchId::Canopy, &prover)
.txn_builder
.build(&prover)
.map_err(|e| format!("build failure: {:?}", e))
.unwrap();
let tze_a = tx_a.tze_bundle().unwrap();
//
// Transfer
//
let mut builder_b = Builder::new_with_rng_zfuture(TEST_NETWORK, H0, rng);
let mut db_b = DemoBuilder {
txn_builder: &mut builder_b,
extension_id: 0,
};
let prevout_a = (
TzeOutPoint::new(tx_a.txid().0, 0),
tx_a.tze_outputs[0].clone(),
);
let value_xfr = value - DEFAULT_FEE;
db_b.demo_transfer_to_close(prevout_a, value_xfr, preimage_1, h2)
let mut builder_b = demo_builder(tx_height + 1);
let prevout_a = (OutPoint::new(tx_a.txid(), 0), tze_a.vout[0].clone());
let value_xfr = (value - DEFAULT_FEE).unwrap();
builder_b
.demo_transfer_to_close(prevout_a, value_xfr, preimage_1, h2)
.map_err(|e| format!("transfer failure: {:?}", e))
.unwrap();
let (tx_b, _) = builder_b
.build(BranchId::Canopy, &prover)
.txn_builder
.build(&prover)
.map_err(|e| format!("build failure: {:?}", e))
.unwrap();
let tze_b = tx_b.tze_bundle().unwrap();
//
// Closing transaction
//
let mut builder_c = Builder::new_with_rng_zfuture(TEST_NETWORK, H0, rng);
let mut db_c = DemoBuilder {
txn_builder: &mut builder_c,
extension_id: 0,
};
let prevout_b = (
TzeOutPoint::new(tx_a.txid().0, 0),
tx_b.tze_outputs[0].clone(),
);
db_c.demo_close(prevout_b, preimage_2)
let mut builder_c = demo_builder(tx_height + 2);
let prevout_b = (OutPoint::new(tx_a.txid(), 0), tze_b.vout[0].clone());
builder_c
.demo_close(prevout_b, preimage_2)
.map_err(|e| format!("close failure: {:?}", e))
.unwrap();
builder_c
.add_transparent_output(
&TransparentAddress::PublicKey([0; 20]),
value_xfr - DEFAULT_FEE,
(value_xfr - DEFAULT_FEE).unwrap(),
)
.unwrap();
let (tx_c, _) = builder_c
.build(BranchId::Canopy, &prover)
.txn_builder
.build(&prover)
.map_err(|e| format!("build failure: {:?}", e))
.unwrap();
let tze_c = tx_c.tze_bundle().unwrap();
// Verify tx_b
let ctx0 = Ctx { tx: &tx_b };
assert_eq!(
Program.verify(
&tx_a.tze_outputs[0].precondition,
&tx_b.tze_inputs[0].witness,
&ctx0
),
Program.verify(&tze_a.vout[0].precondition, &tze_b.vin[0].witness, &ctx0),
Ok(())
);
// Verify tx_c
let ctx1 = Ctx { tx: &tx_c };
assert_eq!(
Program.verify(
&tx_b.tze_outputs[0].precondition,
&tx_c.tze_inputs[0].witness,
&ctx1
),
Program.verify(&tze_b.vout[0].precondition, &tze_c.vin[0].witness, &ctx1),
Ok(())
);
}

View File

@ -6,6 +6,18 @@ and this library adheres to Rust's notion of
[Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Support for multiple history tree versions:
- `zcash_history::Version` trait.
- `zcash_history::V1`, marking the original history tree version.
- `zcash_history::V2`, marking the history tree version from NU5.
- `zcash_history::Entry::new_leaf`
### Changed
- `zcash_history::{Entry, IndexedNode, Tree}` now have a `Version` parameter.
### Removed
- `impl From<NodeData> for Entry` (replaced by `Entry::new_leaf`).
## [0.2.0] - 2020-03-13
No changes, just a version bump.

View File

@ -15,3 +15,6 @@ quickcheck = "0.9"
bigint = "4"
byteorder = "1"
blake2 = { package = "blake2b_simd", version = "0.5" }
[lib]
bench = false

View File

@ -1,8 +1,8 @@
use zcash_history::{Entry, EntryLink, NodeData, Tree};
use zcash_history::{Entry, EntryLink, NodeData, Tree, V1};
pub struct NodeDataIterator {
return_stack: Vec<NodeData>,
tree: Tree,
tree: Tree<V1>,
cursor: usize,
leaf_cursor: usize,
}
@ -56,7 +56,7 @@ impl NodeDataIterator {
let tree = Tree::new(
3,
vec![(2, root)],
vec![(0, leaf(1).into()), (1, leaf(2).into())],
vec![(0, Entry::new_leaf(leaf(1))), (1, Entry::new_leaf(leaf(2)))],
);
NodeDataIterator {

View File

@ -1,12 +1,12 @@
use zcash_history::{Entry, EntryLink, NodeData, Tree};
use zcash_history::{Entry, EntryLink, NodeData, Tree, V1};
#[path = "lib/shared.rs"]
mod share;
fn draft(into: &mut Vec<(u32, Entry)>, vec: &[NodeData], peak_pos: usize, h: u32) {
fn draft(into: &mut Vec<(u32, Entry<V1>)>, vec: &[NodeData], peak_pos: usize, h: u32) {
let node_data = vec[peak_pos - 1].clone();
let peak: Entry = match h {
0 => node_data.into(),
let peak = match h {
0 => Entry::new_leaf(node_data),
_ => Entry::new(
node_data,
EntryLink::Stored((peak_pos - (1 << h) - 1) as u32),
@ -19,7 +19,7 @@ fn draft(into: &mut Vec<(u32, Entry)>, vec: &[NodeData], peak_pos: usize, h: u32
into.push(((peak_pos - 1) as u32, peak));
}
fn prepare_tree(vec: &[NodeData]) -> Tree {
fn prepare_tree(vec: &[NodeData]) -> Tree<V1> {
assert!(!vec.is_empty());
// integer log2 of (vec.len()+1), -1

View File

@ -1,26 +1,34 @@
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use crate::{EntryKind, EntryLink, Error, NodeData, MAX_NODE_DATA_SIZE};
use crate::{EntryKind, EntryLink, Error, Version, MAX_NODE_DATA_SIZE};
/// Max serialized length of entry data.
pub const MAX_ENTRY_SIZE: usize = MAX_NODE_DATA_SIZE + 9;
/// MMR Entry.
#[derive(Debug)]
pub struct Entry {
pub struct Entry<V: Version> {
pub(crate) kind: EntryKind,
pub(crate) data: NodeData,
pub(crate) data: V::NodeData,
}
impl Entry {
impl<V: Version> Entry<V> {
/// New entry of type node.
pub fn new(data: NodeData, left: EntryLink, right: EntryLink) -> Self {
pub fn new(data: V::NodeData, left: EntryLink, right: EntryLink) -> Self {
Entry {
kind: EntryKind::Node(left, right),
data,
}
}
/// Creates a new leaf.
pub fn new_leaf(data: V::NodeData) -> Self {
Entry {
kind: EntryKind::Leaf,
data,
}
}
/// Returns if is this node complete (has total of 2^N leaves)
pub fn complete(&self) -> bool {
let leaves = self.leaf_count();
@ -29,7 +37,7 @@ impl Entry {
/// Number of leaves under this node.
pub fn leaf_count(&self) -> u64 {
self.data.end_height - (self.data.start_height - 1)
V::end_height(&self.data) - (V::start_height(&self.data) - 1)
}
/// Is this node a leaf.
@ -67,7 +75,7 @@ impl Entry {
}
};
let data = NodeData::read(consensus_branch_id, r)?;
let data = V::read(consensus_branch_id, r)?;
Ok(Entry { kind, data })
}
@ -88,7 +96,7 @@ impl Entry {
}
}
self.data.write(w)?;
V::write(&self.data, w)?;
Ok(())
}
@ -100,16 +108,7 @@ impl Entry {
}
}
impl From<NodeData> for Entry {
fn from(s: NodeData) -> Self {
Entry {
kind: EntryKind::Leaf,
data: s,
}
}
}
impl std::fmt::Display for Entry {
impl<V: Version> std::fmt::Display for Entry<V> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.kind {
EntryKind::Node(l, r) => write!(f, "node({}, {}, ..)", l, r),

View File

@ -9,10 +9,12 @@
mod entry;
mod node_data;
mod tree;
mod version;
pub use entry::{Entry, MAX_ENTRY_SIZE};
pub use node_data::{NodeData, MAX_NODE_DATA_SIZE};
pub use tree::Tree;
pub use version::{Version, V1, V2};
/// Crate-level error type
#[derive(Debug)]

View File

@ -1,6 +1,7 @@
use bigint::U256;
use blake2::Params as Blake2Params;
use byteorder::{ByteOrder, LittleEndian, ReadBytesExt, WriteBytesExt};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use crate::Version;
/// Maximum serialized size of the node metadata.
pub const MAX_NODE_DATA_SIZE: usize = 32 + // subtree commitment
@ -13,10 +14,13 @@ pub const MAX_NODE_DATA_SIZE: usize = 32 + // subtree commitment
32 + // subtree total work
9 + // start height (compact uint)
9 + // end height (compact uint)
9; // Sapling tx count (compact uint)
// = total of 171
9 + // Sapling tx count (compact uint)
32 + // start Orchard tree root
32 + // end Orchard tree root
9; // Orchard tx count (compact uint)
// = total of 244
/// Node metadata.
/// V1 node metadata.
#[repr(C)]
#[derive(Debug, Clone, Default)]
#[cfg_attr(test, derive(PartialEq))]
@ -47,49 +51,20 @@ pub struct NodeData {
pub sapling_tx: u64,
}
fn blake2b_personal(personalization: &[u8], input: &[u8]) -> [u8; 32] {
let hash_result = Blake2Params::new()
.hash_length(32)
.personal(personalization)
.to_state()
.update(input)
.finalize();
let mut result = [0u8; 32];
result.copy_from_slice(hash_result.as_bytes());
result
}
fn personalization(branch_id: u32) -> [u8; 16] {
let mut result = [0u8; 16];
result[..12].copy_from_slice(b"ZcashHistory");
LittleEndian::write_u32(&mut result[12..], branch_id);
result
}
impl NodeData {
/// Combine two nodes metadata.
pub fn combine(left: &NodeData, right: &NodeData) -> NodeData {
assert_eq!(left.consensus_branch_id, right.consensus_branch_id);
let mut hash_buf = [0u8; MAX_NODE_DATA_SIZE * 2];
let size = {
let mut cursor = ::std::io::Cursor::new(&mut hash_buf[..]);
left.write(&mut cursor)
.expect("Writing to memory buf with enough length cannot fail; qed");
right
.write(&mut cursor)
.expect("Writing to memory buf with enough length cannot fail; qed");
cursor.position() as usize
};
let hash = blake2b_personal(
&personalization(left.consensus_branch_id),
&hash_buf[..size],
);
crate::V1::combine(left, right)
}
pub(crate) fn combine_inner(
subtree_commitment: [u8; 32],
left: &NodeData,
right: &NodeData,
) -> NodeData {
NodeData {
consensus_branch_id: left.consensus_branch_id,
subtree_commitment: hash,
subtree_commitment,
start_time: left.start_time,
end_time: right.end_time,
start_target: left.start_target,
@ -180,27 +155,64 @@ impl NodeData {
/// Convert to byte representation.
pub fn to_bytes(&self) -> Vec<u8> {
let mut buf = [0u8; MAX_NODE_DATA_SIZE];
let pos = {
let mut cursor = std::io::Cursor::new(&mut buf[..]);
self.write(&mut cursor).expect("Cursor cannot fail");
cursor.position() as usize
};
buf[0..pos].to_vec()
crate::V1::to_bytes(self)
}
/// Convert from byte representation.
pub fn from_bytes<T: AsRef<[u8]>>(consensus_branch_id: u32, buf: T) -> std::io::Result<Self> {
let mut cursor = std::io::Cursor::new(buf);
Self::read(consensus_branch_id, &mut cursor)
crate::V1::from_bytes(consensus_branch_id, buf)
}
/// Hash node metadata
pub fn hash(&self) -> [u8; 32] {
let bytes = self.to_bytes();
crate::V1::hash(self)
}
}
blake2b_personal(&personalization(self.consensus_branch_id), &bytes)
/// V2 node metadata.
#[derive(Debug, Clone, Default)]
#[cfg_attr(test, derive(PartialEq))]
pub struct V2 {
/// The V1 node data retained in V2.
pub v1: NodeData,
/// Start Orchard tree root.
pub start_orchard_root: [u8; 32],
/// End Orchard tree root.
pub end_orchard_root: [u8; 32],
/// Number of Orchard transactions.
pub orchard_tx: u64,
}
impl V2 {
pub(crate) fn combine_inner(subtree_commitment: [u8; 32], left: &V2, right: &V2) -> V2 {
V2 {
v1: NodeData::combine_inner(subtree_commitment, &left.v1, &right.v1),
start_orchard_root: left.start_orchard_root,
end_orchard_root: right.end_orchard_root,
orchard_tx: left.orchard_tx + right.orchard_tx,
}
}
/// Write to the byte representation.
pub fn write<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.v1.write(w)?;
w.write_all(&self.start_orchard_root)?;
w.write_all(&self.end_orchard_root)?;
NodeData::write_compact(w, self.orchard_tx)?;
Ok(())
}
/// Read from the byte representation.
pub fn read<R: std::io::Read>(consensus_branch_id: u32, r: &mut R) -> std::io::Result<Self> {
let mut data = V2 {
v1: NodeData::read(consensus_branch_id, r)?,
..Default::default()
};
r.read_exact(&mut data.start_orchard_root)?;
r.read_exact(&mut data.end_orchard_root)?;
data.orchard_tx = NodeData::read_compact(r)?;
Ok(data)
}
}

View File

@ -1,6 +1,6 @@
use std::collections::HashMap;
use crate::{Entry, EntryKind, EntryLink, Error, NodeData};
use crate::{Entry, EntryKind, EntryLink, Error, Version};
/// Represents partially loaded tree.
///
@ -13,11 +13,11 @@ use crate::{Entry, EntryKind, EntryLink, Error, NodeData};
/// Intended use of this `Tree` is to instantiate it based on partially loaded data (see example
/// how to pick right nodes from the array representation of MMR Tree), perform several operations
/// (append-s/delete-s) and then drop it.
pub struct Tree {
stored: HashMap<u32, Entry>,
pub struct Tree<V: Version> {
stored: HashMap<u32, Entry<V>>,
// This can grow indefinitely if `Tree` is misused as a self-contained data structure
generated: Vec<Entry>,
generated: Vec<Entry<V>>,
// number of persistent(!) tree entries
stored_count: u32,
@ -25,9 +25,9 @@ pub struct Tree {
root: EntryLink,
}
impl Tree {
impl<V: Version> Tree<V> {
/// Resolve link originated from this tree
pub fn resolve_link(&self, link: EntryLink) -> Result<IndexedNode, Error> {
pub fn resolve_link(&self, link: EntryLink) -> Result<IndexedNode<V>, Error> {
match link {
EntryLink::Generated(index) => self.generated.get(index as usize),
EntryLink::Stored(index) => self.stored.get(&index),
@ -36,14 +36,14 @@ impl Tree {
.ok_or(Error::ExpectedInMemory(link))
}
fn push(&mut self, data: Entry) -> EntryLink {
fn push(&mut self, data: Entry<V>) -> EntryLink {
let idx = self.stored_count;
self.stored_count += 1;
self.stored.insert(idx, data);
EntryLink::Stored(idx)
}
fn push_generated(&mut self, data: Entry) -> EntryLink {
fn push_generated(&mut self, data: Entry<V>) -> EntryLink {
self.generated.push(data);
EntryLink::Generated(self.generated.len() as u32 - 1)
}
@ -51,7 +51,7 @@ impl Tree {
/// Populate tree with plain list of the leaves/nodes. For now, only for tests,
/// since this `Tree` structure is for partially loaded tree (but it might change)
#[cfg(test)]
pub fn populate(loaded: Vec<Entry>, root: EntryLink) -> Self {
pub fn populate(loaded: Vec<Entry<V>>, root: EntryLink) -> Self {
let mut result = Tree::invalid();
result.stored_count = loaded.len() as u32;
for (idx, item) in loaded.into_iter().enumerate() {
@ -83,7 +83,7 @@ impl Tree {
/// # Panics
///
/// Will panic if `peaks` is empty.
pub fn new(length: u32, peaks: Vec<(u32, Entry)>, extra: Vec<(u32, Entry)>) -> Self {
pub fn new(length: u32, peaks: Vec<(u32, Entry<V>)>, extra: Vec<(u32, Entry<V>)>) -> Self {
assert!(!peaks.is_empty());
let mut result = Tree::invalid();
@ -135,9 +135,9 @@ impl Tree {
/// Returns links to actual nodes that has to be persisted as the result of the append.
/// If completed without error, at least one link to the appended
/// node (with metadata provided in `new_leaf`) will be returned.
pub fn append_leaf(&mut self, new_leaf: NodeData) -> Result<Vec<EntryLink>, Error> {
pub fn append_leaf(&mut self, new_leaf: V::NodeData) -> Result<Vec<EntryLink>, Error> {
let root = self.root;
let new_leaf_link = self.push(new_leaf.into());
let new_leaf_link = self.push(Entry::new_leaf(new_leaf));
let mut appended = vec![new_leaf_link];
let mut peaks = Vec::new();
@ -274,7 +274,7 @@ impl Tree {
}
/// Reference to the root node.
pub fn root_node(&self) -> Result<IndexedNode, Error> {
pub fn root_node(&self) -> Result<IndexedNode<V>, Error> {
self.resolve_link(self.root)
}
@ -286,12 +286,12 @@ impl Tree {
/// Reference to the node with link attached.
#[derive(Debug)]
pub struct IndexedNode<'a> {
node: &'a Entry,
pub struct IndexedNode<'a, V: Version> {
node: &'a Entry<V>,
link: EntryLink,
}
impl<'a> IndexedNode<'a> {
impl<'a, V: Version> IndexedNode<'a, V> {
fn left(&self) -> Result<EntryLink, Error> {
self.node.left().map_err(|e| e.augment(self.link))
}
@ -301,12 +301,12 @@ impl<'a> IndexedNode<'a> {
}
/// Reference to the entry struct.
pub fn node(&self) -> &Entry {
pub fn node(&self) -> &Entry<V> {
self.node
}
/// Reference to the entry metadata.
pub fn data(&self) -> &NodeData {
pub fn data(&self) -> &V::NodeData {
&self.node.data
}
@ -316,43 +316,50 @@ impl<'a> IndexedNode<'a> {
}
}
fn combine_nodes<'a>(left: IndexedNode<'a>, right: IndexedNode<'a>) -> Entry {
fn combine_nodes<'a, V: Version>(left: IndexedNode<'a, V>, right: IndexedNode<'a, V>) -> Entry<V> {
Entry {
kind: EntryKind::Node(left.link, right.link),
data: NodeData::combine(&left.node.data, &right.node.data),
data: V::combine(&left.node.data, &right.node.data),
}
}
#[cfg(test)]
mod tests {
use super::{Entry, EntryKind, EntryLink, NodeData, Tree};
use super::{Entry, EntryKind, EntryLink, Tree};
use crate::{node_data, NodeData, Version, V2};
use assert_matches::assert_matches;
use quickcheck::{quickcheck, TestResult};
fn leaf(height: u32) -> NodeData {
NodeData {
consensus_branch_id: 1,
subtree_commitment: [0u8; 32],
start_time: 0,
end_time: 0,
start_target: 0,
end_target: 0,
start_sapling_root: [0u8; 32],
end_sapling_root: [0u8; 32],
subtree_total_work: 0.into(),
start_height: height as u64,
end_height: height as u64,
sapling_tx: 7,
fn leaf(height: u32) -> node_data::V2 {
node_data::V2 {
v1: NodeData {
consensus_branch_id: 1,
subtree_commitment: [0u8; 32],
start_time: 0,
end_time: 0,
start_target: 0,
end_target: 0,
start_sapling_root: [0u8; 32],
end_sapling_root: [0u8; 32],
subtree_total_work: 0.into(),
start_height: height as u64,
end_height: height as u64,
sapling_tx: 7,
},
start_orchard_root: [0u8; 32],
end_orchard_root: [0u8; 32],
orchard_tx: 42,
}
}
fn initial() -> Tree {
let node1: Entry = leaf(1).into();
let node2: Entry = leaf(2).into();
fn initial() -> Tree<V2> {
let node1 = Entry::new_leaf(leaf(1));
let node2 = Entry::new_leaf(leaf(2));
let node3 = Entry {
data: NodeData::combine(&node1.data, &node2.data),
data: V2::combine(&node1.data, &node2.data),
kind: EntryKind::Leaf,
};
@ -360,7 +367,7 @@ mod tests {
}
// returns tree with specified number of leafs and it's root
fn generated(length: u32) -> Tree {
fn generated(length: u32) -> Tree<V2> {
assert!(length >= 3);
let mut tree = initial();
for i in 2..length {
@ -391,7 +398,7 @@ mod tests {
//
// so only (3) is added as real leaf
// while new root, (4g) is generated one
assert_eq!(new_root.data.end_height, 3);
assert_eq!(new_root.data.v1.end_height, 3);
assert_eq!(appended.len(), 1);
// ** APPEND 4 **
@ -415,7 +422,7 @@ mod tests {
//
// so (4), (5), (6) are added as real leaves
// and new root, (6) is stored one
assert_eq!(new_root.data.end_height, 4);
assert_eq!(new_root.data.v1.end_height, 4);
assert_eq!(appended.len(), 3);
assert_matches!(tree.root(), EntryLink::Stored(6));
@ -442,7 +449,7 @@ mod tests {
//
// so (7) is added as real leaf
// and new root, (8g) is generated one
assert_eq!(new_root.data.end_height, 5);
assert_eq!(new_root.data.v1.end_height, 5);
assert_eq!(appended.len(), 1);
assert_matches!(tree.root(), EntryLink::Generated(_));
tree.for_children(tree.root(), |l, r| {
@ -474,7 +481,7 @@ mod tests {
//
// so (7) is added as real leaf
// and new root, (10g) is generated one
assert_eq!(new_root.data.end_height, 6);
assert_eq!(new_root.data.v1.end_height, 6);
assert_eq!(appended.len(), 2);
assert_matches!(tree.root(), EntryLink::Generated(_));
tree.for_children(tree.root(), |l, r| {
@ -509,7 +516,7 @@ mod tests {
//
// so (10) is added as real leaf
// and new root, (12g) is generated one
assert_eq!(new_root.data.end_height, 7);
assert_eq!(new_root.data.v1.end_height, 7);
assert_eq!(appended.len(), 1);
assert_matches!(tree.root(), EntryLink::Generated(_));
tree.for_children(tree.root(), |l, r| {

View File

@ -0,0 +1,181 @@
use std::fmt;
use std::io;
use blake2::Params as Blake2Params;
use byteorder::{ByteOrder, LittleEndian};
use crate::{node_data, NodeData, MAX_NODE_DATA_SIZE};
/// Computes a 32-byte BLAKE2b digest of `input` under the given personalization.
fn blake2b_personal(personalization: &[u8], input: &[u8]) -> [u8; 32] {
    let mut state = Blake2Params::new()
        .hash_length(32)
        .personal(personalization)
        .to_state();
    state.update(input);
    let digest = state.finalize();
    let mut out = [0u8; 32];
    out.copy_from_slice(digest.as_bytes());
    out
}
/// Constructs the 16-byte BLAKE2b personalization for a chain-history node:
/// the ASCII prefix `"ZcashHistory"` followed by the little-endian encoding of
/// the consensus branch ID.
fn personalization(branch_id: u32) -> [u8; 16] {
    let mut result = [0u8; 16];
    result[..12].copy_from_slice(b"ZcashHistory");
    // `u32::to_le_bytes` produces the same little-endian layout as the previous
    // `byteorder::LittleEndian::write_u32` call, without the helper crate.
    result[12..].copy_from_slice(&branch_id.to_le_bytes());
    result
}
/// A version of the chain history tree.
pub trait Version {
    /// The node data for this tree version.
    type NodeData: fmt::Debug;
    /// Returns the consensus branch ID for the given node data.
    fn consensus_branch_id(data: &Self::NodeData) -> u32;
    /// Returns the start height for the given node data.
    fn start_height(data: &Self::NodeData) -> u64;
    /// Returns the end height for the given node data.
    fn end_height(data: &Self::NodeData) -> u64;
    /// Combines two nodes' metadata.
    ///
    /// The parent's subtree commitment is the personalized BLAKE2b hash of the
    /// two children's concatenated serializations; the remaining metadata is
    /// merged by [`Self::combine_inner`].
    fn combine(left: &Self::NodeData, right: &Self::NodeData) -> Self::NodeData {
        // Nodes from different consensus epochs must never be combined.
        assert_eq!(
            Self::consensus_branch_id(left),
            Self::consensus_branch_id(right)
        );
        // Serialize both children into a stack buffer; MAX_NODE_DATA_SIZE bounds
        // a single node's serialized size, so twice that always suffices.
        let mut hash_buf = [0u8; MAX_NODE_DATA_SIZE * 2];
        let size = {
            let mut cursor = ::std::io::Cursor::new(&mut hash_buf[..]);
            Self::write(left, &mut cursor)
                .expect("Writing to memory buf with enough length cannot fail; qed");
            Self::write(right, &mut cursor)
                .expect("Writing to memory buf with enough length cannot fail; qed");
            cursor.position() as usize
        };
        let hash = blake2b_personal(
            &personalization(Self::consensus_branch_id(left)),
            &hash_buf[..size],
        );
        Self::combine_inner(hash, left, right)
    }
    /// Combines two nodes metadata.
    ///
    /// For internal use.
    fn combine_inner(
        subtree_commitment: [u8; 32],
        left: &Self::NodeData,
        right: &Self::NodeData,
    ) -> Self::NodeData;
    /// Parses node data from the given reader.
    fn read<R: io::Read>(consensus_branch_id: u32, r: &mut R) -> io::Result<Self::NodeData>;
    /// Writes the byte representation of the given node data to the given writer.
    fn write<W: io::Write>(data: &Self::NodeData, w: &mut W) -> io::Result<()>;
    /// Converts to byte representation.
    #[allow(clippy::wrong_self_convention)]
    fn to_bytes(data: &Self::NodeData) -> Vec<u8> {
        // Write into a fixed-size stack buffer, then copy out only the bytes used.
        let mut buf = [0u8; MAX_NODE_DATA_SIZE];
        let pos = {
            let mut cursor = std::io::Cursor::new(&mut buf[..]);
            Self::write(data, &mut cursor).expect("Cursor cannot fail");
            cursor.position() as usize
        };
        buf[0..pos].to_vec()
    }
    /// Convert from byte representation.
    fn from_bytes<T: AsRef<[u8]>>(consensus_branch_id: u32, buf: T) -> io::Result<Self::NodeData> {
        let mut cursor = std::io::Cursor::new(buf);
        Self::read(consensus_branch_id, &mut cursor)
    }
    /// Hash node metadata.
    ///
    /// The personalization is derived from the node's own consensus branch ID.
    fn hash(data: &Self::NodeData) -> [u8; 32] {
        let bytes = Self::to_bytes(data);
        blake2b_personal(&personalization(Self::consensus_branch_id(data)), &bytes)
    }
}
/// Version 1 of the Zcash chain history tree.
///
/// This version was used for the Heartwood and Canopy epochs.
pub enum V1 {}
impl Version for V1 {
    type NodeData = NodeData;
    /// V1 node data carries the branch ID directly.
    fn consensus_branch_id(node: &Self::NodeData) -> u32 {
        node.consensus_branch_id
    }
    fn start_height(node: &Self::NodeData) -> u64 {
        node.start_height
    }
    fn end_height(node: &Self::NodeData) -> u64 {
        node.end_height
    }
    /// Delegates metadata merging to [`NodeData::combine_inner`].
    fn combine_inner(
        subtree_commitment: [u8; 32],
        left: &Self::NodeData,
        right: &Self::NodeData,
    ) -> Self::NodeData {
        NodeData::combine_inner(subtree_commitment, left, right)
    }
    fn read<R: io::Read>(consensus_branch_id: u32, reader: &mut R) -> io::Result<Self::NodeData> {
        NodeData::read(consensus_branch_id, reader)
    }
    fn write<W: io::Write>(node: &Self::NodeData, writer: &mut W) -> io::Result<()> {
        node.write(writer)
    }
}
/// Version 2 of the Zcash chain history tree.
///
/// This version is used from the NU5 epoch.
pub enum V2 {}
impl Version for V2 {
    type NodeData = node_data::V2;
    /// V2 node data embeds the V1 fields; the branch ID lives there.
    fn consensus_branch_id(node: &Self::NodeData) -> u32 {
        node.v1.consensus_branch_id
    }
    fn start_height(node: &Self::NodeData) -> u64 {
        node.v1.start_height
    }
    fn end_height(node: &Self::NodeData) -> u64 {
        node.v1.end_height
    }
    /// Delegates metadata merging to [`node_data::V2::combine_inner`].
    fn combine_inner(
        subtree_commitment: [u8; 32],
        left: &Self::NodeData,
        right: &Self::NodeData,
    ) -> Self::NodeData {
        node_data::V2::combine_inner(subtree_commitment, left, right)
    }
    fn read<R: io::Read>(consensus_branch_id: u32, reader: &mut R) -> io::Result<Self::NodeData> {
        node_data::V2::read(consensus_branch_id, reader)
    }
    fn write<W: io::Write>(node: &Self::NodeData, writer: &mut W) -> io::Result<()> {
        node.write(writer)
    }
}

View File

@ -6,6 +6,50 @@ and this library adheres to Rust's notion of
[Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- `zcash_primitives::sapling::redjubjub::PublicKey::verify_with_zip216`, for
controlling how RedJubjub signatures are validated. `PublicKey::verify` has
been altered to always use post-ZIP 216 validation rules.
- `zcash_primitives::transaction::Builder::with_progress_notifier`, for setting
a notification channel on which transaction build progress updates will be
sent.
- `zcash_primitives::transaction::Txid::{read, write, from_bytes}`
- `zcash_primitives::sapling::NoteValue` a typesafe wrapper for Sapling note values.
- `zcash_primitives::consensus::BranchId::{height_range, height_bounds}` functions
to provide range values for branch active heights.
- `zcash_primitives::consensus::NetworkUpgrade::Nu5` value representing the Nu5 upgrade.
- `zcash_primitives::consensus::BranchId::Nu5` value representing the Nu5 consensus branch.
- New modules under `zcash_primitives::transaction::components` for building parts of
transactions:
- `sapling::builder` for Sapling transaction components.
- `transparent::builder` for transparent transaction components.
- `tze::builder` for TZE transaction components.
- `orchard` parsing and serialization for Orchard transaction components.
- `zcash_primitives::transaction::Authorization` a trait representing a type-level
record of authorization types that correspond to signatures, witnesses, and
proofs for each Zcash sub-protocol (transparent, Sprout, Sapling, TZE, and
Orchard). This type makes it possible to encode a type-safe state machine
for the application of authorizing data to a transaction; implementations of
this trait represent different states of the authorization process.
- New bundle types under the `zcash_primitives::transaction` submodules, one for
each Zcash sub-protocol. These are now used instead of bare fields
within the `TransactionData` type.
- `components::sapling::Bundle` bundle of
Sapling transaction elements. This new struct is parameterized by a
type bounded on a newly added `sapling::Authorization` trait which
is used to enable static reasoning about the state of Sapling proofs and
authorizing data, as described above.
- `components::transparent::Bundle` bundle of
transparent transaction elements. This new struct is parameterized by a
type bounded on a newly added `transparent::Authorization` trait which
is used to enable static reasoning about the state of transparent witness
data, as described above.
- `components::tze::Bundle` bundle of TZE
transaction elements. This new struct is parameterized by a
type bounded on a newly added `tze::Authorization` trait which
is used to enable static reasoning about the state of TZE witness
data, as described above.
### Changed
- MSRV is now 1.51.0.
- The following modules and helpers have been moved into
@ -19,6 +63,40 @@ and this library adheres to Rust's notion of
- `zcash_primitives::util::{hash_to_scalar, generate_random_rseed}`
- Renamed `zcash_primitives::transaction::components::JSDescription` to
`JsDescription` (matching Rust naming conventions).
- `zcash_primitives::transaction::TxId` contents is now private.
- Renamed `zcash_primitives::transaction::components::tze::hash` to
`zcash_primitives::transaction::components::tze::txid`
- `zcash_primitives::transaction::components::tze::TzeOutPoint` constructor
now takes a TxId rather than a raw byte array.
- `zcash_primitives::transaction::components::Amount` addition, subtraction,
and summation now return `Option` rather than panicking on overflow.
- `zcash_primitives::transaction::builder`:
- `Error` has been modified to wrap the error types produced by its child
builders.
- `Builder::build` no longer takes a consensus branch ID parameter. The
builder now selects the correct consensus branch ID for the given target
height.
- The `zcash_primitives::transaction::TransactionData` struct has been modified
such that it now contains common header information, and then contains
a separate `Bundle` value for each sub-protocol (transparent, Sprout, Sapling,
and TZE) and an Orchard bundle value has been added. `TransactionData` is now
parameterized by a type bounded on the newly added
`zcash_primitives::transaction::Authorization` trait. This bound has been
propagated to the individual transaction builders, such that the authorization
state of a transaction is clearly represented in the type and the presence
or absence of witness and/or proof data is statically known, instead of being only
determined at runtime via the presence or absence of `Option`al values.
- `zcash_primitives::transaction::components::sapling` parsing and serialization
have been adapted for use with the new `sapling::Bundle` type.
- `zcash_primitives::transaction::Transaction` parsing and serialization
have been adapted for use with the new `TransactionData` organization.
- Generators for property testing have been moved out of the main transaction
module such that they are now colocated in the modules with the types
that they generate.
- The `ephemeral_key` field of `OutputDescription` has had its type changed from
`jubjub::ExtendedPoint` to `zcash_note_encryption::EphemeralKeyBytes`.
- The `epk: jubjub::ExtendedPoint` field of `CompactOutputDescription` has been
replaced by `ephemeral_key: zcash_note_encryption::EphemeralKeyBytes`.
## [0.5.0] - 2021-03-26
### Added
@ -121,7 +199,7 @@ and this library adheres to Rust's notion of
- `try_sapling_output_recovery`
- `try_sapling_output_recovery_with_ock`
- `zcash_primitives::primitives::SaplingIvk` is now used where functions
previously used undistinguished `jubjub::Fr` values; this affects Sapling
previously used undistinguished `jubjub::Fr` values; this affects Sapling
note decryption and handling of IVKs by the wallet backend code.
- `zcash_primitives::primitives::ViewingKey::ivk` now returns `SaplingIvk`
- `zcash_primitives::primitives::Note::nf` now returns `Nullifier`.

View File

@ -17,43 +17,50 @@ all-features = true
[dependencies]
aes = "0.6"
bitvec = "0.18"
bitvec = "0.22"
blake2b_simd = "0.5"
blake2s_simd = "0.5"
bls12_381 = "0.3.1"
bls12_381 = "0.5"
byteorder = "1"
crypto_api_chachapoly = "0.4"
equihash = { version = "0.1", path = "../components/equihash" }
ff = "0.8"
ff = "0.10"
fpe = "0.4"
group = "0.8"
group = "0.10"
hex = "0.4"
jubjub = "0.5.1"
jubjub = "0.7"
lazy_static = "1"
log = "0.4"
proptest = { version = "0.10.1", optional = true }
rand = "0.7"
rand_core = "0.5.1"
nonempty = "0.7"
orchard = "0.0"
pasta_curves = "0.1"
proptest = { version = "1.0.0", optional = true }
rand = "0.8"
rand_core = "0.6"
ripemd160 = { version = "0.9", optional = true }
secp256k1 = { version = "0.19", optional = true }
sha2 = "0.9"
subtle = "2.2.3"
zcash_note_encryption = { version = "0.0", path = "../components/zcash_note_encryption" }
# Temporary workaround for https://github.com/myrrlyn/funty/issues/3
funty = "=1.1.0"
[dev-dependencies]
criterion = "0.3"
hex-literal = "0.3"
proptest = "0.10.1"
rand_xorshift = "0.2"
proptest = "1.0.0"
rand_xorshift = "0.3"
orchard = { version = "0.0", features = ["test-dependencies"] }
[target.'cfg(unix)'.dev-dependencies]
pprof = { version = "0.4.2", features = ["criterion", "flamegraph"] }
[features]
transparent-inputs = ["ripemd160", "secp256k1"]
test-dependencies = ["proptest"]
zfuture = []
[lib]
bench = false
[[bench]]
name = "note_decryption"
harness = false

View File

@ -1,17 +1,30 @@
use criterion::{criterion_group, criterion_main, Criterion};
use std::iter;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use ff::Field;
use group::GroupEncoding;
use rand_core::OsRng;
use zcash_note_encryption::batch;
use zcash_primitives::{
consensus::{NetworkUpgrade::Canopy, Parameters, TestNetwork, TEST_NETWORK},
memo::MemoBytes,
sapling::{
note_encryption::{sapling_note_encryption, try_sapling_note_decryption},
note_encryption::{
sapling_note_encryption, try_sapling_compact_note_decryption,
try_sapling_note_decryption, SaplingDomain,
},
util::generate_random_rseed,
Diversifier, PaymentAddress, SaplingIvk, ValueCommitment,
},
transaction::components::{OutputDescription, GROTH_PROOF_SIZE},
transaction::components::{
sapling::{CompactOutputDescription, GrothProofBytes, OutputDescription},
GROTH_PROOF_SIZE,
},
};
#[cfg(unix)]
use pprof::criterion::{Output, PProfProfiler};
fn bench_note_decryption(c: &mut Criterion) {
let mut rng = OsRng;
let height = TEST_NETWORK.activation_height(Canopy).unwrap();
@ -20,7 +33,7 @@ fn bench_note_decryption(c: &mut Criterion) {
let invalid_ivk = SaplingIvk(jubjub::Fr::random(&mut rng));
// Construct a fake Sapling output as if we had just deserialized a transaction.
let output = {
let output: OutputDescription<GrothProofBytes> = {
let diversifier = Diversifier([0; 11]);
let pk_d = diversifier.g_d().unwrap() * valid_ivk.0;
let pa = PaymentAddress::from_parts(diversifier, pk_d).unwrap();
@ -40,7 +53,7 @@ fn bench_note_decryption(c: &mut Criterion) {
let ne =
sapling_note_encryption::<_, TestNetwork>(None, note, pa, MemoBytes::empty(), &mut rng);
let ephemeral_key = *ne.epk();
let ephemeral_key = ne.epk().to_bytes().into();
let enc_ciphertext = ne.encrypt_note_plaintext();
let out_ciphertext = ne.encrypt_outgoing_plaintext(&cv, &cmu, &mut rng);
@ -54,16 +67,85 @@ fn bench_note_decryption(c: &mut Criterion) {
}
};
let mut group = c.benchmark_group("Sapling note decryption");
{
let mut group = c.benchmark_group("sapling-note-decryption");
group.throughput(Throughput::Elements(1));
group.bench_function("valid", |b| {
b.iter(|| try_sapling_note_decryption(&TEST_NETWORK, height, &valid_ivk, &output).unwrap())
});
group.bench_function("valid", |b| {
b.iter(|| {
try_sapling_note_decryption(&TEST_NETWORK, height, &valid_ivk, &output).unwrap()
})
});
group.bench_function("invalid", |b| {
b.iter(|| try_sapling_note_decryption(&TEST_NETWORK, height, &invalid_ivk, &output))
});
group.bench_function("invalid", |b| {
b.iter(|| try_sapling_note_decryption(&TEST_NETWORK, height, &invalid_ivk, &output))
});
let compact = CompactOutputDescription::from(output.clone());
group.bench_function("compact-valid", |b| {
b.iter(|| {
try_sapling_compact_note_decryption(&TEST_NETWORK, height, &valid_ivk, &compact)
.unwrap()
})
});
group.bench_function("compact-invalid", |b| {
b.iter(|| {
try_sapling_compact_note_decryption(&TEST_NETWORK, height, &invalid_ivk, &compact)
})
});
}
{
let valid_ivks = vec![valid_ivk];
let invalid_ivks = vec![invalid_ivk];
// We benchmark with one IVK so the overall batch size is equal to the number of
// outputs.
let size = 10;
let outputs: Vec<_> = iter::repeat(output)
.take(size)
.map(|output| {
(
SaplingDomain::for_height(TEST_NETWORK.clone(), height),
output,
)
})
.collect();
let mut group = c.benchmark_group("sapling-batch-note-decryption");
group.throughput(Throughput::Elements(size as u64));
group.bench_function(BenchmarkId::new("valid", size), |b| {
b.iter(|| batch::try_note_decryption(&valid_ivks, &outputs))
});
group.bench_function(BenchmarkId::new("invalid", size), |b| {
b.iter(|| batch::try_note_decryption(&invalid_ivks, &outputs))
});
let compact: Vec<_> = outputs
.into_iter()
.map(|(domain, output)| (domain, CompactOutputDescription::from(output.clone())))
.collect();
group.bench_function(BenchmarkId::new("compact-valid", size), |b| {
b.iter(|| batch::try_compact_note_decryption(&valid_ivks, &compact))
});
group.bench_function(BenchmarkId::new("compact-invalid", size), |b| {
b.iter(|| batch::try_compact_note_decryption(&invalid_ivks, &compact))
});
}
}
#[cfg(unix)]
criterion_group! {
name = benches;
config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
targets = bench_note_decryption
}
#[cfg(not(unix))]
criterion_group!(benches, bench_note_decryption);
criterion_main!(benches);

View File

@ -2,6 +2,9 @@ use criterion::{criterion_group, criterion_main, Criterion};
use rand_core::{OsRng, RngCore};
use zcash_primitives::sapling::pedersen_hash::{pedersen_hash, Personalization};
#[cfg(unix)]
use pprof::criterion::{Output, PProfProfiler};
fn bench_pedersen_hash(c: &mut Criterion) {
let rng = &mut OsRng;
let bits = (0..510)
@ -9,10 +12,17 @@ fn bench_pedersen_hash(c: &mut Criterion) {
.collect::<Vec<_>>();
let personalization = Personalization::MerkleTree(31);
c.bench_function("Pedersen hash", |b| {
c.bench_function("pedersen-hash", |b| {
b.iter(|| pedersen_hash(personalization, bits.clone()))
});
}
#[cfg(unix)]
criterion_group! {
name = benches;
config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
targets = bench_pedersen_hash
}
#[cfg(not(unix))]
criterion_group!(benches, bench_pedersen_hash);
criterion_main!(benches);

View File

@ -4,4 +4,3 @@
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc 23a823e9da7e6ae62a79153ab97362dd9d81b8d9eafc396c87870dfa8aa7354c # shrinks to tx = Transaction { txid: TxId([67, 236, 122, 87, 159, 85, 97, 164, 42, 126, 150, 55, 173, 65, 86, 103, 39, 53, 166, 88, 190, 39, 82, 24, 24, 1, 247, 35, 186, 51, 22, 210]), data: TransactionData( version = Sprout(1), vin = [], vout = [], lock_time = 0, expiry_height = BlockHeight(0), value_balance = Amount(1), shielded_spends = [], shielded_outputs = [], joinsplits = [], joinsplit_pubkey = None, binding_sig = None) }

View File

@ -3,7 +3,7 @@
use std::cmp::{Ord, Ordering};
use std::convert::TryFrom;
use std::fmt;
use std::ops::{Add, Sub};
use std::ops::{Add, Bound, RangeBounds, Sub};
use crate::constants;
@ -195,6 +195,7 @@ impl Parameters for MainNetwork {
NetworkUpgrade::Blossom => Some(BlockHeight(653_600)),
NetworkUpgrade::Heartwood => Some(BlockHeight(903_000)),
NetworkUpgrade::Canopy => Some(BlockHeight(1_046_400)),
NetworkUpgrade::Nu5 => None,
#[cfg(feature = "zfuture")]
NetworkUpgrade::ZFuture => None,
}
@ -239,6 +240,7 @@ impl Parameters for TestNetwork {
NetworkUpgrade::Blossom => Some(BlockHeight(584_000)),
NetworkUpgrade::Heartwood => Some(BlockHeight(903_800)),
NetworkUpgrade::Canopy => Some(BlockHeight(1_028_500)),
NetworkUpgrade::Nu5 => None,
#[cfg(feature = "zfuture")]
NetworkUpgrade::ZFuture => None,
}
@ -352,6 +354,10 @@ pub enum NetworkUpgrade {
///
/// [Canopy]: https://z.cash/upgrade/canopy/
Canopy,
/// The [Nu5] network upgrade.
///
/// [Nu5]: https://z.cash/upgrade/nu5/
Nu5,
/// The ZFUTURE network upgrade.
///
/// This upgrade is expected never to activate on mainnet;
@ -369,6 +375,7 @@ impl fmt::Display for NetworkUpgrade {
NetworkUpgrade::Blossom => write!(f, "Blossom"),
NetworkUpgrade::Heartwood => write!(f, "Heartwood"),
NetworkUpgrade::Canopy => write!(f, "Canopy"),
NetworkUpgrade::Nu5 => write!(f, "Nu5"),
#[cfg(feature = "zfuture")]
NetworkUpgrade::ZFuture => write!(f, "ZFUTURE"),
}
@ -383,6 +390,7 @@ impl NetworkUpgrade {
NetworkUpgrade::Blossom => BranchId::Blossom,
NetworkUpgrade::Heartwood => BranchId::Heartwood,
NetworkUpgrade::Canopy => BranchId::Canopy,
NetworkUpgrade::Nu5 => BranchId::Nu5,
#[cfg(feature = "zfuture")]
NetworkUpgrade::ZFuture => BranchId::ZFuture,
}
@ -399,6 +407,7 @@ const UPGRADES_IN_ORDER: &[NetworkUpgrade] = &[
NetworkUpgrade::Blossom,
NetworkUpgrade::Heartwood,
NetworkUpgrade::Canopy,
NetworkUpgrade::Nu5,
];
pub const ZIP212_GRACE_PERIOD: u32 = 32256;
@ -415,7 +424,7 @@ pub const ZIP212_GRACE_PERIOD: u32 = 32256;
///
/// See [ZIP 200](https://zips.z.cash/zip-0200) for more details.
///
/// [`signature_hash`]: crate::transaction::signature_hash
/// [`signature_hash`]: crate::transaction::sighash::signature_hash
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BranchId {
/// The consensus rules at the launch of Zcash.
@ -430,6 +439,8 @@ pub enum BranchId {
Heartwood,
/// The consensus rules deployed by [`NetworkUpgrade::Canopy`].
Canopy,
/// The consensus rules deployed by [`NetworkUpgrade::Nu5`].
Nu5,
/// Candidates for future consensus rules; this branch will never
/// activate on mainnet.
#[cfg(feature = "zfuture")]
@ -447,6 +458,7 @@ impl TryFrom<u32> for BranchId {
0x2bb4_0e60 => Ok(BranchId::Blossom),
0xf5b9_230b => Ok(BranchId::Heartwood),
0xe9ff_75a6 => Ok(BranchId::Canopy),
0xf919_a198 => Ok(BranchId::Nu5),
#[cfg(feature = "zfuture")]
0xffff_ffff => Ok(BranchId::ZFuture),
_ => Err("Unknown consensus branch ID"),
@ -463,6 +475,7 @@ impl From<BranchId> for u32 {
BranchId::Blossom => 0x2bb4_0e60,
BranchId::Heartwood => 0xf5b9_230b,
BranchId::Canopy => 0xe9ff_75a6,
BranchId::Nu5 => 0xf919_a198,
#[cfg(feature = "zfuture")]
BranchId::ZFuture => 0xffff_ffff,
}
@ -484,6 +497,103 @@ impl BranchId {
// Sprout rules apply before any network upgrade
BranchId::Sprout
}
/// Returns the range of heights for the consensus epoch associated with this branch id.
///
/// The resulting tuple implements the [`RangeBounds<BlockHeight>`] trait.
pub fn height_range<P: Parameters>(&self, params: &P) -> Option<impl RangeBounds<BlockHeight>> {
    let (lower, upper) = self.height_bounds(params)?;
    // An epoch with no successor extends indefinitely.
    let end = match upper {
        Some(h) => Bound::Excluded(h),
        None => Bound::Unbounded,
    };
    Some((Bound::Included(lower), end))
}
/// Returns the range of heights for the consensus epoch associated with this branch id.
///
/// The return type of this value is slightly more precise than [`Self::height_range`]:
/// - `Some((x, Some(y)))` means that the consensus rules corresponding to this branch id
/// are in effect for the range `x..y`
/// - `Some((x, None))` means that the consensus rules corresponding to this branch id are
/// in effect for the range `x..`
/// - `None` means that the consensus rules corresponding to this branch id are never in effect.
pub fn height_bounds<P: Parameters>(
    &self,
    params: &P,
) -> Option<(BlockHeight, Option<BlockHeight>)> {
    // For each branch, the lower bound is its own activation height and the
    // upper bound is the activation height (if any) of the next upgrade.
    match self {
        // Sprout has no activation of its own: it spans from genesis until
        // Overwinter activates (if it ever does on this network).
        BranchId::Sprout => params
            .activation_height(NetworkUpgrade::Overwinter)
            .map(|upper| (BlockHeight(0), Some(upper))),
        BranchId::Overwinter => params
            .activation_height(NetworkUpgrade::Overwinter)
            .map(|lower| (lower, params.activation_height(NetworkUpgrade::Sapling))),
        BranchId::Sapling => params
            .activation_height(NetworkUpgrade::Sapling)
            .map(|lower| (lower, params.activation_height(NetworkUpgrade::Blossom))),
        BranchId::Blossom => params
            .activation_height(NetworkUpgrade::Blossom)
            .map(|lower| (lower, params.activation_height(NetworkUpgrade::Heartwood))),
        BranchId::Heartwood => params
            .activation_height(NetworkUpgrade::Heartwood)
            .map(|lower| (lower, params.activation_height(NetworkUpgrade::Canopy))),
        BranchId::Canopy => params
            .activation_height(NetworkUpgrade::Canopy)
            .map(|lower| (lower, params.activation_height(NetworkUpgrade::Nu5))),
        // Nu5's upper bound depends on whether the ZFuture upgrade is compiled
        // in: without the `zfuture` feature, Nu5 is the terminal epoch.
        BranchId::Nu5 => params.activation_height(NetworkUpgrade::Nu5).map(|lower| {
            #[cfg(feature = "zfuture")]
            let upper = params.activation_height(NetworkUpgrade::ZFuture);
            #[cfg(not(feature = "zfuture"))]
            let upper = None;
            (lower, upper)
        }),
        #[cfg(feature = "zfuture")]
        BranchId::ZFuture => params
            .activation_height(NetworkUpgrade::ZFuture)
            .map(|lower| (lower, None)),
    }
}
/// Returns `false` for the Sprout and Overwinter branches, and `true` for
/// every later consensus branch.
pub fn sprout_uses_groth_proofs(&self) -> bool {
    match self {
        BranchId::Sprout | BranchId::Overwinter => false,
        _ => true,
    }
}
}
// Proptest generators for consensus types; compiled for unit tests and for
// downstream crates via the `test-dependencies` feature.
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
    use proptest::sample::select;
    use proptest::strategy::{Just, Strategy};
    use super::{BlockHeight, BranchId, Parameters};
    /// Returns a strategy that selects from all known consensus branch IDs
    /// (including `ZFuture` when the `zfuture` feature is enabled).
    pub fn arb_branch_id() -> impl Strategy<Value = BranchId> {
        select(vec![
            BranchId::Sprout,
            BranchId::Overwinter,
            BranchId::Sapling,
            BranchId::Blossom,
            BranchId::Heartwood,
            BranchId::Canopy,
            BranchId::Nu5,
            #[cfg(feature = "zfuture")]
            BranchId::ZFuture,
        ])
    }
    /// Returns a strategy producing block heights within the consensus epoch of
    /// `branch_id` on the given network, or `Just(None)` when that branch never
    /// activates there.
    pub fn arb_height<P: Parameters>(
        branch_id: BranchId,
        params: &P,
    ) -> impl Strategy<Value = Option<BlockHeight>> {
        branch_id
            .height_bounds(params)
            .map_or(Strategy::boxed(Just(None)), |(lower, upper)| {
                Strategy::boxed(
                    // An epoch with no upper bound is sampled up to u32::MAX.
                    (lower.0..upper.map_or(std::u32::MAX, |u| u.0))
                        .prop_map(|h| Some(BlockHeight(h))),
                )
            })
    }
}
#[cfg(test)]
@ -503,7 +613,9 @@ mod tests {
MAIN_NETWORK.activation_height(nu_a),
MAIN_NETWORK.activation_height(nu_b),
) {
(a, b) if a < b => (),
(Some(a), Some(b)) if a < b => (),
(Some(_), None) => (),
(None, None) => (),
_ => panic!(
"{} should not be before {} in UPGRADES_IN_ORDER",
nu_a, nu_b

View File

@ -1,8 +1,16 @@
//! Core traits and structs for Transparent Zcash Extensions.
use crate::transaction::components::{Amount, TzeOut, TzeOutPoint};
use std::fmt;
use crate::transaction::components::{
tze::{self, TzeOut},
Amount,
};
/// A typesafe wrapper for witness payloads
#[derive(Debug, Clone, PartialEq)]
pub struct AuthData(pub Vec<u8>);
/// Binary parsing capability for TZE preconditions & witnesses.
///
/// Serialization formats interpreted by implementations of this trait become consensus-critical
@ -62,28 +70,28 @@ impl Precondition {
/// treated as opaque to all but the extension corresponding to the encapsulated `extension_id`
/// value.
#[derive(Clone, Debug, PartialEq)]
pub struct Witness {
pub struct Witness<T> {
pub extension_id: u32,
pub mode: u32,
pub payload: Vec<u8>,
pub payload: T,
}
impl Witness {
impl Witness<AuthData> {
/// Produce the intermediate format for an extension-specific witness
/// type.
pub fn from<P: ToPayload>(extension_id: u32, value: &P) -> Witness {
pub fn from<P: ToPayload>(extension_id: u32, value: &P) -> Witness<AuthData> {
let (mode, payload) = value.to_payload();
Witness {
extension_id,
mode,
payload,
payload: AuthData(payload),
}
}
/// Attempt to parse an extension-specific witness value from the
/// intermediate representation.
pub fn try_to<P: FromPayload>(&self) -> Result<P, P::Error> {
P::from_payload(self.mode, &self.payload)
P::from_payload(self.mode, &self.payload.0)
}
}
@ -137,7 +145,7 @@ pub trait Extension<C> {
fn verify(
&self,
precondition: &Precondition,
witness: &Witness,
witness: &Witness<AuthData>,
context: &C,
) -> Result<(), Self::Error>
where
@ -146,7 +154,7 @@ pub trait Extension<C> {
{
self.verify_inner(
&Self::Precondition::from_payload(precondition.mode, &precondition.payload)?,
&Self::Witness::from_payload(witness.mode, &witness.payload)?,
&Self::Witness::from_payload(witness.mode, &witness.payload.0)?,
&context,
)
}
@ -178,7 +186,7 @@ pub trait ExtensionTxBuilder<'a> {
&mut self,
extension_id: u32,
mode: u32,
prevout: (TzeOutPoint, TzeOut),
prevout: (tze::OutPoint, TzeOut),
witness_builder: WBuilder,
) -> Result<(), Self::BuildError>
where

View File

@ -47,7 +47,7 @@ impl<Node: Hashable> PathFiller<Node> {
///
/// The depth of the Merkle tree is fixed at 32, equal to the depth of the Sapling
/// commitment tree.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct CommitmentTree<Node: Hashable> {
left: Option<Node>,
right: Option<Node>,
@ -1057,3 +1057,25 @@ mod tests {
}
}
}
// Proptest generators for commitment trees; compiled for unit tests and for
// downstream crates via the `test-dependencies` feature.
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
    use core::fmt::Debug;
    use proptest::collection::vec;
    use proptest::prelude::*;
    use super::{CommitmentTree, Hashable};
    /// Returns a strategy producing commitment trees built by appending at
    /// least `min_size` (and fewer than `min_size + 100`) arbitrary nodes,
    /// drawn from `arb_node`, to an initially empty tree.
    pub fn arb_commitment_tree<Node: Hashable + Debug, T: Strategy<Value = Node>>(
        min_size: usize,
        arb_node: T,
    ) -> impl Strategy<Value = CommitmentTree<Node>> {
        vec(arb_node, min_size..(min_size + 100)).prop_map(|v| {
            let mut tree = CommitmentTree::empty();
            for node in v.into_iter() {
                // NOTE(review): append only fails when the tree is full, which
                // these sizes should not reach at the fixed depth of 32.
                tree.append(node).unwrap();
            }
            tree
        })
    }
}

View File

@ -11,18 +11,19 @@ pub mod util;
use bitvec::{order::Lsb0, view::AsBits};
use blake2s_simd::Params as Blake2sParams;
use byteorder::{LittleEndian, WriteBytesExt};
use ff::PrimeField;
use ff::{Field, PrimeField};
use group::{Curve, Group, GroupEncoding};
use lazy_static::lazy_static;
use rand_core::{CryptoRng, RngCore};
use std::array::TryFromSliceError;
use std::convert::TryInto;
use std::convert::{TryFrom, TryInto};
use std::io::{self, Read, Write};
use subtle::{Choice, ConstantTimeEq};
use crate::{
constants::{self, SPENDING_KEY_GENERATOR},
merkle_tree::Hashable,
transaction::components::amount::MAX_MONEY,
};
use self::{
@ -360,6 +361,11 @@ impl Nullifier {
self.0.to_vec()
}
}
impl AsRef<[u8]> for Nullifier {
fn as_ref(&self) -> &[u8] {
&self.0
}
}
impl ConstantTimeEq for Nullifier {
fn ct_eq(&self, other: &Self) -> Choice {
@ -367,6 +373,27 @@ impl ConstantTimeEq for Nullifier {
}
}
/// A typesafe wrapper for Sapling note values, guaranteed not to exceed
/// `MAX_MONEY`.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct NoteValue(u64);
impl TryFrom<u64> for NoteValue {
    type Error = ();
    /// Accepts the raw value only when it does not exceed `MAX_MONEY`.
    fn try_from(value: u64) -> Result<Self, Self::Error> {
        match value {
            v if v <= MAX_MONEY as u64 => Ok(NoteValue(v)),
            _ => Err(()),
        }
    }
}
impl From<NoteValue> for u64 {
    /// Unwraps the raw note value.
    fn from(value: NoteValue) -> u64 {
        let NoteValue(raw) = value;
        raw
    }
}
#[derive(Clone, Debug)]
pub struct Note {
/// The value of the note
@ -470,14 +497,7 @@ impl Note {
pub(crate) fn generate_or_derive_esk_internal<R: RngCore>(&self, rng: &mut R) -> jubjub::Fr {
match self.derive_esk() {
None => {
// create random 64 byte buffer
let mut buffer = [0u8; 64];
rng.fill_bytes(&mut buffer);
// reduce to uniform value
jubjub::Fr::from_bytes_wide(&buffer)
}
None => jubjub::Fr::random(rng),
Some(esk) => esk,
}
}
@ -492,3 +512,58 @@ impl Note {
}
}
}
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
use proptest::prelude::*;
use std::cmp::min;
use std::convert::TryFrom;
use crate::{
transaction::components::amount::MAX_MONEY, zip32::testing::arb_extended_spending_key,
};
use super::{Node, Note, NoteValue, PaymentAddress, Rseed};
prop_compose! {
pub fn arb_note_value()(value in 0u64..=MAX_MONEY as u64) -> NoteValue {
NoteValue::try_from(value).unwrap()
}
}
prop_compose! {
/// The
pub fn arb_positive_note_value(bound: u64)(
value in 1u64..=(min(bound, MAX_MONEY as u64))
) -> NoteValue {
NoteValue::try_from(value).unwrap()
}
}
pub fn arb_payment_address() -> impl Strategy<Value = PaymentAddress> {
arb_extended_spending_key()
.prop_map(|sk| sk.default_address().map(|(_, a)| a))
.prop_filter("A valid payment address is required.", |r| r.is_ok())
.prop_map(|r| r.unwrap())
}
prop_compose! {
pub fn arb_node()(value in prop::array::uniform32(prop::num::u8::ANY)) -> Node {
Node::new(value)
}
}
prop_compose! {
pub fn arb_note(value: NoteValue)(
addr in arb_payment_address(),
rseed in prop::array::uniform32(prop::num::u8::ANY).prop_map(Rseed::AfterZip212)
) -> Note {
Note {
value: value.into(),
g_d: addr.g_d().unwrap(), // this unwrap is safe because arb_payment_address always generates an address witha valid g_d
pk_d: *addr.pk_d(),
rseed
}
}
}
}

View File

@ -3,21 +3,25 @@ use blake2b_simd::{Hash as Blake2bHash, Params as Blake2bParams};
use byteorder::{LittleEndian, WriteBytesExt};
use ff::PrimeField;
use group::{cofactor::CofactorGroup, GroupEncoding};
use jubjub::{AffinePoint, ExtendedPoint};
use rand_core::RngCore;
use std::convert::TryInto;
use zcash_note_encryption::{
try_compact_note_decryption, try_note_decryption, try_output_recovery_with_ock, Domain,
EphemeralKeyBytes, NoteEncryption, NotePlaintextBytes, NoteValidity, OutPlaintextBytes,
OutgoingCipherKey, ShieldedOutput, COMPACT_NOTE_SIZE, NOTE_PLAINTEXT_SIZE, OUT_CIPHERTEXT_SIZE,
OUT_PLAINTEXT_SIZE,
try_compact_note_decryption, try_note_decryption, try_output_recovery_with_ock,
try_output_recovery_with_ovk, Domain, EphemeralKeyBytes, NoteEncryption, NotePlaintextBytes,
NoteValidity, OutPlaintextBytes, OutgoingCipherKey, ShieldedOutput, COMPACT_NOTE_SIZE,
NOTE_PLAINTEXT_SIZE, OUT_PLAINTEXT_SIZE,
};
use crate::{
consensus::{self, BlockHeight, NetworkUpgrade::Canopy, ZIP212_GRACE_PERIOD},
memo::MemoBytes,
sapling::{keys::OutgoingViewingKey, Diversifier, Note, PaymentAddress, Rseed, SaplingIvk},
transaction::components::{amount::Amount, sapling::OutputDescription},
transaction::components::{
amount::Amount,
sapling::{self, OutputDescription},
},
};
pub const KDF_SAPLING_PERSONALIZATION: &[u8; 16] = b"Zcash_SaplingKDF";
@ -54,7 +58,7 @@ fn kdf_sapling(dhsecret: jubjub::SubgroupPoint, ephemeral_key: &EphemeralKeyByte
pub fn prf_ock(
ovk: &OutgoingViewingKey,
cv: &jubjub::ExtendedPoint,
cmu: &bls12_381::Scalar,
cmu_bytes: &[u8; 32],
ephemeral_key: &EphemeralKeyBytes,
) -> OutgoingCipherKey {
OutgoingCipherKey(
@ -64,7 +68,7 @@ pub fn prf_ock(
.to_state()
.update(&ovk.0)
.update(&cv.to_bytes())
.update(&cmu.to_repr())
.update(cmu_bytes)
.update(ephemeral_key.as_ref())
.finalize()
.as_bytes()
@ -116,6 +120,12 @@ pub struct SaplingDomain<P: consensus::Parameters> {
height: BlockHeight,
}
impl<P: consensus::Parameters> SaplingDomain<P> {
pub fn for_height(params: P, height: BlockHeight) -> Self {
Self { params, height }
}
}
impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
type EphemeralSecretKey = jubjub::Scalar;
// It is acceptable for this to be a point because we enforce by consensus that
@ -175,6 +185,37 @@ impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
kdf_sapling(dhsecret, epk)
}
fn batch_kdf<'a>(
items: impl Iterator<Item = (Option<Self::SharedSecret>, &'a EphemeralKeyBytes)>,
) -> Vec<Option<Self::SymmetricKey>> {
let (shared_secrets, ephemeral_keys): (Vec<_>, Vec<_>) = items.unzip();
let secrets: Vec<_> = shared_secrets
.iter()
.filter_map(|s| s.map(ExtendedPoint::from))
.collect();
let mut secrets_affine = vec![AffinePoint::identity(); shared_secrets.len()];
group::Curve::batch_normalize(&secrets, &mut secrets_affine);
let mut secrets_affine = secrets_affine.into_iter();
shared_secrets
.into_iter()
.map(|s| s.and_then(|_| secrets_affine.next()))
.zip(ephemeral_keys.into_iter())
.map(|(secret, ephemeral_key)| {
secret.map(|dhsecret| {
Blake2bParams::new()
.hash_length(32)
.personal(KDF_SAPLING_PERSONALIZATION)
.to_state()
.update(&dhsecret.to_bytes())
.update(ephemeral_key.as_ref())
.finalize()
})
})
.collect()
}
fn note_plaintext_bytes(
note: &Self::Note,
to: &Self::Recipient,
@ -209,10 +250,10 @@ impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
fn derive_ock(
ovk: &Self::OutgoingViewingKey,
cv: &Self::ValueCommitment,
cmu: &Self::ExtractedCommitment,
cmu_bytes: &Self::ExtractedCommitmentBytes,
epk: &EphemeralKeyBytes,
) -> OutgoingCipherKey {
prf_ock(ovk, cv, cmu, epk)
prf_ock(ovk, cv, cmu_bytes, epk)
}
fn outgoing_plaintext_bytes(
@ -230,6 +271,26 @@ impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
epk_bytes(epk)
}
fn epk(ephemeral_key: &EphemeralKeyBytes) -> Option<Self::EphemeralPublicKey> {
// ZIP 216: We unconditionally reject non-canonical encodings, because these have
// always been rejected by consensus (due to small-order checks).
// https://zips.z.cash/zip-0216#specification
jubjub::ExtendedPoint::from_bytes(&ephemeral_key.0).into()
}
fn batch_epk(
ephemeral_keys: impl Iterator<Item = EphemeralKeyBytes>,
) -> Vec<(Option<Self::EphemeralPublicKey>, EphemeralKeyBytes)> {
let ephemeral_keys: Vec<_> = ephemeral_keys.collect();
let epks = jubjub::AffinePoint::batch_from_bytes(ephemeral_keys.iter().map(|b| b.0));
epks.into_iter()
.zip(ephemeral_keys.into_iter())
.map(|(epk, ephemeral_key)| {
(epk.map(jubjub::ExtendedPoint::from).into(), ephemeral_key)
})
.collect()
}
fn check_epk_bytes<F: FnOnce(&Self::EphemeralSecretKey) -> NoteValidity>(
note: &Note,
check: F,
@ -256,11 +317,11 @@ impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
&self,
pk_d: &Self::DiversifiedTransmissionKey,
esk: &Self::EphemeralSecretKey,
epk: &Self::EphemeralPublicKey,
ephemeral_key: &EphemeralKeyBytes,
plaintext: &[u8],
) -> Option<(Self::Note, Self::Recipient)> {
sapling_parse_note_plaintext_without_memo(&self, plaintext, |diversifier| {
if (diversifier.g_d()? * esk).to_bytes() == epk.to_bytes() {
if (diversifier.g_d()? * esk).to_bytes() == ephemeral_key.0 {
Some(*pk_d)
} else {
None
@ -272,7 +333,7 @@ impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
note.cmu()
}
fn extract_pk_d(op: &[u8; OUT_CIPHERTEXT_SIZE]) -> Option<Self::DiversifiedTransmissionKey> {
fn extract_pk_d(op: &[u8; OUT_PLAINTEXT_SIZE]) -> Option<Self::DiversifiedTransmissionKey> {
let pk_d = jubjub::SubgroupPoint::from_bytes(
op[0..32].try_into().expect("slice is the correct length"),
);
@ -284,7 +345,7 @@ impl<P: consensus::Parameters> Domain for SaplingDomain<P> {
}
}
fn extract_esk(op: &[u8; OUT_CIPHERTEXT_SIZE]) -> Option<Self::EphemeralSecretKey> {
fn extract_esk(op: &[u8; OUT_PLAINTEXT_SIZE]) -> Option<Self::EphemeralSecretKey> {
jubjub::Fr::from_repr(
op[32..OUT_PLAINTEXT_SIZE]
.try_into()
@ -383,7 +444,7 @@ pub fn try_sapling_output_recovery_with_ock<P: consensus::Parameters>(
params: &P,
height: BlockHeight,
ock: &OutgoingCipherKey,
output: &OutputDescription,
output: &OutputDescription<sapling::GrothProofBytes>,
) -> Option<(Note, PaymentAddress, MemoBytes)> {
let domain = SaplingDomain {
params: params.clone(),
@ -405,19 +466,14 @@ pub fn try_sapling_output_recovery<P: consensus::Parameters>(
params: &P,
height: BlockHeight,
ovk: &OutgoingViewingKey,
output: &OutputDescription,
output: &OutputDescription<sapling::GrothProofBytes>,
) -> Option<(Note, PaymentAddress, MemoBytes)> {
try_sapling_output_recovery_with_ock(
params,
let domain = SaplingDomain {
params: params.clone(),
height,
&prf_ock(
&ovk,
&output.cv,
&output.cmu,
&epk_bytes(&output.ephemeral_key),
),
output,
)
};
try_output_recovery_with_ovk(&domain, ovk, output, &output.cv, &output.out_ciphertext)
}
#[cfg(test)]
@ -431,8 +487,8 @@ mod tests {
use std::convert::TryInto;
use zcash_note_encryption::{
NoteEncryption, OutgoingCipherKey, ENC_CIPHERTEXT_SIZE, NOTE_PLAINTEXT_SIZE,
OUT_CIPHERTEXT_SIZE, OUT_PLAINTEXT_SIZE,
batch, EphemeralKeyBytes, NoteEncryption, OutgoingCipherKey, ENC_CIPHERTEXT_SIZE,
NOTE_PLAINTEXT_SIZE, OUT_CIPHERTEXT_SIZE, OUT_PLAINTEXT_SIZE,
};
use super::{
@ -455,7 +511,7 @@ mod tests {
},
transaction::components::{
amount::Amount,
sapling::{CompactOutputDescription, OutputDescription},
sapling::{self, CompactOutputDescription, OutputDescription},
GROTH_PROOF_SIZE,
},
};
@ -467,7 +523,7 @@ mod tests {
OutgoingViewingKey,
OutgoingCipherKey,
SaplingIvk,
OutputDescription,
OutputDescription<sapling::GrothProofBytes>,
) {
let ivk = SaplingIvk(jubjub::Fr::random(&mut rng));
@ -497,7 +553,11 @@ mod tests {
height: BlockHeight,
ivk: &SaplingIvk,
mut rng: &mut R,
) -> (OutgoingViewingKey, OutgoingCipherKey, OutputDescription) {
) -> (
OutgoingViewingKey,
OutgoingCipherKey,
OutputDescription<sapling::GrothProofBytes>,
) {
let diversifier = Diversifier([0; 11]);
let pk_d = diversifier.g_d().unwrap() * ivk.0;
let pa = PaymentAddress::from_parts_unchecked(diversifier, pk_d);
@ -524,12 +584,12 @@ mod tests {
&mut rng,
);
let epk = *ne.epk();
let ock = prf_ock(&ovk, &cv, &cmu, &epk_bytes(&epk));
let ock = prf_ock(&ovk, &cv, &cmu.to_repr(), &epk_bytes(&epk));
let output = OutputDescription {
cv,
cmu,
ephemeral_key: epk,
ephemeral_key: epk.to_bytes().into(),
enc_ciphertext: ne.encrypt_note_plaintext(),
out_ciphertext: ne.encrypt_outgoing_plaintext(&cv, &cmu, &mut rng),
zkproof: [0u8; GROTH_PROOF_SIZE],
@ -542,12 +602,12 @@ mod tests {
ovk: &OutgoingViewingKey,
cv: &jubjub::ExtendedPoint,
cmu: &bls12_381::Scalar,
epk: &jubjub::ExtendedPoint,
ephemeral_key: &EphemeralKeyBytes,
enc_ciphertext: &mut [u8; ENC_CIPHERTEXT_SIZE],
out_ciphertext: &[u8; OUT_CIPHERTEXT_SIZE],
modify_plaintext: impl Fn(&mut [u8; NOTE_PLAINTEXT_SIZE]),
) {
let ock = prf_ock(&ovk, &cv, &cmu, &epk_bytes(epk));
let ock = prf_ock(&ovk, &cv, &cmu.to_repr(), ephemeral_key);
let mut op = [0; OUT_CIPHERTEXT_SIZE];
assert_eq!(
@ -562,7 +622,7 @@ mod tests {
let esk = jubjub::Fr::from_repr(op[32..OUT_PLAINTEXT_SIZE].try_into().unwrap()).unwrap();
let shared_secret = sapling_ka_agree(&esk, &pk_d.into());
let key = kdf_sapling(shared_secret, &epk_bytes(&epk));
let key = kdf_sapling(shared_secret, ephemeral_key);
let mut plaintext = {
let mut buf = [0; ENC_CIPHERTEXT_SIZE];
@ -655,7 +715,7 @@ mod tests {
for &height in heights.iter() {
let (_, _, ivk, mut output) = random_enc_ciphertext(height, &mut rng);
output.ephemeral_key = jubjub::ExtendedPoint::random(&mut rng);
output.ephemeral_key = jubjub::ExtendedPoint::random(&mut rng).to_bytes().into();
assert_eq!(
try_sapling_note_decryption(&TEST_NETWORK, height, &ivk, &output,),
@ -820,7 +880,7 @@ mod tests {
for &height in heights.iter() {
let (_, _, ivk, mut output) = random_enc_ciphertext(height, &mut rng);
output.ephemeral_key = jubjub::ExtendedPoint::random(&mut rng);
output.ephemeral_key = jubjub::ExtendedPoint::random(&mut rng).to_bytes().into();
assert_eq!(
try_sapling_compact_note_decryption(
@ -1052,7 +1112,7 @@ mod tests {
for &height in heights.iter() {
let (ovk, ock, _, mut output) = random_enc_ciphertext(height, &mut rng);
output.ephemeral_key = jubjub::ExtendedPoint::random(&mut rng);
output.ephemeral_key = jubjub::ExtendedPoint::random(&mut rng).to_bytes().into();
assert_eq!(
try_sapling_output_recovery(&TEST_NETWORK, height, &ovk, &output,),
@ -1266,7 +1326,7 @@ mod tests {
let cv = read_point!(tv.cv);
let cmu = read_bls12_381_scalar!(tv.cmu);
let esk = read_jubjub_scalar!(tv.esk);
let epk = read_point!(tv.epk);
let ephemeral_key = EphemeralKeyBytes(tv.epk);
//
// Test the individual components
@ -1275,11 +1335,11 @@ mod tests {
let shared_secret = sapling_ka_agree(&esk, &pk_d.into());
assert_eq!(shared_secret.to_bytes(), tv.shared_secret);
let k_enc = kdf_sapling(shared_secret, &epk_bytes(&epk));
let k_enc = kdf_sapling(shared_secret, &ephemeral_key);
assert_eq!(k_enc.as_bytes(), tv.k_enc);
let ovk = OutgoingViewingKey(tv.ovk);
let ock = prf_ock(&ovk, &cv, &cmu, &epk_bytes(&epk));
let ock = prf_ock(&ovk, &cv, &cmu.to_repr(), &ephemeral_key);
assert_eq!(ock.as_ref(), tv.ock);
let to = PaymentAddress::from_parts(Diversifier(tv.default_d), pk_d).unwrap();
@ -1289,7 +1349,7 @@ mod tests {
let output = OutputDescription {
cv,
cmu,
ephemeral_key: epk,
ephemeral_key: ephemeral_key,
enc_ciphertext: tv.c_enc,
out_ciphertext: tv.c_out,
zkproof: [0u8; GROTH_PROOF_SIZE],
@ -1331,6 +1391,37 @@ mod tests {
None => panic!("Output recovery failed"),
}
match &batch::try_note_decryption(
&[ivk.clone()],
&[(
SaplingDomain::for_height(TEST_NETWORK, height),
output.clone(),
)],
)[..]
{
[Some((decrypted_note, decrypted_to, decrypted_memo))] => {
assert_eq!(decrypted_note, &note);
assert_eq!(decrypted_to, &to);
assert_eq!(&decrypted_memo.as_array()[..], &tv.memo[..]);
}
_ => panic!("Note decryption failed"),
}
match &batch::try_compact_note_decryption(
&[ivk.clone()],
&[(
SaplingDomain::for_height(TEST_NETWORK, height),
CompactOutputDescription::from(output.clone()),
)],
)[..]
{
[Some((decrypted_note, decrypted_to))] => {
assert_eq!(decrypted_note, &note);
assert_eq!(decrypted_to, &to);
}
_ => panic!("Note decryption failed"),
}
//
// Test encryption
//
@ -1350,4 +1441,41 @@ mod tests {
);
}
}
#[test]
fn batching() {
let mut rng = OsRng;
let height = TEST_NETWORK.activation_height(Canopy).unwrap();
// Test batch trial-decryption with multiple IVKs and outputs.
let invalid_ivk = SaplingIvk(jubjub::Fr::random(rng));
let valid_ivk = SaplingIvk(jubjub::Fr::random(rng));
let outputs: Vec<_> = (0..10)
.map(|_| {
(
SaplingDomain::for_height(TEST_NETWORK, height),
random_enc_ciphertext_with(height, &valid_ivk, &mut rng).2,
)
})
.collect();
let res = batch::try_note_decryption(&[invalid_ivk.clone(), valid_ivk.clone()], &outputs);
assert_eq!(res.len(), 20);
// The batched trial decryptions with invalid_ivk failed.
assert_eq!(&res[..10], &vec![None; 10][..]);
for (result, (_, output)) in res[10..].iter().zip(outputs.iter()) {
// Confirm that the outputs should indeed have failed with invalid_ivk
assert_eq!(
try_sapling_note_decryption(&TEST_NETWORK, height, &invalid_ivk, output),
None
);
// Confirm the successful batched trial decryptions gave the same result.
assert!(result.is_some());
assert_eq!(
result,
&try_sapling_note_decryption(&TEST_NETWORK, height, &valid_ivk, output)
);
}
}
}

View File

@ -5,7 +5,7 @@
use ff::{Field, PrimeField};
use group::GroupEncoding;
use jubjub::{ExtendedPoint, SubgroupPoint};
use jubjub::{AffinePoint, ExtendedPoint, SubgroupPoint};
use rand_core::RngCore;
use std::io::{self, Read, Write};
use std::ops::{AddAssign, MulAssign, Neg};
@ -123,13 +123,27 @@ impl PublicKey {
}
pub fn verify(&self, msg: &[u8], sig: &Signature, p_g: SubgroupPoint) -> bool {
self.verify_with_zip216(msg, sig, p_g, true)
}
pub fn verify_with_zip216(
&self,
msg: &[u8],
sig: &Signature,
p_g: SubgroupPoint,
zip216_enabled: bool,
) -> bool {
// c = H*(Rbar || M)
let c = h_star(&sig.rbar[..], msg);
// Signature checks:
// R != invalid
let r = {
let r = ExtendedPoint::from_bytes(&sig.rbar);
let r = if zip216_enabled {
ExtendedPoint::from_bytes(&sig.rbar)
} else {
AffinePoint::from_bytes_pre_zip216_compatibility(sig.rbar).map(|p| p.to_extended())
};
if r.is_none().into() {
return false;
}

View File

@ -1,4 +1,5 @@
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use nonempty::NonEmpty;
use std::io::{self, Read, Write};
const MAX_SIZE: usize = 0x02000000;
@ -70,7 +71,7 @@ impl Vector {
F: Fn(&mut R) -> io::Result<E>,
{
let count = CompactSize::read(&mut reader)?;
(0..count).map(|_| func(&mut reader)).collect()
Array::read(reader, count, func)
}
pub fn write<W: Write, E, F>(mut writer: W, vec: &[E], func: F) -> io::Result<()>
@ -80,6 +81,40 @@ impl Vector {
CompactSize::write(&mut writer, vec.len())?;
vec.iter().try_for_each(|e| func(&mut writer, e))
}
pub fn write_nonempty<W: Write, E, F>(
mut writer: W,
vec: &NonEmpty<E>,
func: F,
) -> io::Result<()>
where
F: Fn(&mut W, &E) -> io::Result<()>,
{
CompactSize::write(&mut writer, vec.len())?;
vec.iter().try_for_each(|e| func(&mut writer, e))
}
}
pub struct Array;
impl Array {
pub fn read<R: Read, E, F>(mut reader: R, count: usize, func: F) -> io::Result<Vec<E>>
where
F: Fn(&mut R) -> io::Result<E>,
{
(0..count).map(|_| func(&mut reader)).collect()
}
pub fn write<W: Write, E, I: IntoIterator<Item = E>, F>(
mut writer: W,
vec: I,
func: F,
) -> io::Result<()>
where
F: Fn(&mut W, &E) -> io::Result<()>,
{
vec.into_iter().try_for_each(|e| func(&mut writer, &e))
}
}
pub struct Optional;

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,7 @@
//! Structs representing the components within Zcash transactions.
pub mod amount;
pub mod orchard;
pub mod sapling;
pub mod sprout;
pub mod transparent;
@ -13,7 +14,7 @@ pub use self::{
};
#[cfg(feature = "zfuture")]
pub use self::tze::{TzeIn, TzeOut, TzeOutPoint};
pub use self::tze::{TzeIn, TzeOut};
// π_A + π_B + π_C
pub const GROTH_PROOF_SIZE: usize = 48 + 96 + 48;

View File

@ -1,5 +1,8 @@
use std::convert::TryFrom;
use std::iter::Sum;
use std::ops::{Add, AddAssign, Sub, SubAssign};
use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};
use orchard::value as orchard;
pub const COIN: i64 = 1_0000_0000;
pub const MAX_MONEY: i64 = 21_000_000 * COIN;
@ -101,12 +104,26 @@ impl Amount {
}
}
impl TryFrom<i64> for Amount {
type Error = ();
fn try_from(value: i64) -> Result<Self, ()> {
Amount::from_i64(value)
}
}
impl From<Amount> for i64 {
fn from(amount: Amount) -> i64 {
amount.0
}
}
impl From<&Amount> for i64 {
fn from(amount: &Amount) -> i64 {
amount.0
}
}
impl From<Amount> for u64 {
fn from(amount: Amount) -> u64 {
amount.0 as u64
@ -114,36 +131,74 @@ impl From<Amount> for u64 {
}
impl Add<Amount> for Amount {
type Output = Amount;
type Output = Option<Amount>;
fn add(self, rhs: Amount) -> Amount {
Amount::from_i64(self.0 + rhs.0).expect("addition should remain in range")
fn add(self, rhs: Amount) -> Option<Amount> {
Amount::from_i64(self.0 + rhs.0).ok()
}
}
impl Add<Amount> for Option<Amount> {
type Output = Self;
fn add(self, rhs: Amount) -> Option<Amount> {
self.and_then(|lhs| lhs + rhs)
}
}
impl AddAssign<Amount> for Amount {
fn add_assign(&mut self, rhs: Amount) {
*self = *self + rhs
*self = (*self + rhs).expect("Addition must produce a valid amount value.")
}
}
impl Sub<Amount> for Amount {
type Output = Amount;
type Output = Option<Amount>;
fn sub(self, rhs: Amount) -> Amount {
Amount::from_i64(self.0 - rhs.0).expect("subtraction should remain in range")
fn sub(self, rhs: Amount) -> Option<Amount> {
Amount::from_i64(self.0 - rhs.0).ok()
}
}
impl Sub<Amount> for Option<Amount> {
type Output = Self;
fn sub(self, rhs: Amount) -> Option<Amount> {
self.and_then(|lhs| lhs - rhs)
}
}
impl SubAssign<Amount> for Amount {
fn sub_assign(&mut self, rhs: Amount) {
*self = *self - rhs
*self = (*self - rhs).expect("Subtraction must produce a valid amount value.")
}
}
impl Sum for Amount {
fn sum<I: Iterator<Item = Amount>>(iter: I) -> Amount {
iter.fold(Amount::zero(), Add::add)
impl Sum<Amount> for Option<Amount> {
fn sum<I: Iterator<Item = Amount>>(iter: I) -> Self {
iter.fold(Some(Amount::zero()), |acc, a| acc? + a)
}
}
impl Neg for Amount {
type Output = Self;
fn neg(self) -> Self {
Amount(-self.0)
}
}
impl From<Amount> for orchard::ValueSum {
fn from(v: Amount) -> Self {
orchard::ValueSum::from_raw(v.0)
}
}
impl TryFrom<orchard::ValueSum> for Amount {
type Error = ();
fn try_from(v: orchard::ValueSum) -> Result<Amount, Self::Error> {
i64::try_from(v).map_err(|_| ()).and_then(Amount::try_from)
}
}
@ -153,11 +208,23 @@ pub mod testing {
use super::{Amount, MAX_MONEY};
prop_compose! {
pub fn arb_amount()(amt in -MAX_MONEY..MAX_MONEY) -> Amount {
Amount::from_i64(amt).unwrap()
}
}
prop_compose! {
pub fn arb_nonnegative_amount()(amt in 0i64..MAX_MONEY) -> Amount {
Amount::from_i64(amt).unwrap()
}
}
prop_compose! {
pub fn arb_positive_amount()(amt in 1i64..MAX_MONEY) -> Amount {
Amount::from_i64(amt).unwrap()
}
}
}
#[cfg(test)]
@ -213,10 +280,9 @@ mod tests {
}
#[test]
#[should_panic]
fn add_panics_on_overflow() {
fn add_overflow() {
let v = Amount(MAX_MONEY);
let _sum = v + Amount(1);
assert_eq!(v + Amount(1), None)
}
#[test]
@ -227,10 +293,9 @@ mod tests {
}
#[test]
#[should_panic]
fn sub_panics_on_underflow() {
fn sub_underflow() {
let v = Amount(-MAX_MONEY);
let _diff = v - Amount(1);
assert_eq!(v - Amount(1), None)
}
#[test]

View File

@ -0,0 +1,281 @@
/// Functions for parsing & serialization of Orchard transaction components.
use std::convert::TryFrom;
use std::io::{self, Read, Write};
use byteorder::{ReadBytesExt, WriteBytesExt};
use nonempty::NonEmpty;
use orchard::{
bundle::{Action, Authorization, Authorized, Flags},
note::{ExtractedNoteCommitment, Nullifier, TransmittedNoteCiphertext},
primitives::redpallas::{self, SigType, Signature, SpendAuth, VerificationKey},
value::ValueCommitment,
Anchor,
};
use super::Amount;
use crate::serialize::{Array, CompactSize};
use crate::{serialize::Vector, transaction::Transaction};
pub const FLAG_SPENDS_ENABLED: u8 = 0b0000_0001;
pub const FLAG_OUTPUTS_ENABLED: u8 = 0b0000_0010;
pub const FLAGS_EXPECTED_UNSET: u8 = !(FLAG_SPENDS_ENABLED | FLAG_OUTPUTS_ENABLED);
/// Marker for a bundle with no proofs or signatures.
#[derive(Debug)]
pub struct Unauthorized;
impl Authorization for Unauthorized {
type SpendAuth = ();
}
/// Reads an [`orchard::Bundle`] from a v5 transaction format.
pub fn read_v5_bundle<R: Read>(
mut reader: R,
) -> io::Result<Option<orchard::Bundle<Authorized, Amount>>> {
#[allow(clippy::redundant_closure)]
let actions_without_auth = Vector::read(&mut reader, |r| read_action_without_auth(r))?;
if actions_without_auth.is_empty() {
Ok(None)
} else {
let flags = read_flags(&mut reader)?;
let value_balance = Transaction::read_amount(&mut reader)?;
let anchor = read_anchor(&mut reader)?;
let proof_bytes = Vector::read(&mut reader, |r| r.read_u8())?;
let actions = NonEmpty::from_vec(
actions_without_auth
.into_iter()
.map(|act| act.try_map(|_| read_signature::<_, redpallas::SpendAuth>(&mut reader)))
.collect::<Result<Vec<_>, _>>()?,
)
.expect("A nonzero number of actions was read from the transaction data.");
let binding_signature = read_signature::<_, redpallas::Binding>(&mut reader)?;
let authorization = orchard::bundle::Authorized::from_parts(
orchard::Proof::new(proof_bytes),
binding_signature,
);
Ok(Some(orchard::Bundle::from_parts(
actions,
flags,
value_balance,
anchor,
authorization,
)))
}
}
pub fn read_value_commitment<R: Read>(mut reader: R) -> io::Result<ValueCommitment> {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let cv = ValueCommitment::from_bytes(&bytes);
if cv.is_none().into() {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
"invalid Pallas point for value commitment".to_owned(),
))
} else {
Ok(cv.unwrap())
}
}
pub fn read_nullifier<R: Read>(mut reader: R) -> io::Result<Nullifier> {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let nullifier_ctopt = Nullifier::from_bytes(&bytes);
if nullifier_ctopt.is_none().into() {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
"invalid Pallas point for nullifier".to_owned(),
))
} else {
Ok(nullifier_ctopt.unwrap())
}
}
pub fn read_verification_key<R: Read>(mut reader: R) -> io::Result<VerificationKey<SpendAuth>> {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
VerificationKey::try_from(bytes).map_err(|_| {
io::Error::new(
io::ErrorKind::InvalidInput,
"invalid verification key".to_owned(),
)
})
}
pub fn read_cmx<R: Read>(mut reader: R) -> io::Result<ExtractedNoteCommitment> {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let cmx = ExtractedNoteCommitment::from_bytes(&bytes);
Option::from(cmx).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
"invalid Pallas base for field cmx".to_owned(),
)
})
}
pub fn read_note_ciphertext<R: Read>(mut reader: R) -> io::Result<TransmittedNoteCiphertext> {
let mut tnc = TransmittedNoteCiphertext {
epk_bytes: [0u8; 32],
enc_ciphertext: [0u8; 580],
out_ciphertext: [0u8; 80],
};
reader.read_exact(&mut tnc.epk_bytes)?;
reader.read_exact(&mut tnc.enc_ciphertext)?;
reader.read_exact(&mut tnc.out_ciphertext)?;
Ok(tnc)
}
pub fn read_action_without_auth<R: Read>(mut reader: R) -> io::Result<Action<()>> {
let cv_net = read_value_commitment(&mut reader)?;
let nf_old = read_nullifier(&mut reader)?;
let rk = read_verification_key(&mut reader)?;
let cmx = read_cmx(&mut reader)?;
let encrypted_note = read_note_ciphertext(&mut reader)?;
Ok(Action::from_parts(
nf_old,
rk,
cmx,
encrypted_note,
cv_net,
(),
))
}
pub fn read_flags<R: Read>(mut reader: R) -> io::Result<Flags> {
let mut byte = [0u8; 1];
reader.read_exact(&mut byte)?;
Flags::from_byte(byte[0])
}
pub fn read_anchor<R: Read>(mut reader: R) -> io::Result<Anchor> {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
Anchor::from_bytes(bytes).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
"invalid Orchard anchor".to_owned(),
)
})
}
pub fn read_signature<R: Read, T: SigType>(mut reader: R) -> io::Result<Signature<T>> {
let mut bytes = [0u8; 64];
reader.read_exact(&mut bytes)?;
Ok(Signature::from(bytes))
}
/// Writes an [`orchard::Bundle`] in the v5 transaction format.
pub fn write_v5_bundle<W: Write>(
bundle: Option<&orchard::Bundle<Authorized, Amount>>,
mut writer: W,
) -> io::Result<()> {
if let Some(bundle) = &bundle {
Vector::write_nonempty(&mut writer, bundle.actions(), |w, a| {
write_action_without_auth(w, a)
})?;
writer.write_all(&[bundle.flags().to_byte()])?;
writer.write_all(&bundle.value_balance().to_i64_le_bytes())?;
writer.write_all(&bundle.anchor().to_bytes())?;
Vector::write(
&mut writer,
bundle.authorization().proof().as_ref(),
|w, b| w.write_u8(*b),
)?;
Array::write(
&mut writer,
bundle.actions().iter().map(|a| a.authorization()),
|w, auth| w.write_all(&<[u8; 64]>::from(*auth)),
)?;
writer.write_all(&<[u8; 64]>::from(
bundle.authorization().binding_signature(),
))?;
} else {
CompactSize::write(&mut writer, 0)?;
}
Ok(())
}
pub fn write_value_commitment<W: Write>(mut writer: W, cv: &ValueCommitment) -> io::Result<()> {
writer.write_all(&cv.to_bytes())
}
pub fn write_nullifier<W: Write>(mut writer: W, nf: &Nullifier) -> io::Result<()> {
writer.write_all(&nf.to_bytes())
}
pub fn write_verification_key<W: Write>(
mut writer: W,
rk: &redpallas::VerificationKey<SpendAuth>,
) -> io::Result<()> {
writer.write_all(&<[u8; 32]>::from(rk))
}
pub fn write_cmx<W: Write>(mut writer: W, cmx: &ExtractedNoteCommitment) -> io::Result<()> {
writer.write_all(&cmx.to_bytes())
}
pub fn write_note_ciphertext<W: Write>(
mut writer: W,
nc: &TransmittedNoteCiphertext,
) -> io::Result<()> {
writer.write_all(&nc.epk_bytes)?;
writer.write_all(&nc.enc_ciphertext)?;
writer.write_all(&nc.out_ciphertext)
}
pub fn write_action_without_auth<W: Write>(
mut writer: W,
act: &Action<<Authorized as Authorization>::SpendAuth>,
) -> io::Result<()> {
write_value_commitment(&mut writer, &act.cv_net())?;
write_nullifier(&mut writer, &act.nullifier())?;
write_verification_key(&mut writer, &act.rk())?;
write_cmx(&mut writer, &act.cmx())?;
write_note_ciphertext(&mut writer, &act.encrypted_note())?;
Ok(())
}
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
use proptest::prelude::*;
use orchard::bundle::{
testing::{self as t_orch},
Authorized, Bundle,
};
use crate::transaction::{
components::amount::{testing::arb_amount, Amount},
TxVersion,
};
prop_compose! {
pub fn arb_bundle(n_actions: usize)(
orchard_value_balance in arb_amount(),
bundle in t_orch::arb_bundle(n_actions)
) -> Bundle<Authorized, Amount> {
// overwrite the value balance, as we can't guarantee that the
// value doesn't exceed the MAX_MONEY bounds.
bundle.try_map_value_balance::<_, (), _>(|_| Ok(orchard_value_balance)).unwrap()
}
}
pub fn arb_bundle_for_version(
v: TxVersion,
) -> impl Strategy<Value = Option<Bundle<Authorized, Amount>>> {
if v.has_orchard() {
Strategy::boxed((1usize..100).prop_flat_map(|n| prop::option::of(arb_bundle(n))))
} else {
Strategy::boxed(Just(None))
}
}
}

View File

@ -1,34 +1,67 @@
use core::fmt::Debug;
use ff::PrimeField;
use group::GroupEncoding;
use std::io::{self, Read, Write};
use zcash_note_encryption::ShieldedOutput;
use zcash_note_encryption::{EphemeralKeyBytes, ShieldedOutput, COMPACT_NOTE_SIZE};
use crate::{
consensus,
sapling::{
note_encryption::SaplingDomain,
redjubjub::{PublicKey, Signature},
redjubjub::{self, PublicKey, Signature},
Nullifier,
},
};
use zcash_note_encryption::COMPACT_NOTE_SIZE;
use super::{amount::Amount, GROTH_PROOF_SIZE};
use super::GROTH_PROOF_SIZE;
pub type GrothProofBytes = [u8; GROTH_PROOF_SIZE];
pub mod builder;
pub trait Authorization: Debug {
type Proof: Clone + Debug;
type AuthSig: Clone + Debug;
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Unproven;
impl Authorization for Unproven {
type Proof = ();
type AuthSig = ();
}
#[derive(Debug, Copy, Clone)]
pub struct Authorized {
pub binding_sig: redjubjub::Signature,
}
impl Authorization for Authorized {
type Proof = GrothProofBytes;
type AuthSig = redjubjub::Signature;
}
#[derive(Debug, Clone)]
pub struct Bundle<A: Authorization> {
pub shielded_spends: Vec<SpendDescription<A>>,
pub shielded_outputs: Vec<OutputDescription<A::Proof>>,
pub value_balance: Amount,
pub authorization: A,
}
#[derive(Clone)]
pub struct SpendDescription {
pub struct SpendDescription<A: Authorization> {
pub cv: jubjub::ExtendedPoint,
pub anchor: bls12_381::Scalar,
pub nullifier: Nullifier,
pub rk: PublicKey,
pub zkproof: [u8; GROTH_PROOF_SIZE],
pub spend_auth_sig: Option<Signature>,
pub zkproof: A::Proof,
pub spend_auth_sig: A::AuthSig,
}
impl std::fmt::Debug for SpendDescription {
impl<A: Authorization> std::fmt::Debug for SpendDescription<A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
write!(
f,
@ -38,49 +71,84 @@ impl std::fmt::Debug for SpendDescription {
}
}
impl SpendDescription {
pub fn read<R: Read>(mut reader: &mut R) -> io::Result<Self> {
// Consensus rules (§4.4):
// - Canonical encoding is enforced here.
// - "Not small order" is enforced in SaplingVerificationContext::check_spend()
// (located in zcash_proofs::sapling::verifier).
let cv = {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let cv = jubjub::ExtendedPoint::from_bytes(&bytes);
if cv.is_none().into() {
return Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid cv"));
}
cv.unwrap()
};
/// Consensus rules (§4.4) & (§4.5):
/// - Canonical encoding is enforced here.
/// - "Not small order" is enforced in SaplingVerificationContext::(check_spend()/check_output())
/// (located in zcash_proofs::sapling::verifier).
pub fn read_point<R: Read>(mut reader: R, field: &str) -> io::Result<jubjub::ExtendedPoint> {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let point = jubjub::ExtendedPoint::from_bytes(&bytes);
// Consensus rule (§7.3): Canonical encoding is enforced here
let anchor = {
let mut f = [0u8; 32];
reader.read_exact(&mut f)?;
bls12_381::Scalar::from_repr(f)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "anchor not in field"))?
};
if point.is_none().into() {
Err(io::Error::new(
io::ErrorKind::InvalidInput,
format!("invalid {}", field),
))
} else {
Ok(point.unwrap())
}
}
/// Consensus rules (§7.3) & (§7.4):
/// - Canonical encoding is enforced here
pub fn read_base<R: Read>(mut reader: R, field: &str) -> io::Result<bls12_381::Scalar> {
let mut f = [0u8; 32];
reader.read_exact(&mut f)?;
bls12_381::Scalar::from_repr(f).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
format!("{} not in field", field),
)
})
}
/// Reads the raw bytes of a Groth16 proof from `reader`.
///
/// Consensus rules (§4.4) & (§4.5):
/// - Canonical encoding is enforced by the API of SaplingVerificationContext::check_spend()
///   and SaplingVerificationContext::check_output() due to the need to parse this into a
///   bellman::groth16::Proof.
/// - Proof validity is enforced in SaplingVerificationContext::check_spend()
///   and SaplingVerificationContext::check_output()
pub fn read_zkproof<R: Read>(mut reader: R) -> io::Result<GrothProofBytes> {
    let mut proof_bytes = [0u8; GROTH_PROOF_SIZE];
    reader.read_exact(&mut proof_bytes).map(|()| proof_bytes)
}
impl SpendDescription<Authorized> {
pub fn read_nullifier<R: Read>(mut reader: R) -> io::Result<Nullifier> {
let mut nullifier = Nullifier([0u8; 32]);
reader.read_exact(&mut nullifier.0)?;
Ok(nullifier)
}
// Consensus rules (§4.4):
/// Consensus rules (§4.4):
/// - Canonical encoding is enforced here.
/// - "Not small order" is enforced in SaplingVerificationContext::check_spend()
pub fn read_rk<R: Read>(mut reader: R) -> io::Result<PublicKey> {
PublicKey::read(&mut reader)
}
/// Consensus rules (§4.4):
/// - Canonical encoding is enforced here.
/// - Signature validity is enforced in SaplingVerificationContext::check_spend()
pub fn read_spend_auth_sig<R: Read>(mut reader: R) -> io::Result<Signature> {
Signature::read(&mut reader)
}
pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
// Consensus rules (§4.4) & (§4.5):
// - Canonical encoding is enforced here.
// - "Not small order" is enforced in SaplingVerificationContext::check_spend()
let rk = PublicKey::read(&mut reader)?;
// Consensus rules (§4.4):
// - Canonical encoding is enforced by the API of SaplingVerificationContext::check_spend()
// due to the need to parse this into a bellman::groth16::Proof.
// - Proof validity is enforced in SaplingVerificationContext::check_spend()
let mut zkproof = [0u8; GROTH_PROOF_SIZE];
reader.read_exact(&mut zkproof)?;
// Consensus rules (§4.4):
// - Canonical encoding is enforced here.
// - Signature validity is enforced in SaplingVerificationContext::check_spend()
let spend_auth_sig = Some(Signature::read(&mut reader)?);
// - "Not small order" is enforced in SaplingVerificationContext::(check_spend()/check_output())
// (located in zcash_proofs::sapling::verifier).
let cv = read_point(&mut reader, "cv")?;
// Consensus rules (§7.3) & (§7.4):
// - Canonical encoding is enforced here
let anchor = read_base(&mut reader, "anchor")?;
let nullifier = Self::read_nullifier(&mut reader)?;
let rk = Self::read_rk(&mut reader)?;
let zkproof = read_zkproof(&mut reader)?;
let spend_auth_sig = Self::read_spend_auth_sig(&mut reader)?;
Ok(SpendDescription {
cv,
@ -92,35 +160,68 @@ impl SpendDescription {
})
}
pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
pub fn write_v4<W: Write>(&self, mut writer: W) -> io::Result<()> {
writer.write_all(&self.cv.to_bytes())?;
writer.write_all(self.anchor.to_repr().as_ref())?;
writer.write_all(&self.nullifier.0)?;
self.rk.write(&mut writer)?;
writer.write_all(&self.zkproof)?;
match self.spend_auth_sig {
Some(sig) => sig.write(&mut writer),
None => Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Missing spend auth signature",
)),
self.spend_auth_sig.write(&mut writer)
}
pub fn write_v5_without_witness_data<W: Write>(&self, mut writer: W) -> io::Result<()> {
writer.write_all(&self.cv.to_bytes())?;
writer.write_all(&self.nullifier.0)?;
self.rk.write(&mut writer)
}
}
/// The elements of a Sapling spend description that are serialized inline in a
/// v5 transaction; the anchor, the zero-knowledge proof, and the spend
/// authorization signature are parsed separately and re-attached via
/// [`SpendDescriptionV5::into_spend_description`].
#[derive(Clone)]
pub struct SpendDescriptionV5 {
    pub cv: jubjub::ExtendedPoint,
    pub nullifier: Nullifier,
    pub rk: PublicKey,
}
impl SpendDescriptionV5 {
    /// Parses the inline elements of a v5 spend description from `reader`.
    ///
    /// Canonical-encoding checks for `cv` and `rk` are performed by
    /// [`read_point`] and [`SpendDescription::read_rk`] respectively.
    //
    // Takes `reader` by value (like the sibling `read_point`/`read_base`
    // helpers) rather than the previous `&mut R`, which created a needless
    // `&mut &mut R` at each call; callers passing `&mut r` still compile.
    pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
        let cv = read_point(&mut reader, "cv")?;
        let nullifier = SpendDescription::read_nullifier(&mut reader)?;
        let rk = SpendDescription::read_rk(&mut reader)?;

        Ok(SpendDescriptionV5 { cv, nullifier, rk })
    }

    /// Combines these inline elements with the separately-parsed anchor,
    /// proof, and spend authorization signature to produce a complete,
    /// authorized spend description.
    pub fn into_spend_description(
        self,
        anchor: bls12_381::Scalar,
        zkproof: GrothProofBytes,
        spend_auth_sig: Signature,
    ) -> SpendDescription<Authorized> {
        SpendDescription {
            cv: self.cv,
            anchor,
            nullifier: self.nullifier,
            rk: self.rk,
            zkproof,
            spend_auth_sig,
        }
    }
}
#[derive(Clone)]
pub struct OutputDescription {
pub struct OutputDescription<Proof> {
pub cv: jubjub::ExtendedPoint,
pub cmu: bls12_381::Scalar,
pub ephemeral_key: jubjub::ExtendedPoint,
pub ephemeral_key: EphemeralKeyBytes,
pub enc_ciphertext: [u8; 580],
pub out_ciphertext: [u8; 80],
pub zkproof: [u8; GROTH_PROOF_SIZE],
pub zkproof: Proof,
}
impl<P: consensus::Parameters> ShieldedOutput<SaplingDomain<P>> for OutputDescription {
fn epk(&self) -> &jubjub::ExtendedPoint {
&self.ephemeral_key
impl<P: consensus::Parameters, A> ShieldedOutput<SaplingDomain<P>> for OutputDescription<A> {
fn ephemeral_key(&self) -> EphemeralKeyBytes {
self.ephemeral_key.clone()
}
fn cmstar_bytes(&self) -> [u8; 32] {
@ -132,7 +233,7 @@ impl<P: consensus::Parameters> ShieldedOutput<SaplingDomain<P>> for OutputDescri
}
}
impl std::fmt::Debug for OutputDescription {
impl<A> std::fmt::Debug for OutputDescription<A> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
write!(
f,
@ -142,57 +243,29 @@ impl std::fmt::Debug for OutputDescription {
}
}
impl OutputDescription {
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
impl OutputDescription<GrothProofBytes> {
pub fn read<R: Read>(mut reader: &mut R) -> io::Result<Self> {
// Consensus rules (§4.5):
// - Canonical encoding is enforced here.
// - "Not small order" is enforced in SaplingVerificationContext::check_output()
// (located in zcash_proofs::sapling::verifier).
let cv = {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let cv = jubjub::ExtendedPoint::from_bytes(&bytes);
if cv.is_none().into() {
return Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid cv"));
}
cv.unwrap()
};
let cv = read_point(&mut reader, "cv")?;
// Consensus rule (§7.4): Canonical encoding is enforced here
let cmu = {
let mut f = [0u8; 32];
reader.read_exact(&mut f)?;
bls12_381::Scalar::from_repr(f)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "cmu not in field"))?
};
let cmu = read_base(&mut reader, "cmu")?;
// Consensus rules (§4.5):
// - Canonical encoding is enforced here.
// - Canonical encoding is enforced in librustzcash_sapling_check_output by zcashd
// - "Not small order" is enforced in SaplingVerificationContext::check_output()
let ephemeral_key = {
let mut bytes = [0u8; 32];
reader.read_exact(&mut bytes)?;
let ephemeral_key = jubjub::ExtendedPoint::from_bytes(&bytes);
if ephemeral_key.is_none().into() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"invalid ephemeral_key",
));
}
ephemeral_key.unwrap()
};
let mut ephemeral_key = EphemeralKeyBytes([0u8; 32]);
reader.read_exact(&mut ephemeral_key.0)?;
let mut enc_ciphertext = [0u8; 580];
let mut out_ciphertext = [0u8; 80];
reader.read_exact(&mut enc_ciphertext)?;
reader.read_exact(&mut out_ciphertext)?;
// Consensus rules (§4.5):
// - Canonical encoding is enforced by the API of SaplingVerificationContext::check_output()
// due to the need to parse this into a bellman::groth16::Proof.
// - Proof validity is enforced in SaplingVerificationContext::check_output()
let mut zkproof = [0u8; GROTH_PROOF_SIZE];
reader.read_exact(&mut zkproof)?;
let zkproof = read_zkproof(&mut reader)?;
Ok(OutputDescription {
cv,
@ -204,26 +277,83 @@ impl OutputDescription {
})
}
pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
pub fn write_v4<W: Write>(&self, mut writer: W) -> io::Result<()> {
writer.write_all(&self.cv.to_bytes())?;
writer.write_all(self.cmu.to_repr().as_ref())?;
writer.write_all(&self.ephemeral_key.to_bytes())?;
writer.write_all(self.ephemeral_key.as_ref())?;
writer.write_all(&self.enc_ciphertext)?;
writer.write_all(&self.out_ciphertext)?;
writer.write_all(&self.zkproof)
}
pub fn write_v5_without_proof<W: Write>(&self, mut writer: W) -> io::Result<()> {
writer.write_all(&self.cv.to_bytes())?;
writer.write_all(self.cmu.to_repr().as_ref())?;
writer.write_all(self.ephemeral_key.as_ref())?;
writer.write_all(&self.enc_ciphertext)?;
writer.write_all(&self.out_ciphertext)
}
}
/// The elements of a Sapling output description that are serialized inline in
/// a v5 transaction; the zero-knowledge proof is parsed separately and
/// re-attached via [`OutputDescriptionV5::into_output_description`].
#[derive(Clone)]
pub struct OutputDescriptionV5 {
    pub cv: jubjub::ExtendedPoint,
    pub cmu: bls12_381::Scalar,
    pub ephemeral_key: EphemeralKeyBytes,
    pub enc_ciphertext: [u8; 580],
    pub out_ciphertext: [u8; 80],
}
impl OutputDescriptionV5 {
    /// Parses the inline elements of a v5 output description from `reader`.
    ///
    /// Canonical-encoding checks for `cv` and `cmu` are performed by
    /// [`read_point`] and [`read_base`] respectively.
    //
    // Takes `reader` by value (like the sibling `read_point`/`read_base`
    // helpers) rather than the previous `&mut R`, which created a needless
    // `&mut &mut R` at each call; callers passing `&mut r` still compile.
    pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
        let cv = read_point(&mut reader, "cv")?;
        let cmu = read_base(&mut reader, "cmu")?;

        // Consensus rules (§4.5):
        // - Canonical encoding is enforced in librustzcash_sapling_check_output by zcashd
        // - "Not small order" is enforced in SaplingVerificationContext::check_output()
        let mut ephemeral_key = EphemeralKeyBytes([0u8; 32]);
        reader.read_exact(&mut ephemeral_key.0)?;

        let mut enc_ciphertext = [0u8; 580];
        let mut out_ciphertext = [0u8; 80];
        reader.read_exact(&mut enc_ciphertext)?;
        reader.read_exact(&mut out_ciphertext)?;

        Ok(OutputDescriptionV5 {
            cv,
            cmu,
            ephemeral_key,
            enc_ciphertext,
            out_ciphertext,
        })
    }

    /// Attaches the separately-parsed proof to these inline elements,
    /// producing a complete output description.
    pub fn into_output_description(
        self,
        zkproof: GrothProofBytes,
    ) -> OutputDescription<GrothProofBytes> {
        OutputDescription {
            cv: self.cv,
            cmu: self.cmu,
            ephemeral_key: self.ephemeral_key,
            enc_ciphertext: self.enc_ciphertext,
            out_ciphertext: self.out_ciphertext,
            zkproof,
        }
    }
}
pub struct CompactOutputDescription {
pub epk: jubjub::ExtendedPoint,
pub ephemeral_key: EphemeralKeyBytes,
pub cmu: bls12_381::Scalar,
pub enc_ciphertext: Vec<u8>,
}
impl From<OutputDescription> for CompactOutputDescription {
fn from(out: OutputDescription) -> CompactOutputDescription {
impl<A> From<OutputDescription<A>> for CompactOutputDescription {
fn from(out: OutputDescription<A>) -> CompactOutputDescription {
CompactOutputDescription {
epk: out.ephemeral_key,
ephemeral_key: out.ephemeral_key,
cmu: out.cmu,
enc_ciphertext: out.enc_ciphertext[..COMPACT_NOTE_SIZE].to_vec(),
}
@ -231,8 +361,8 @@ impl From<OutputDescription> for CompactOutputDescription {
}
impl<P: consensus::Parameters> ShieldedOutput<SaplingDomain<P>> for CompactOutputDescription {
fn epk(&self) -> &jubjub::ExtendedPoint {
&self.epk
fn ephemeral_key(&self) -> EphemeralKeyBytes {
self.ephemeral_key.clone()
}
fn cmstar_bytes(&self) -> [u8; 32] {
@ -243,3 +373,127 @@ impl<P: consensus::Parameters> ShieldedOutput<SaplingDomain<P>> for CompactOutpu
&self.enc_ciphertext
}
}
/// Property-testing generators for Sapling transaction components.
///
/// These produce structurally valid but cryptographically meaningless data,
/// intended only for serialization round-trip testing.
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
    use ff::Field;
    use group::{Group, GroupEncoding};
    use proptest::collection::vec;
    use proptest::prelude::*;
    use rand::{rngs::StdRng, SeedableRng};
    use std::convert::TryFrom;

    use crate::{
        constants::{SPENDING_KEY_GENERATOR, VALUE_COMMITMENT_RANDOMNESS_GENERATOR},
        sapling::{
            redjubjub::{PrivateKey, PublicKey},
            Nullifier,
        },
        transaction::{
            components::{amount::testing::arb_amount, GROTH_PROOF_SIZE},
            TxVersion,
        },
    };

    use super::{Authorized, Bundle, GrothProofBytes, OutputDescription, SpendDescription};

    prop_compose! {
        /// Produces an arbitrary Jubjub point by multiplying the generator by a
        /// scalar derived deterministically from a proptest-supplied RNG seed.
        fn arb_extended_point()(rng_seed in prop::array::uniform32(any::<u8>())) -> jubjub::ExtendedPoint {
            let mut rng = StdRng::from_seed(rng_seed);
            let scalar = jubjub::Scalar::random(&mut rng);
            jubjub::ExtendedPoint::generator() * scalar
        }
    }

    prop_compose! {
        /// produce a spend description with invalid data (useful only for serialization
        /// roundtrip testing).
        fn arb_spend_description()(
            cv in arb_extended_point(),
            anchor in vec(any::<u8>(), 64)
                .prop_map(|v| <[u8;64]>::try_from(v.as_slice()).unwrap())
                .prop_map(|v| bls12_381::Scalar::from_bytes_wide(&v)),
            nullifier in prop::array::uniform32(any::<u8>())
                .prop_map(|v| Nullifier::from_slice(&v).unwrap()),
            zkproof in vec(any::<u8>(), GROTH_PROOF_SIZE)
                .prop_map(|v| <[u8;GROTH_PROOF_SIZE]>::try_from(v.as_slice()).unwrap()),
            rng_seed in prop::array::uniform32(prop::num::u8::ANY),
            fake_sighash_bytes in prop::array::uniform32(prop::num::u8::ANY),
        ) -> SpendDescription<Authorized> {
            let mut rng = StdRng::from_seed(rng_seed);
            let sk1 = PrivateKey(jubjub::Fr::random(&mut rng));
            let rk = PublicKey::from_private(&sk1, SPENDING_KEY_GENERATOR);
            // The signature is over random "sighash" bytes, so it will not
            // verify against any real transaction.
            SpendDescription {
                cv,
                anchor,
                nullifier,
                rk,
                zkproof,
                spend_auth_sig: sk1.sign(&fake_sighash_bytes, &mut rng, SPENDING_KEY_GENERATOR),
            }
        }
    }

    prop_compose! {
        /// produce an output description with invalid data (useful only for serialization
        /// roundtrip testing).
        pub fn arb_output_description()(
            cv in arb_extended_point(),
            cmu in vec(any::<u8>(), 64)
                .prop_map(|v| <[u8;64]>::try_from(v.as_slice()).unwrap())
                .prop_map(|v| bls12_381::Scalar::from_bytes_wide(&v)),
            enc_ciphertext in vec(any::<u8>(), 580)
                .prop_map(|v| <[u8;580]>::try_from(v.as_slice()).unwrap()),
            epk in arb_extended_point(),
            out_ciphertext in vec(any::<u8>(), 80)
                .prop_map(|v| <[u8;80]>::try_from(v.as_slice()).unwrap()),
            zkproof in vec(any::<u8>(), GROTH_PROOF_SIZE)
                .prop_map(|v| <[u8;GROTH_PROOF_SIZE]>::try_from(v.as_slice()).unwrap()),
        ) -> OutputDescription<GrothProofBytes> {
            OutputDescription {
                cv,
                cmu,
                ephemeral_key: epk.to_bytes().into(),
                enc_ciphertext,
                out_ciphertext,
                zkproof,
            }
        }
    }

    prop_compose! {
        /// Produces an arbitrary Sapling bundle, or `None` when there are no
        /// spends and no outputs. The binding signature is over fake bytes and
        /// will not verify (useful only for serialization roundtrip testing).
        pub fn arb_bundle()(
            shielded_spends in vec(arb_spend_description(), 0..30),
            shielded_outputs in vec(arb_output_description(), 0..30),
            value_balance in arb_amount(),
            rng_seed in prop::array::uniform32(prop::num::u8::ANY),
            fake_bvk_bytes in prop::array::uniform32(prop::num::u8::ANY),
        ) -> Option<Bundle<Authorized>> {
            if shielded_spends.is_empty() && shielded_outputs.is_empty() {
                None
            } else {
                let mut rng = StdRng::from_seed(rng_seed);
                let bsk = PrivateKey(jubjub::Fr::random(&mut rng));
                Some(
                    Bundle {
                        shielded_spends,
                        shielded_outputs,
                        value_balance,
                        authorization: Authorized { binding_sig: bsk.sign(&fake_bvk_bytes, &mut rng, VALUE_COMMITMENT_RANDOMNESS_GENERATOR) },
                    }
                )
            }
        }
    }

    /// Produces an arbitrary Sapling bundle for transaction versions that
    /// support Sapling, and `Just(None)` for versions that do not.
    pub fn arb_bundle_for_version(
        v: TxVersion,
    ) -> impl Strategy<Value = Option<Bundle<Authorized>>> {
        if v.has_sapling() {
            Strategy::boxed(arb_bundle())
        } else {
            Strategy::boxed(Just(None))
        }
    }
}

View File

@ -0,0 +1,628 @@
//! Types and functions for building Sapling transaction components.
use std::fmt;
use std::sync::mpsc::Sender;
use ff::Field;
use group::GroupEncoding;
use rand::{seq::SliceRandom, RngCore};
use crate::{
consensus::{self, BlockHeight},
memo::MemoBytes,
merkle_tree::MerklePath,
sapling::{
keys::OutgoingViewingKey,
note_encryption::sapling_note_encryption,
prover::TxProver,
redjubjub::{PrivateKey, Signature},
spend_sig_internal,
util::generate_random_rseed_internal,
Diversifier, Node, Note, PaymentAddress,
},
transaction::{
builder::Progress,
components::{
amount::Amount,
sapling::{
Authorization, Authorized, Bundle, GrothProofBytes, OutputDescription,
SpendDescription,
},
},
},
zip32::ExtendedSpendingKey,
};
/// If there are any shielded inputs, always have at least two shielded outputs, padding
/// with dummy outputs if necessary. See <https://github.com/zcash/zcash/issues/3615>.
const MIN_SHIELDED_OUTPUTS: usize = 2;
/// Errors that can occur while building the Sapling components of a transaction.
#[derive(Debug, PartialEq)]
pub enum Error {
    /// A spend's Merkle path has a different anchor than previously-added spends.
    AnchorMismatch,
    /// The prover failed to create the binding signature over the bundle.
    BindingSig,
    /// A recipient address was invalid (e.g. its diversifier has no group element).
    InvalidAddress,
    /// An amount was out of range (e.g. a negative output value).
    InvalidAmount,
    /// The prover failed to create a Sapling spend proof.
    SpendProof,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::AnchorMismatch => {
                write!(f, "Anchor mismatch (anchors for all spends must be equal)")
            }
            Error::BindingSig => write!(f, "Failed to create bindingSig"),
            Error::InvalidAddress => write!(f, "Invalid address"),
            Error::InvalidAmount => write!(f, "Invalid amount"),
            Error::SpendProof => write!(f, "Failed to create Sapling spend proof"),
        }
    }
}

// `Debug` and `Display` are both available, so expose this as a standard
// error type; this lets callers use `?` with `Box<dyn Error>`/`anyhow`.
impl std::error::Error for Error {}
/// All of the information needed to construct (and later sign) a single
/// Sapling spend within a transaction.
#[derive(Debug, Clone)]
pub struct SpendDescriptionInfo {
    // Spending key whose `ask` will authorize this spend.
    extsk: ExtendedSpendingKey,
    // Diversifier of the address the note was sent to.
    diversifier: Diversifier,
    // The note being spent.
    note: Note,
    // Randomizer used for both the spend proof and the spend authorization
    // signature.
    alpha: jubjub::Fr,
    // Merkle path witnessing the note commitment in the commitment tree.
    merkle_path: MerklePath<Node>,
}
/// A Sapling output planned by the builder, held until proof creation in
/// [`SaplingBuilder::build`].
#[derive(Clone)]
struct SaplingOutput {
    /// `None` represents the `ovk = ⊥` case.
    ovk: Option<OutgoingViewingKey>,
    // Recipient address.
    to: PaymentAddress,
    // The new note this output will create.
    note: Note,
    memo: MemoBytes,
}
impl SaplingOutput {
    /// Constructs an output sending `value` to `to`, deriving a fresh `rseed`
    /// for the note.
    ///
    /// Returns [`Error::InvalidAddress`] if the address's diversifier has no
    /// associated group element, and [`Error::InvalidAmount`] if `value` is
    /// negative.
    fn new_internal<P: consensus::Parameters, R: RngCore>(
        params: &P,
        rng: &mut R,
        target_height: BlockHeight,
        ovk: Option<OutgoingViewingKey>,
        to: PaymentAddress,
        value: Amount,
        memo: MemoBytes,
    ) -> Result<Self, Error> {
        let g_d = to.g_d().ok_or(Error::InvalidAddress)?;
        if value.is_negative() {
            return Err(Error::InvalidAmount);
        }

        // rseed derivation depends on the network upgrade in effect at the
        // target height.
        let rseed = generate_random_rseed_internal(params, target_height, rng);

        let note = Note {
            g_d,
            pk_d: *to.pk_d(),
            value: value.into(),
            rseed,
        };

        Ok(SaplingOutput {
            ovk,
            to,
            note,
            memo,
        })
    }

    /// Creates the output proof and note ciphertexts, producing the final
    /// [`OutputDescription`] for this output.
    fn build<P: consensus::Parameters, Pr: TxProver, R: RngCore>(
        self,
        prover: &Pr,
        ctx: &mut Pr::SaplingProvingContext,
        rng: &mut R,
    ) -> OutputDescription<GrothProofBytes> {
        let encryptor = sapling_note_encryption::<R, P>(
            self.ovk,
            self.note.clone(),
            self.to.clone(),
            self.memo,
            rng,
        );

        // The proof and value commitment use the same esk as the encryptor so
        // that the ephemeral key in the ciphertext matches the proof.
        let (zkproof, cv) = prover.output_proof(
            ctx,
            *encryptor.esk(),
            self.to,
            self.note.rcm(),
            self.note.value,
        );

        let cmu = self.note.cmu();

        let enc_ciphertext = encryptor.encrypt_note_plaintext();
        let out_ciphertext = encryptor.encrypt_outgoing_plaintext(&cv, &cmu, rng);

        let epk = *encryptor.epk();

        OutputDescription {
            cv,
            cmu,
            ephemeral_key: epk.to_bytes().into(),
            enc_ciphertext,
            out_ciphertext,
            zkproof,
        }
    }
}
/// Metadata about a transaction created by a [`SaplingBuilder`].
#[derive(Debug, Clone, PartialEq, Default)]
pub struct SaplingMetadata {
    // For each call to `add_spend`, the index of the resulting spend in the
    // final (shuffled) transaction.
    spend_indices: Vec<usize>,
    // For each call to `add_output`, the index of the resulting output in the
    // final (shuffled) transaction.
    output_indices: Vec<usize>,
}

impl SaplingMetadata {
    /// Constructs metadata recording no spends or outputs.
    pub fn empty() -> Self {
        // `Default` is derived (both fields are `Vec`s), so delegate rather
        // than duplicating the empty construction.
        Self::default()
    }

    /// Returns the index within the transaction of the [`SpendDescription`] corresponding
    /// to the `n`-th call to [`SaplingBuilder::add_spend`].
    ///
    /// Note positions are randomized when building transactions for indistinguishability.
    /// This means that the transaction consumer cannot assume that e.g. the first spend
    /// they added (via the first call to [`SaplingBuilder::add_spend`]) is the first
    /// [`SpendDescription`] in the transaction.
    pub fn spend_index(&self, n: usize) -> Option<usize> {
        self.spend_indices.get(n).copied()
    }

    /// Returns the index within the transaction of the [`OutputDescription`] corresponding
    /// to the `n`-th call to [`SaplingBuilder::add_output`].
    ///
    /// Note positions are randomized when building transactions for indistinguishability.
    /// This means that the transaction consumer cannot assume that e.g. the first output
    /// they added (via the first call to [`SaplingBuilder::add_output`]) is the first
    /// [`OutputDescription`] in the transaction.
    pub fn output_index(&self, n: usize) -> Option<usize> {
        self.output_indices.get(n).copied()
    }
}
/// Accumulates Sapling spends and outputs prior to constructing a [`Bundle`].
pub struct SaplingBuilder<P> {
    // Network parameters, used when deriving note rseed values.
    params: P,
    // Anchor shared by all spends; set by the first call to `add_spend`.
    anchor: Option<bls12_381::Scalar>,
    target_height: BlockHeight,
    // Net value: sum of spend values minus sum of output values.
    value_balance: Amount,
    spends: Vec<SpendDescriptionInfo>,
    outputs: Vec<SaplingOutput>,
}

/// Authorization state for a Sapling bundle whose spends have not yet been
/// signed; retains the builder metadata needed to locate each spend/output.
#[derive(Clone)]
pub struct Unauthorized {
    tx_metadata: SaplingMetadata,
}

impl std::fmt::Debug for Unauthorized {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(f, "Unauthorized")
    }
}

impl Authorization for Unauthorized {
    type Proof = GrothProofBytes;
    // Until signatures are applied, each spend carries the information needed
    // to produce its spend authorization signature.
    type AuthSig = SpendDescriptionInfo;
}
impl<P: consensus::Parameters> SaplingBuilder<P> {
    /// Constructs a new empty builder for the given network parameters and
    /// transaction target height.
    pub fn new(params: P, target_height: BlockHeight) -> Self {
        SaplingBuilder {
            params,
            anchor: None,
            target_height,
            value_balance: Amount::zero(),
            spends: vec![],
            outputs: vec![],
        }
    }

    /// Returns the net value represented by the spends and outputs added to this builder.
    pub fn value_balance(&self) -> Amount {
        self.value_balance
    }

    /// Adds a Sapling note to be spent in this transaction.
    ///
    /// Returns an error if the given Merkle path does not have the same anchor as the
    /// paths for previous Sapling notes.
    pub fn add_spend<R: RngCore>(
        &mut self,
        mut rng: R,
        extsk: ExtendedSpendingKey,
        diversifier: Diversifier,
        note: Note,
        merkle_path: MerklePath<Node>,
    ) -> Result<(), Error> {
        // Consistency check: all anchors must equal the first one
        let cmu = Node::new(note.cmu().into());
        if let Some(anchor) = self.anchor {
            let path_root: bls12_381::Scalar = merkle_path.root(cmu).into();
            if path_root != anchor {
                return Err(Error::AnchorMismatch);
            }
        } else {
            self.anchor = Some(merkle_path.root(cmu).into())
        }

        // Fresh randomizer for this spend's proof and authorization signature.
        let alpha = jubjub::Fr::random(&mut rng);

        self.value_balance += Amount::from_u64(note.value).map_err(|_| Error::InvalidAmount)?;

        self.spends.push(SpendDescriptionInfo {
            extsk,
            diversifier,
            note,
            alpha,
            merkle_path,
        });

        Ok(())
    }

    /// Adds a Sapling address to send funds to.
    ///
    /// Returns an error if the address is invalid or `value` is negative.
    #[allow(clippy::too_many_arguments)]
    pub fn add_output<R: RngCore>(
        &mut self,
        mut rng: R,
        ovk: Option<OutgoingViewingKey>,
        to: PaymentAddress,
        value: Amount,
        memo: MemoBytes,
    ) -> Result<(), Error> {
        let output = SaplingOutput::new_internal(
            &self.params,
            &mut rng,
            self.target_height,
            ovk,
            to,
            value,
            memo,
        )?;

        self.value_balance -= value;

        self.outputs.push(output);

        Ok(())
    }

    /// Send change to the specified change address. If no change address
    /// was set, send change to the first Sapling address given as input.
    pub fn get_candidate_change_address(&self) -> Option<(OutgoingViewingKey, PaymentAddress)> {
        self.spends.first().and_then(|spend| {
            PaymentAddress::from_parts(spend.diversifier, spend.note.pk_d)
                .map(|addr| (spend.extsk.expsk.ovk, addr))
        })
    }

    /// Creates the proofs for all added spends and outputs, producing an
    /// unsigned (unauthorized) Sapling bundle.
    ///
    /// Returns `Ok(None)` if no spends or outputs were added. Outputs are
    /// padded with dummy outputs to `MIN_SHIELDED_OUTPUTS` whenever any spend
    /// is present, and spend/output order is randomized. One unit of progress
    /// per proof created is reported on `progress_notifier` if provided.
    pub fn build<Pr: TxProver, R: RngCore>(
        self,
        prover: &Pr,
        ctx: &mut Pr::SaplingProvingContext,
        mut rng: R,
        target_height: BlockHeight,
        progress_notifier: Option<&Sender<Progress>>,
    ) -> Result<Option<Bundle<Unauthorized>>, Error> {
        // Record initial positions of spends and outputs
        let params = self.params;
        let mut indexed_spends: Vec<_> = self.spends.into_iter().enumerate().collect();
        let mut indexed_outputs: Vec<_> = self
            .outputs
            .iter()
            .enumerate()
            .map(|(i, o)| Some((i, o)))
            .collect();

        // Set up the transaction metadata that will be used to record how
        // inputs and outputs are shuffled.
        let mut tx_metadata = SaplingMetadata::empty();
        tx_metadata.spend_indices.resize(indexed_spends.len(), 0);
        tx_metadata.output_indices.resize(indexed_outputs.len(), 0);

        // Pad Sapling outputs
        if !indexed_spends.is_empty() {
            while indexed_outputs.len() < MIN_SHIELDED_OUTPUTS {
                indexed_outputs.push(None);
            }
        }

        // Randomize order of inputs and outputs
        indexed_spends.shuffle(&mut rng);
        indexed_outputs.shuffle(&mut rng);

        // Keep track of the total number of steps computed
        let total_progress = indexed_spends.len() as u32 + indexed_outputs.len() as u32;
        let mut progress = 0u32;

        // Create Sapling SpendDescriptions
        let shielded_spends: Vec<SpendDescription<Unauthorized>> = if !indexed_spends.is_empty() {
            let anchor = self
                .anchor
                .expect("Sapling anchor must be set if Sapling spends are present.");

            indexed_spends
                .into_iter()
                .enumerate()
                .map(|(i, (pos, spend))| {
                    let proof_generation_key = spend.extsk.expsk.proof_generation_key();

                    let nullifier = spend.note.nf(
                        &proof_generation_key.to_viewing_key(),
                        spend.merkle_path.position,
                    );

                    let (zkproof, cv, rk) = prover
                        .spend_proof(
                            ctx,
                            proof_generation_key,
                            spend.diversifier,
                            spend.note.rseed,
                            spend.alpha,
                            spend.note.value,
                            anchor,
                            spend.merkle_path.clone(),
                        )
                        .map_err(|_| Error::SpendProof)?;

                    // Record the post-randomized spend location
                    tx_metadata.spend_indices[pos] = i;

                    // Update progress and send a notification on the channel
                    progress += 1;
                    if let Some(sender) = progress_notifier {
                        // If the send fails, we should ignore the error, not crash.
                        sender
                            .send(Progress::new(progress, Some(total_progress)))
                            .unwrap_or(());
                    }

                    Ok(SpendDescription {
                        cv,
                        anchor,
                        nullifier,
                        rk,
                        zkproof,
                        spend_auth_sig: spend,
                    })
                })
                .collect::<Result<Vec<_>, Error>>()?
        } else {
            vec![]
        };

        // Create Sapling OutputDescriptions
        let shielded_outputs: Vec<OutputDescription<GrothProofBytes>> = indexed_outputs
            .into_iter()
            .enumerate()
            .map(|(i, output)| {
                let result = if let Some((pos, output)) = output {
                    // Record the post-randomized output location
                    tx_metadata.output_indices[pos] = i;

                    output.clone().build::<P, _, _>(prover, ctx, &mut rng)
                } else {
                    // This is a dummy output
                    let (dummy_to, dummy_note) = {
                        let (diversifier, g_d) = {
                            let mut diversifier;
                            let g_d;
                            // Loop until we find a diversifier with a valid
                            // group element.
                            loop {
                                let mut d = [0; 11];
                                rng.fill_bytes(&mut d);
                                diversifier = Diversifier(d);
                                if let Some(val) = diversifier.g_d() {
                                    g_d = val;
                                    break;
                                }
                            }
                            (diversifier, g_d)
                        };

                        let (pk_d, payment_address) = loop {
                            let dummy_ivk = jubjub::Fr::random(&mut rng);
                            let pk_d = g_d * dummy_ivk;
                            if let Some(addr) = PaymentAddress::from_parts(diversifier, pk_d) {
                                break (pk_d, addr);
                            }
                        };

                        let rseed =
                            generate_random_rseed_internal(&params, target_height, &mut rng);

                        (
                            payment_address,
                            Note {
                                g_d,
                                pk_d,
                                rseed,
                                value: 0,
                            },
                        )
                    };

                    let esk = dummy_note.generate_or_derive_esk_internal(&mut rng);
                    let epk = dummy_note.g_d * esk;

                    let (zkproof, cv) =
                        prover.output_proof(ctx, esk, dummy_to, dummy_note.rcm(), dummy_note.value);

                    let cmu = dummy_note.cmu();

                    // Dummy outputs carry random ciphertexts; nothing can
                    // decrypt them.
                    let mut enc_ciphertext = [0u8; 580];
                    let mut out_ciphertext = [0u8; 80];
                    rng.fill_bytes(&mut enc_ciphertext[..]);
                    rng.fill_bytes(&mut out_ciphertext[..]);

                    OutputDescription {
                        cv,
                        cmu,
                        ephemeral_key: epk.to_bytes().into(),
                        enc_ciphertext,
                        out_ciphertext,
                        zkproof,
                    }
                };

                // Update progress and send a notification on the channel
                progress += 1;
                if let Some(sender) = progress_notifier {
                    // If the send fails, we should ignore the error, not crash.
                    sender
                        .send(Progress::new(progress, Some(total_progress)))
                        .unwrap_or(());
                }

                result
            })
            .collect();

        let bundle = if shielded_spends.is_empty() && shielded_outputs.is_empty() {
            None
        } else {
            Some(Bundle {
                shielded_spends,
                shielded_outputs,
                value_balance: self.value_balance,
                authorization: Unauthorized { tx_metadata },
            })
        };

        Ok(bundle)
    }
}
impl SpendDescription<Unauthorized> {
    /// Replaces this spend's builder state with the given spend authorization
    /// signature, producing an authorized spend description.
    pub fn apply_signature(&self, spend_auth_sig: Signature) -> SpendDescription<Authorized> {
        SpendDescription {
            cv: self.cv,
            anchor: self.anchor,
            nullifier: self.nullifier,
            rk: self.rk.clone(),
            zkproof: self.zkproof,
            spend_auth_sig,
        }
    }
}

impl Bundle<Unauthorized> {
    /// Creates the binding signature for the bundle and a spend authorization
    /// signature for each spend (using the `ask` and `alpha` retained by the
    /// builder), producing an authorized bundle and the builder metadata.
    ///
    /// Returns [`Error::BindingSig`] if the prover fails to create the
    /// binding signature.
    pub fn apply_signatures<Pr: TxProver, R: RngCore>(
        self,
        prover: &Pr,
        ctx: &mut Pr::SaplingProvingContext,
        rng: &mut R,
        sighash_bytes: &[u8; 32],
    ) -> Result<(Bundle<Authorized>, SaplingMetadata), Error> {
        let binding_sig = prover
            .binding_sig(ctx, self.value_balance, sighash_bytes)
            .map_err(|_| Error::BindingSig)?;
        Ok((
            Bundle {
                shielded_spends: self
                    .shielded_spends
                    .iter()
                    .map(|spend| {
                        spend.apply_signature(spend_sig_internal(
                            PrivateKey(spend.spend_auth_sig.extsk.expsk.ask),
                            spend.spend_auth_sig.alpha,
                            sighash_bytes,
                            rng,
                        ))
                    })
                    .collect(),
                shielded_outputs: self.shielded_outputs,
                value_balance: self.value_balance,
                authorization: Authorized { binding_sig },
            },
            self.authorization.tx_metadata,
        ))
    }
}
/// Property-testing generators for the Sapling builder.
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
    use proptest::collection::vec;
    use proptest::prelude::*;
    use rand::{rngs::StdRng, SeedableRng};

    use crate::{
        consensus::{
            testing::{arb_branch_id, arb_height},
            TEST_NETWORK,
        },
        merkle_tree::{testing::arb_commitment_tree, IncrementalWitness},
        sapling::{
            prover::{mock::MockTxProver, TxProver},
            testing::{arb_node, arb_note, arb_positive_note_value},
            Diversifier,
        },
        transaction::components::{
            amount::MAX_MONEY,
            sapling::{Authorized, Bundle},
        },
        zip32::testing::arb_extended_spending_key,
    };

    use super::SaplingBuilder;

    prop_compose! {
        /// Builds an arbitrary authorized Sapling bundle by running the real
        /// [`SaplingBuilder`] over generated notes, proving with the mock
        /// prover and signing with a fake sighash.
        fn arb_bundle()(n_notes in 1..30usize)(
            extsk in arb_extended_spending_key(),
            spendable_notes in vec(
                arb_positive_note_value(MAX_MONEY as u64 / 10000).prop_flat_map(arb_note),
                n_notes
            ),
            commitment_trees in vec(
                arb_commitment_tree(n_notes, arb_node()).prop_map(
                    |t| IncrementalWitness::from_tree(&t).path().unwrap()
                ),
                n_notes
            ),
            diversifiers in vec(prop::array::uniform11(any::<u8>()).prop_map(Diversifier), n_notes),
            target_height in arb_branch_id().prop_flat_map(|b| arb_height(b, &TEST_NETWORK)),
            rng_seed in prop::array::uniform32(any::<u8>()),
            fake_sighash_bytes in prop::array::uniform32(any::<u8>()),
        ) -> Bundle<Authorized> {
            let mut builder = SaplingBuilder::new(TEST_NETWORK, target_height.unwrap());
            let mut rng = StdRng::from_seed(rng_seed);

            for ((note, path), diversifier) in spendable_notes.into_iter().zip(commitment_trees.into_iter()).zip(diversifiers.into_iter()) {
                builder.add_spend(
                    &mut rng,
                    extsk.clone(),
                    diversifier,
                    note,
                    path
                ).unwrap();
            }

            let prover = MockTxProver;
            let mut ctx = prover.new_sapling_proving_context();

            let bundle = builder.build(
                &prover,
                &mut ctx,
                &mut rng,
                target_height.unwrap(),
                None
            ).unwrap().unwrap();

            let (bundle, _) = bundle.apply_signatures(
                &prover,
                &mut ctx,
                &mut rng,
                &fake_sighash_bytes,
            ).unwrap();

            bundle
        }
    }
}

View File

@ -10,6 +10,13 @@ const PHGR_PROOF_SIZE: usize = 33 + 33 + 65 + 33 + 33 + 33 + 33 + 33;
const ZC_NUM_JS_INPUTS: usize = 2;
const ZC_NUM_JS_OUTPUTS: usize = 2;
/// The Sprout (JoinSplit) parts of a transaction.
#[derive(Debug)]
pub struct Bundle {
    /// The JoinSplit descriptions in this bundle.
    pub joinsplits: Vec<JsDescription>,
    /// Public key for `joinsplit_sig` — presumably the verification key for
    /// the bundle's signature; confirm against the transaction parsing code.
    pub joinsplit_pubkey: [u8; 32],
    /// Signature over the bundle — NOTE(review): the signed message is not
    /// visible here; confirm against the sighash computation.
    pub joinsplit_sig: [u8; 64],
}
#[derive(Clone)]
pub(crate) enum SproutProof {
Groth([u8; GROTH_PROOF_SIZE]),

View File

@ -2,12 +2,33 @@
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::fmt::Debug;
use std::io::{self, Read, Write};
use crate::legacy::Script;
use super::amount::Amount;
pub mod builder;
/// Authorization state for the transparent parts of a transaction.
pub trait Authorization: Debug {
    /// The type of the script witness carried by each transparent input in
    /// this authorization state.
    type ScriptSig: Debug + Clone + PartialEq;
}

/// Marker for a transparent bundle whose inputs carry final script signatures.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Authorized;

impl Authorization for Authorized {
    type ScriptSig = Script;
}

/// The transparent inputs and outputs of a transaction, together with its
/// authorization state.
#[derive(Debug, Clone, PartialEq)]
pub struct Bundle<A: Authorization> {
    pub vin: Vec<TxIn<A>>,
    pub vout: Vec<TxOut>,
    pub authorization: A,
}
#[derive(Clone, Debug, PartialEq)]
pub struct OutPoint {
hash: [u8; 32],
@ -41,23 +62,13 @@ impl OutPoint {
}
#[derive(Debug, Clone, PartialEq)]
pub struct TxIn {
pub struct TxIn<A: Authorization> {
pub prevout: OutPoint,
pub script_sig: Script,
pub script_sig: A::ScriptSig,
pub sequence: u32,
}
impl TxIn {
#[cfg(feature = "transparent-inputs")]
#[cfg_attr(docsrs, doc(cfg(feature = "transparent-inputs")))]
pub fn new(prevout: OutPoint) -> Self {
TxIn {
prevout,
script_sig: Script::default(),
sequence: std::u32::MAX,
}
}
impl TxIn<Authorized> {
pub fn read<R: Read>(mut reader: &mut R) -> io::Result<Self> {
let prevout = OutPoint::read(&mut reader)?;
let script_sig = Script::read(&mut reader)?;
@ -104,3 +115,66 @@ impl TxOut {
self.script_pubkey.write(&mut writer)
}
}
/// Property-testing generators for transparent transaction components.
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
    use proptest::collection::vec;
    use proptest::prelude::*;
    use proptest::sample::select;

    use crate::{legacy::Script, transaction::components::amount::testing::arb_nonnegative_amount};

    use super::{Authorized, Bundle, OutPoint, TxIn, TxOut};

    /// A small set of opcodes from which arbitrary scripts are assembled.
    pub const VALID_OPCODES: [u8; 8] = [
        0x00, // OP_FALSE,
        0x51, // OP_1,
        0x52, // OP_2,
        0x53, // OP_3,
        0xac, // OP_CHECKSIG,
        0x63, // OP_IF,
        0x65, // OP_VERIF,
        0x6a, // OP_RETURN,
    ];

    prop_compose! {
        pub fn arb_outpoint()(hash in prop::array::uniform32(0u8..), n in 0..100u32) -> OutPoint {
            OutPoint::new(hash, n)
        }
    }

    prop_compose! {
        pub fn arb_script()(v in vec(select(&VALID_OPCODES[..]), 1..256)) -> Script {
            Script(v)
        }
    }

    prop_compose! {
        pub fn arb_txin()(
            prevout in arb_outpoint(),
            script_sig in arb_script(),
            sequence in any::<u32>()
        ) -> TxIn<Authorized> {
            TxIn { prevout, script_sig, sequence }
        }
    }

    prop_compose! {
        pub fn arb_txout()(value in arb_nonnegative_amount(), script_pubkey in arb_script()) -> TxOut {
            TxOut { value, script_pubkey }
        }
    }

    prop_compose! {
        /// Produces an arbitrary transparent bundle, or `None` when there are
        /// no inputs and no outputs.
        pub fn arb_bundle()(
            vin in vec(arb_txin(), 0..10),
            vout in vec(arb_txout(), 0..10),
        ) -> Option<Bundle<Authorized>> {
            if vin.is_empty() && vout.is_empty() {
                None
            } else {
                Some(Bundle { vin, vout, authorization: Authorized })
            }
        }
    }
}

View File

@ -0,0 +1,241 @@
//! Types and functions for building transparent transaction components.
use std::fmt;
#[cfg(feature = "transparent-inputs")]
use blake2b_simd::Hash as Blake2bHash;
use crate::{
legacy::TransparentAddress,
transaction::components::{
amount::Amount,
transparent::{self, Authorization, Authorized, Bundle, TxIn, TxOut},
},
};
#[cfg(feature = "transparent-inputs")]
use crate::{
legacy::Script,
transaction::{
self as tx,
components::OutPoint,
sighash::{signature_hash, SignableInput, SIGHASH_ALL},
TransactionData, TxDigests,
},
};
/// Errors that can occur while building the transparent components of a
/// transaction.
#[derive(Debug, PartialEq)]
pub enum Error {
    /// A coin's script did not correspond to an address spendable by the
    /// provided key.
    InvalidAddress,
    /// An amount was out of range (e.g. a negative coin value).
    InvalidAmount,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::InvalidAddress => write!(f, "Invalid address"),
            Error::InvalidAmount => write!(f, "Invalid amount"),
        }
    }
}

// `Debug` and `Display` are both available, so expose this as a standard
// error type; this lets callers use `?` with `Box<dyn Error>`/`anyhow`.
impl std::error::Error for Error {}
#[cfg(feature = "transparent-inputs")]
#[derive(Debug, Clone)]
struct TransparentInputInfo {
    /// Secret key that authorizes spending of this input.
    sk: secp256k1::SecretKey,
    /// Serialized public key derived from `sk`, cached so it need not be
    /// recomputed when assembling the P2PKH scriptSig.
    pubkey: [u8; secp256k1::constants::PUBLIC_KEY_SIZE],
    /// Outpoint of the UTXO being spent.
    utxo: OutPoint,
    /// The previous output itself (value and scriptPubKey); used for sighash
    /// computation when signing.
    coin: TxOut,
}
/// Accumulates the transparent inputs and outputs of a transaction under
/// construction.
pub struct TransparentBuilder {
    /// Signing-only secp256k1 context, used to derive public keys and later
    /// to sign inputs.
    #[cfg(feature = "transparent-inputs")]
    secp: secp256k1::Secp256k1<secp256k1::SignOnly>,
    /// Inputs accumulated so far, together with the key material needed to
    /// sign them.
    #[cfg(feature = "transparent-inputs")]
    inputs: Vec<TransparentInputInfo>,
    /// Outputs accumulated so far.
    vout: Vec<TxOut>,
}
/// Authorization state for a transparent bundle whose inputs have not yet
/// been signed; carries the data required to produce the signatures later.
#[derive(Debug, Clone)]
pub struct Unauthorized {
    #[cfg(feature = "transparent-inputs")]
    secp: secp256k1::Secp256k1<secp256k1::SignOnly>,
    #[cfg(feature = "transparent-inputs")]
    inputs: Vec<TransparentInputInfo>,
}
impl Authorization for Unauthorized {
    // Unsigned inputs carry no scriptSig yet.
    type ScriptSig = ();
}
impl TransparentBuilder {
    /// Constructs a builder with no inputs and no outputs.
    pub fn empty() -> Self {
        TransparentBuilder {
            #[cfg(feature = "transparent-inputs")]
            secp: secp256k1::Secp256k1::gen_new(),
            #[cfg(feature = "transparent-inputs")]
            inputs: vec![],
            vout: vec![],
        }
    }
    /// Adds a coin (the output of a previous transaction) to be spent by this
    /// transaction.
    ///
    /// Returns [`Error::InvalidAmount`] if the coin's value is negative, and
    /// [`Error::InvalidAddress`] if `sk` cannot spend the coin's scriptPubKey
    /// (only P2PKH outputs are supported).
    #[cfg(feature = "transparent-inputs")]
    pub fn add_input(
        &mut self,
        sk: secp256k1::SecretKey,
        utxo: OutPoint,
        coin: TxOut,
    ) -> Result<(), Error> {
        if coin.value.is_negative() {
            return Err(Error::InvalidAmount);
        }
        // Ensure that the RIPEMD-160 digest of the public key associated with the
        // provided secret key matches that of the address to which the provided
        // output may be spent.
        let pubkey = secp256k1::PublicKey::from_secret_key(&self.secp, &sk).serialize();
        match coin.script_pubkey.address() {
            Some(TransparentAddress::PublicKey(hash)) => {
                use ripemd160::Ripemd160;
                use sha2::{Digest, Sha256};
                // A P2PKH address hash is RIPEMD160(SHA256(pubkey)).
                if hash[..] != Ripemd160::digest(&Sha256::digest(&pubkey))[..] {
                    return Err(Error::InvalidAddress);
                }
            }
            _ => return Err(Error::InvalidAddress),
        }
        self.inputs.push(TransparentInputInfo {
            sk,
            pubkey,
            utxo,
            coin,
        });
        Ok(())
    }
    /// Adds a transparent output paying `value` to `to`.
    ///
    /// Returns [`Error::InvalidAmount`] if `value` is negative.
    pub fn add_output(&mut self, to: &TransparentAddress, value: Amount) -> Result<(), Error> {
        if value.is_negative() {
            return Err(Error::InvalidAmount);
        }
        self.vout.push(TxOut {
            value,
            script_pubkey: to.script(),
        });
        Ok(())
    }
    /// Computes (sum of input values) - (sum of output values), returning
    /// `None` if either sum overflows the valid `Amount` range.
    ///
    /// When the "transparent-inputs" feature is disabled no inputs can exist,
    /// so the input sum is zero.
    pub fn value_balance(&self) -> Option<Amount> {
        #[cfg(feature = "transparent-inputs")]
        let input_sum = self
            .inputs
            .iter()
            .map(|input| input.coin.value)
            .sum::<Option<Amount>>()?;
        #[cfg(not(feature = "transparent-inputs"))]
        let input_sum = Amount::zero();
        input_sum
            - self
                .vout
                .iter()
                .map(|vo| vo.value)
                .sum::<Option<Amount>>()?
    }
    /// Consumes the builder, producing an unauthorized transparent bundle, or
    /// `None` if no inputs or outputs were added.
    pub fn build(self) -> Option<transparent::Bundle<Unauthorized>> {
        #[cfg(feature = "transparent-inputs")]
        let vin: Vec<TxIn<Unauthorized>> = self
            .inputs
            .iter()
            .map(|i| TxIn::new(i.utxo.clone()))
            .collect();
        #[cfg(not(feature = "transparent-inputs"))]
        let vin: Vec<TxIn<Unauthorized>> = vec![];
        if vin.is_empty() && self.vout.is_empty() {
            None
        } else {
            Some(transparent::Bundle {
                vin,
                vout: self.vout,
                // Carry the signing context and input key material forward so
                // that `apply_signatures` can sign later.
                authorization: Unauthorized {
                    #[cfg(feature = "transparent-inputs")]
                    secp: self.secp,
                    #[cfg(feature = "transparent-inputs")]
                    inputs: self.inputs,
                },
            })
        }
    }
}
impl TxIn<Unauthorized> {
    /// Constructs an unauthorized transparent input spending the given
    /// prevout.
    ///
    /// The scriptSig is left empty (`()`); it is populated later by
    /// `Bundle::apply_signatures`. The sequence number is set to its maximum
    /// value, the conventional "final" sequence.
    #[cfg(feature = "transparent-inputs")]
    #[cfg_attr(docsrs, doc(cfg(feature = "transparent-inputs")))]
    pub fn new(prevout: OutPoint) -> Self {
        TxIn {
            prevout,
            script_sig: (),
            // `u32::MAX` replaces the legacy `std::u32::MAX` module constant,
            // which is deprecated in favor of the associated constant.
            sequence: u32::MAX,
        }
    }
}
impl Bundle<Unauthorized> {
    /// Signs every transparent input of this bundle and returns the
    /// authorized bundle with populated P2PKH scriptSigs.
    ///
    /// `mtx` and `txid_parts_cache` supply the transaction data and the
    /// precomputed digest parts needed to compute each input's SIGHASH_ALL
    /// signature hash.
    pub fn apply_signatures(
        self,
        #[cfg(feature = "transparent-inputs")] mtx: &TransactionData<tx::Unauthorized>,
        #[cfg(feature = "transparent-inputs")] txid_parts_cache: &TxDigests<Blake2bHash>,
    ) -> Bundle<Authorized> {
        #[cfg(feature = "transparent-inputs")]
        let script_sigs: Vec<Script> = self
            .authorization
            .inputs
            .iter()
            .enumerate()
            .map(|(i, info)| {
                // Compute the SIGHASH_ALL digest committing to this input's
                // script code and value.
                let sighash = signature_hash(
                    mtx,
                    SIGHASH_ALL,
                    &SignableInput::transparent(i, &info.coin.script_pubkey, info.coin.value),
                    txid_parts_cache,
                );
                let msg = secp256k1::Message::from_slice(sighash.as_ref()).expect("32 bytes");
                let sig = self.authorization.secp.sign(&msg, &info.sk);
                // Signature has to have "SIGHASH_ALL" appended to it
                let mut sig_bytes: Vec<u8> = sig.serialize_der()[..].to_vec();
                sig_bytes.extend(&[SIGHASH_ALL as u8]);
                // P2PKH scriptSig
                Script::default() << &sig_bytes[..] << &info.pubkey[..]
            })
            .collect();
        // Without the "transparent-inputs" feature no inputs can have been
        // added, so there is nothing to sign; zipping with the empty vector
        // below then yields an empty vin.
        #[cfg(not(feature = "transparent-inputs"))]
        let script_sigs = vec![];
        transparent::Bundle {
            vin: self
                .vin
                .into_iter()
                .zip(script_sigs.into_iter())
                .map(|(txin, sig)| TxIn {
                    prevout: txin.prevout,
                    script_sig: sig,
                    sequence: txin.sequence,
                })
                .collect(),
            vout: self.vout,
            authorization: Authorized,
        }
    }
}

View File

@ -3,41 +3,61 @@
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::io::{self, Read, Write};
use std::convert::TryFrom;
use std::fmt::Debug;
use std::io::{self, Read, Write};
use crate::{
extensions::transparent as tze,
serialize::{CompactSize, Vector},
transaction::TxId,
};
use super::amount::Amount;
pub mod builder;
fn to_io_error(_: std::num::TryFromIntError) -> io::Error {
io::Error::new(io::ErrorKind::InvalidData, "value out of range")
}
pub trait Authorization: Debug {
type Witness: Debug + Clone + PartialEq;
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Authorized;
impl Authorization for Authorized {
type Witness = tze::AuthData;
}
#[derive(Debug, Clone, PartialEq)]
pub struct Bundle<A: Authorization> {
pub vin: Vec<TzeIn<A::Witness>>,
pub vout: Vec<TzeOut>,
pub authorization: A,
}
#[derive(Clone, Debug, PartialEq)]
pub struct TzeOutPoint {
hash: [u8; 32],
pub struct OutPoint {
txid: TxId,
n: u32,
}
impl TzeOutPoint {
pub fn new(hash: [u8; 32], n: u32) -> Self {
TzeOutPoint { hash, n }
impl OutPoint {
pub fn new(txid: TxId, n: u32) -> Self {
OutPoint { txid, n }
}
pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
let mut hash = [0u8; 32];
reader.read_exact(&mut hash)?;
let txid = TxId::read(&mut reader)?;
let n = reader.read_u32::<LittleEndian>()?;
Ok(TzeOutPoint { hash, n })
Ok(OutPoint { txid, n })
}
pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
writer.write_all(&self.hash)?;
self.txid.write(&mut writer)?;
writer.write_u32::<LittleEndian>(self.n)
}
@ -45,54 +65,18 @@ impl TzeOutPoint {
self.n
}
pub fn hash(&self) -> &[u8; 32] {
&self.hash
pub fn txid(&self) -> &TxId {
&self.txid
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct TzeIn {
pub prevout: TzeOutPoint,
pub witness: tze::Witness,
pub struct TzeIn<Payload> {
pub prevout: OutPoint,
pub witness: tze::Witness<Payload>,
}
/// Transaction encoding and decoding functions conforming to [ZIP 222].
///
/// [ZIP 222]: https://zips.z.cash/zip-0222#encoding-in-transactions
impl TzeIn {
/// Convenience constructor
pub fn new(prevout: TzeOutPoint, extension_id: u32, mode: u32) -> Self {
TzeIn {
prevout,
witness: tze::Witness {
extension_id,
mode,
payload: vec![],
},
}
}
/// Read witness metadata & payload
///
/// Used to decode the encoded form used within a serialized
/// transaction.
pub fn read<R: Read>(mut reader: &mut R) -> io::Result<Self> {
let prevout = TzeOutPoint::read(&mut reader)?;
let extension_id = CompactSize::read(&mut reader)?;
let mode = CompactSize::read(&mut reader)?;
let payload = Vector::read(&mut reader, |r| r.read_u8())?;
Ok(TzeIn {
prevout,
witness: tze::Witness {
extension_id: u32::try_from(extension_id).map_err(to_io_error)?,
mode: u32::try_from(mode).map_err(to_io_error)?,
payload,
},
})
}
impl<Payload> TzeIn<Payload> {
/// Write without witness data (for signature hashing)
///
/// This is also used as the prefix for the encoded form used
@ -110,6 +94,46 @@ impl TzeIn {
usize::try_from(self.witness.mode).map_err(to_io_error)?,
)
}
}
/// Transaction encoding and decoding functions conforming to [ZIP 222].
///
/// [ZIP 222]: https://zips.z.cash/zip-0222#encoding-in-transactions
impl TzeIn<()> {
    /// Convenience constructor for a TZE input that does not yet carry a
    /// witness payload.
    pub fn new(prevout: OutPoint, extension_id: u32, mode: u32) -> Self {
        let witness = tze::Witness {
            extension_id,
            mode,
            payload: (),
        };
        TzeIn { prevout, witness }
    }
}
impl TzeIn<<Authorized as Authorization>::Witness> {
/// Read witness metadata & payload
///
/// Used to decode the encoded form used within a serialized
/// transaction.
pub fn read<R: Read>(mut reader: &mut R) -> io::Result<Self> {
let prevout = OutPoint::read(&mut reader)?;
let extension_id = CompactSize::read(&mut reader)?;
let mode = CompactSize::read(&mut reader)?;
let payload = Vector::read(&mut reader, |r| r.read_u8())?;
Ok(TzeIn {
prevout,
witness: tze::Witness {
extension_id: u32::try_from(extension_id).map_err(to_io_error)?,
mode: u32::try_from(mode).map_err(to_io_error)?,
payload: tze::AuthData(payload),
},
})
}
/// Write prevout, extension, and mode followed by witness data.
///
@ -120,7 +144,7 @@ impl TzeIn {
/// [`write_without_witness`]: TzeIn::write_without_witness
pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
self.write_without_witness(&mut writer)?;
Vector::write(&mut writer, &self.witness.payload, |w, b| w.write_u8(*b))
Vector::write(&mut writer, &self.witness.payload.0, |w, b| w.write_u8(*b))
}
}
@ -169,3 +193,61 @@ impl TzeOut {
})
}
}
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
    //! Proptest strategies for generating arbitrary TZE transaction
    //! components for use in property-based tests.
    use proptest::collection::vec;
    use proptest::prelude::*;
    use crate::{
        consensus::BranchId,
        extensions::transparent::{AuthData, Precondition, Witness},
        transaction::components::amount::testing::arb_nonnegative_amount,
        transaction::testing::arb_txid,
    };
    use super::{Authorized, Bundle, OutPoint, TzeIn, TzeOut};
    // Strategy for an arbitrary TZE outpoint: random txid, index in 0..100.
    prop_compose! {
        pub fn arb_outpoint()(txid in arb_txid(), n in 0..100u32) -> OutPoint {
            OutPoint::new(txid, n)
        }
    }
    // Strategy for a witness with random extension id, mode, and 32..256
    // bytes of payload.
    prop_compose! {
        pub fn arb_witness()(extension_id in 0..100u32, mode in 0..100u32, payload in vec(any::<u8>(), 32..256).prop_map(AuthData)) -> Witness<AuthData> {
            Witness { extension_id, mode, payload }
        }
    }
    // Strategy for an authorized TZE input.
    prop_compose! {
        pub fn arb_tzein()(prevout in arb_outpoint(), witness in arb_witness()) -> TzeIn<AuthData> {
            TzeIn { prevout, witness }
        }
    }
    // Strategy for a random precondition.
    prop_compose! {
        pub fn arb_precondition()(extension_id in 0..100u32, mode in 0..100u32, payload in vec(any::<u8>(), 32..256)) -> Precondition {
            Precondition { extension_id, mode, payload }
        }
    }
    // Strategy for a TZE output with a non-negative value.
    prop_compose! {
        pub fn arb_tzeout()(value in arb_nonnegative_amount(), precondition in arb_precondition()) -> TzeOut {
            TzeOut { value, precondition }
        }
    }
    // Strategy for an optional bundle: only produced under the ZFuture
    // branch, and only when at least one of vin/vout is non-empty.
    prop_compose! {
        pub fn arb_bundle(branch_id: BranchId)(
            vin in vec(arb_tzein(), 0..10),
            vout in vec(arb_tzeout(), 0..10),
        ) -> Option<Bundle<Authorized>> {
            if branch_id != BranchId::ZFuture || (vin.is_empty() && vout.is_empty()) {
                None
            } else {
                Some(Bundle { vin, vout, authorization: Authorized })
            }
        }
    }
}

View File

@ -0,0 +1,171 @@
//! Types and functions for building TZE transaction components
#![cfg(feature = "zfuture")]
use std::fmt;
use crate::{
extensions::transparent::{self as tze, ToPayload},
transaction::{
self as tx,
components::{
amount::Amount,
tze::{Authorization, Authorized, Bundle, OutPoint, TzeIn, TzeOut},
},
},
};
/// Errors that can occur while building the TZE parts of a transaction.
#[derive(Debug, PartialEq)]
pub enum Error {
    /// A negative output value was provided.
    InvalidAmount,
    /// A witness builder produced a mode that differs from the mode the
    /// corresponding input was constructed with; fields are
    /// (expected, actual).
    WitnessModeMismatch(u32, u32),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::InvalidAmount => write!(f, "Invalid amount"),
Error::WitnessModeMismatch(expected, actual) =>
write!(f, "TZE witness builder returned a mode that did not match the mode with which the input was initially constructed: expected = {:?}, actual = {:?}", expected, actual),
}
}
}
/// A deferred witness: the prevout being spent, together with a boxed closure
/// that, given the finalized transaction data, produces the witness mode and
/// payload bytes.
#[allow(clippy::type_complexity)]
pub struct TzeSigner<'a, BuildCtx> {
    prevout: TzeOut,
    builder: Box<dyn FnOnce(&BuildCtx) -> Result<(u32, Vec<u8>), Error> + 'a>,
}
/// Accumulates the TZE inputs and outputs of a transaction under
/// construction.
pub struct TzeBuilder<'a, BuildCtx> {
    /// One deferred witness builder per input, in input order.
    signers: Vec<TzeSigner<'a, BuildCtx>>,
    vin: Vec<TzeIn<()>>,
    vout: Vec<TzeOut>,
}
/// Authorization state for a TZE bundle whose witnesses have not yet been
/// computed.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Unauthorized;
impl Authorization for Unauthorized {
    // Inputs in this state carry no witness payload yet.
    type Witness = ();
}
impl<'a, BuildCtx> TzeBuilder<'a, BuildCtx> {
    /// Constructs a builder with no inputs and no outputs.
    pub fn empty() -> Self {
        TzeBuilder {
            signers: vec![],
            vin: vec![],
            vout: vec![],
        }
    }
    /// Adds a TZE input spending `(outpoint, prevout)` under the given
    /// extension and mode.
    ///
    /// `witness_builder` is stored and invoked later (with the finalized
    /// transaction data) to produce the witness payload; it should close over
    /// whatever data the witness must commit to.
    pub fn add_input<WBuilder, W: ToPayload>(
        &mut self,
        extension_id: u32,
        mode: u32,
        (outpoint, prevout): (OutPoint, TzeOut),
        witness_builder: WBuilder,
    ) where
        WBuilder: 'a + FnOnce(&BuildCtx) -> Result<W, Error>,
    {
        self.vin.push(TzeIn::new(outpoint, extension_id, mode));
        self.signers.push(TzeSigner {
            prevout,
            // `ctx` is already a `&BuildCtx`; pass it straight through rather
            // than taking a redundant double reference.
            builder: Box::new(move |ctx| witness_builder(ctx).map(|x| x.to_payload())),
        });
    }
    /// Adds a TZE output of the given `value`, guarded by `guarded_by`'s
    /// precondition.
    ///
    /// Returns [`Error::InvalidAmount`] if `value` is negative.
    pub fn add_output<G: ToPayload>(
        &mut self,
        extension_id: u32,
        value: Amount,
        guarded_by: &G,
    ) -> Result<(), Error> {
        if value.is_negative() {
            return Err(Error::InvalidAmount);
        }
        let (mode, payload) = guarded_by.to_payload();
        self.vout.push(TzeOut {
            value,
            precondition: tze::Precondition {
                extension_id,
                mode,
                payload,
            },
        });
        Ok(())
    }
    /// Computes (sum of input values) - (sum of output values), returning
    /// `None` if either sum overflows the valid `Amount` range.
    pub fn value_balance(&self) -> Option<Amount> {
        self.signers
            .iter()
            .map(|s| s.prevout.value)
            .sum::<Option<Amount>>()?
            - self
                .vout
                .iter()
                .map(|tzo| tzo.value)
                .sum::<Option<Amount>>()?
    }
    /// Consumes the builder, returning the unauthorized bundle (or `None` if
    /// no inputs or outputs were added) together with the stored signers.
    pub fn build(self) -> (Option<Bundle<Unauthorized>>, Vec<TzeSigner<'a, BuildCtx>>) {
        if self.vin.is_empty() && self.vout.is_empty() {
            (None, vec![])
        } else {
            // Move the vectors out of `self` instead of cloning them; partial
            // moves are permitted here because `TzeBuilder` has no `Drop`
            // impl, so the clones in the original were pure overhead.
            (
                Some(Bundle {
                    vin: self.vin,
                    vout: self.vout,
                    authorization: Unauthorized,
                }),
                self.signers,
            )
        }
    }
}
impl Bundle<Unauthorized> {
    /// Completes this bundle by invoking each input's stored witness builder
    /// against the finalized transaction data.
    ///
    /// Returns [`Error::WitnessModeMismatch`] if a witness builder produces a
    /// mode that differs from the mode the corresponding input was created
    /// with; otherwise returns the fully-authorized bundle.
    pub fn into_authorized(
        self,
        unauthed_tx: &tx::TransactionData<tx::Unauthorized>,
        signers: Vec<TzeSigner<'_, tx::TransactionData<tx::Unauthorized>>>,
    ) -> Result<Bundle<Authorized>, Error> {
        // Create TZE input witnesses. (The redundant `.into_iter()` that
        // followed `.zip(...)` in the original — a no-op on something that is
        // already an iterator — has been removed.)
        let payloads = signers
            .into_iter()
            .zip(self.vin.iter())
            .map(|(signer, tzein)| {
                // The witness builder function should have cached/closed over whatever data was
                // necessary for the witness to commit to at the time it was added to the
                // transaction builder; here, it then computes those commitments.
                let (mode, payload) = (signer.builder)(unauthed_tx)?;
                let input_mode = tzein.witness.mode;
                if mode != input_mode {
                    return Err(Error::WitnessModeMismatch(input_mode, mode));
                }
                Ok(tze::AuthData(payload))
            })
            .collect::<Result<Vec<_>, Error>>()?;
        Ok(Bundle {
            vin: self
                .vin
                .into_iter()
                .zip(payloads)
                .map(|(tzein, payload)| TzeIn {
                    prevout: tzein.prevout,
                    witness: tze::Witness {
                        extension_id: tzein.witness.extension_id,
                        mode: tzein.witness.mode,
                        payload,
                    },
                })
                .collect(),
            vout: self.vout,
            authorization: Authorized,
        })
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,382 +1,136 @@
#[cfg(feature = "zfuture")]
use crate::legacy::Script;
use blake2b_simd::Hash as Blake2bHash;
use std::convert::TryInto;
use blake2b_simd::{Hash as Blake2bHash, Params as Blake2bParams};
use byteorder::{LittleEndian, WriteBytesExt};
use ff::PrimeField;
use group::GroupEncoding;
use crate::{consensus, legacy::Script};
#[cfg(feature = "zfuture")]
use crate::{
extensions::transparent::Precondition,
serialize::{CompactSize, Vector},
};
use super::{
components::{Amount, JsDescription, OutputDescription, SpendDescription, TxIn, TxOut},
Transaction, TransactionData, TxVersion,
components::{
sapling::{self, GrothProofBytes},
Amount,
},
sighash_v4::v4_signature_hash,
sighash_v5::v5_signature_hash,
Authorization, TransactionData, TxDigests, TxVersion,
};
#[cfg(feature = "zfuture")]
use super::components::{TzeIn, TzeOut};
const ZCASH_SIGHASH_PERSONALIZATION_PREFIX: &[u8; 12] = b"ZcashSigHash";
const ZCASH_PREVOUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashPrevoutHash";
const ZCASH_SEQUENCE_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashSequencHash";
const ZCASH_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashOutputsHash";
const ZCASH_JOINSPLITS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashJSplitsHash";
const ZCASH_SHIELDED_SPENDS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashSSpendsHash";
const ZCASH_SHIELDED_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashSOutputHash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_INPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"Zcash_TzeInsHash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashTzeOutsHash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_SIGNED_INPUT_TAG: &[u8; 1] = &[0x00];
#[cfg(feature = "zfuture")]
const ZCASH_TRANSPARENT_SIGNED_INPUT_TAG: &[u8; 1] = &[0x01];
use crate::extensions::transparent::Precondition;
pub const SIGHASH_ALL: u32 = 1;
const SIGHASH_NONE: u32 = 2;
const SIGHASH_SINGLE: u32 = 3;
const SIGHASH_MASK: u32 = 0x1f;
const SIGHASH_ANYONECANPAY: u32 = 0x80;
pub const SIGHASH_NONE: u32 = 2;
pub const SIGHASH_SINGLE: u32 = 3;
pub const SIGHASH_MASK: u32 = 0x1f;
pub const SIGHASH_ANYONECANPAY: u32 = 0x80;
macro_rules! update_u32 {
($h:expr, $value:expr, $tmp:expr) => {
(&mut $tmp[..4]).write_u32::<LittleEndian>($value).unwrap();
$h.update(&$tmp[..4]);
};
pub struct TransparentInput<'a> {
index: usize,
script_code: &'a Script,
value: Amount,
}
macro_rules! update_hash {
($h:expr, $cond:expr, $value:expr) => {
if $cond {
$h.update(&$value.as_ref());
} else {
$h.update(&[0; 32]);
}
};
}
fn has_overwinter_components(version: &TxVersion) -> bool {
!matches!(version, TxVersion::Sprout(_))
}
fn has_sapling_components(version: &TxVersion) -> bool {
!matches!(version, TxVersion::Sprout(_) | TxVersion::Overwinter)
}
#[cfg(feature = "zfuture")]
fn has_tze_components(version: &TxVersion) -> bool {
matches!(version, TxVersion::ZFuture)
}
fn prevout_hash(vin: &[TxIn]) -> Blake2bHash {
let mut data = Vec::with_capacity(vin.len() * 36);
for t_in in vin {
t_in.prevout.write(&mut data).unwrap();
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_PREVOUTS_HASH_PERSONALIZATION)
.hash(&data)
}
fn sequence_hash(vin: &[TxIn]) -> Blake2bHash {
let mut data = Vec::with_capacity(vin.len() * 4);
for t_in in vin {
(&mut data)
.write_u32::<LittleEndian>(t_in.sequence)
.unwrap();
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_SEQUENCE_HASH_PERSONALIZATION)
.hash(&data)
}
fn outputs_hash(vout: &[TxOut]) -> Blake2bHash {
let mut data = Vec::with_capacity(vout.len() * (4 + 1));
for t_out in vout {
t_out.write(&mut data).unwrap();
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_OUTPUTS_HASH_PERSONALIZATION)
.hash(&data)
}
fn single_output_hash(tx_out: &TxOut) -> Blake2bHash {
let mut data = vec![];
tx_out.write(&mut data).unwrap();
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_OUTPUTS_HASH_PERSONALIZATION)
.hash(&data)
}
fn joinsplits_hash(
txversion: TxVersion,
joinsplits: &[JsDescription],
joinsplit_pubkey: &[u8; 32],
) -> Blake2bHash {
let mut data = Vec::with_capacity(
joinsplits.len()
* if txversion.uses_groth_proofs() {
1698 // JSDescription with Groth16 proof
} else {
1802 // JSDescription with PHGR13 proof
},
);
for js in joinsplits {
js.write(&mut data).unwrap();
}
data.extend_from_slice(joinsplit_pubkey);
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_JOINSPLITS_HASH_PERSONALIZATION)
.hash(&data)
}
fn shielded_spends_hash(shielded_spends: &[SpendDescription]) -> Blake2bHash {
let mut data = Vec::with_capacity(shielded_spends.len() * 384);
for s_spend in shielded_spends {
data.extend_from_slice(&s_spend.cv.to_bytes());
data.extend_from_slice(s_spend.anchor.to_repr().as_ref());
data.extend_from_slice(&s_spend.nullifier.0);
s_spend.rk.write(&mut data).unwrap();
data.extend_from_slice(&s_spend.zkproof);
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_SHIELDED_SPENDS_HASH_PERSONALIZATION)
.hash(&data)
}
fn shielded_outputs_hash(shielded_outputs: &[OutputDescription]) -> Blake2bHash {
let mut data = Vec::with_capacity(shielded_outputs.len() * 948);
for s_out in shielded_outputs {
s_out.write(&mut data).unwrap();
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_SHIELDED_OUTPUTS_HASH_PERSONALIZATION)
.hash(&data)
}
#[cfg(feature = "zfuture")]
fn tze_inputs_hash(tze_inputs: &[TzeIn]) -> Blake2bHash {
let mut data = vec![];
for tzein in tze_inputs {
tzein.write_without_witness(&mut data).unwrap();
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_TZE_INPUTS_HASH_PERSONALIZATION)
.hash(&data)
}
#[cfg(feature = "zfuture")]
fn tze_outputs_hash(tze_outputs: &[TzeOut]) -> Blake2bHash {
let mut data = vec![];
for tzeout in tze_outputs {
tzeout.write(&mut data).unwrap();
}
Blake2bParams::new()
.hash_length(32)
.personal(ZCASH_TZE_OUTPUTS_HASH_PERSONALIZATION)
.hash(&data)
}
pub enum SignableInput<'a> {
Shielded,
Transparent {
index: usize,
script_code: &'a Script,
value: Amount,
},
#[cfg(feature = "zfuture")]
Tze {
index: usize,
precondition: &'a Precondition,
value: Amount,
},
}
impl<'a> SignableInput<'a> {
pub fn transparent(index: usize, script_code: &'a Script, value: Amount) -> Self {
SignableInput::Transparent {
impl<'a> TransparentInput<'a> {
pub fn new(index: usize, script_code: &'a Script, value: Amount) -> Self {
TransparentInput {
index,
script_code,
value,
}
}
#[cfg(feature = "zfuture")]
pub fn tze(index: usize, precondition: &'a Precondition, value: Amount) -> Self {
SignableInput::Tze {
pub fn index(&self) -> usize {
self.index
}
pub fn script_code(&self) -> &'a Script {
self.script_code
}
pub fn value(&self) -> Amount {
self.value
}
}
#[cfg(feature = "zfuture")]
pub struct TzeInput<'a> {
index: usize,
precondition: &'a Precondition,
value: Amount,
}
#[cfg(feature = "zfuture")]
impl<'a> TzeInput<'a> {
pub fn new(index: usize, precondition: &'a Precondition, value: Amount) -> Self {
TzeInput {
index,
precondition,
value,
}
}
}
pub fn signature_hash_data(
tx: &TransactionData,
consensus_branch_id: consensus::BranchId,
hash_type: u32,
signable_input: SignableInput<'_>,
) -> Vec<u8> {
if has_overwinter_components(&tx.version) {
let mut personal = [0; 16];
(&mut personal[..12]).copy_from_slice(ZCASH_SIGHASH_PERSONALIZATION_PREFIX);
(&mut personal[12..])
.write_u32::<LittleEndian>(consensus_branch_id.into())
.unwrap();
pub fn index(&self) -> usize {
self.index
}
let mut h = Blake2bParams::new()
.hash_length(32)
.personal(&personal)
.to_state();
let mut tmp = [0; 8];
pub fn precondition(&self) -> &'a Precondition {
self.precondition
}
update_u32!(h, tx.version.header(), tmp);
update_u32!(h, tx.version.version_group_id(), tmp);
update_hash!(
h,
hash_type & SIGHASH_ANYONECANPAY == 0,
prevout_hash(&tx.vin)
);
update_hash!(
h,
hash_type & SIGHASH_ANYONECANPAY == 0
&& (hash_type & SIGHASH_MASK) != SIGHASH_SINGLE
&& (hash_type & SIGHASH_MASK) != SIGHASH_NONE,
sequence_hash(&tx.vin)
);
if (hash_type & SIGHASH_MASK) != SIGHASH_SINGLE
&& (hash_type & SIGHASH_MASK) != SIGHASH_NONE
{
h.update(outputs_hash(&tx.vout).as_ref());
} else if (hash_type & SIGHASH_MASK) == SIGHASH_SINGLE {
match signable_input {
SignableInput::Transparent { index, .. } if index < tx.vout.len() => {
h.update(single_output_hash(&tx.vout[index]).as_ref())
}
_ => h.update(&[0; 32]),
};
} else {
h.update(&[0; 32]);
};
#[cfg(feature = "zfuture")]
if has_tze_components(&tx.version) {
update_hash!(
h,
!tx.tze_inputs.is_empty(),
tze_inputs_hash(&tx.tze_inputs)
);
update_hash!(
h,
!tx.tze_outputs.is_empty(),
tze_outputs_hash(&tx.tze_outputs)
);
}
update_hash!(
h,
!tx.joinsplits.is_empty(),
joinsplits_hash(tx.version, &tx.joinsplits, &tx.joinsplit_pubkey.unwrap())
);
if has_sapling_components(&tx.version) {
update_hash!(
h,
!tx.shielded_spends.is_empty(),
shielded_spends_hash(&tx.shielded_spends)
);
update_hash!(
h,
!tx.shielded_outputs.is_empty(),
shielded_outputs_hash(&tx.shielded_outputs)
);
}
update_u32!(h, tx.lock_time, tmp);
update_u32!(h, tx.expiry_height.into(), tmp);
if has_sapling_components(&tx.version) {
h.update(&tx.value_balance.to_i64_le_bytes());
}
update_u32!(h, hash_type, tmp);
match signable_input {
SignableInput::Transparent {
index,
script_code,
value,
} => {
#[cfg(feature = "zfuture")]
let mut data = if has_tze_components(&tx.version) {
// domain separation here is to avoid collision attacks
// between transparent and TZE inputs.
ZCASH_TRANSPARENT_SIGNED_INPUT_TAG.to_vec()
} else {
vec![]
};
#[cfg(not(feature = "zfuture"))]
let mut data = vec![];
tx.vin[index].prevout.write(&mut data).unwrap();
script_code.write(&mut data).unwrap();
data.extend_from_slice(&value.to_i64_le_bytes());
(&mut data)
.write_u32::<LittleEndian>(tx.vin[index].sequence)
.unwrap();
h.update(&data);
}
#[cfg(feature = "zfuture")]
SignableInput::Tze {
index,
precondition,
value,
} if has_tze_components(&tx.version) => {
// domain separation here is to avoid collision attacks
// between transparent and TZE inputs.
let mut data = ZCASH_TZE_SIGNED_INPUT_TAG.to_vec();
tx.tze_inputs[index].prevout.write(&mut data).unwrap();
CompactSize::write(&mut data, precondition.extension_id.try_into().unwrap())
.unwrap();
CompactSize::write(&mut data, precondition.mode.try_into().unwrap()).unwrap();
Vector::write(&mut data, &precondition.payload, |w, e| w.write_u8(*e)).unwrap();
data.extend_from_slice(&value.to_i64_le_bytes());
h.update(&data);
}
#[cfg(feature = "zfuture")]
SignableInput::Tze { .. } => {
panic!("A request has been made to sign a TZE input, but the signature hash version is not ZFuture");
}
_ => (),
}
h.finalize().as_ref().to_vec()
} else {
unimplemented!()
pub fn value(&self) -> Amount {
self.value
}
}
pub fn signature_hash(
tx: &Transaction,
consensus_branch_id: consensus::BranchId,
hash_type: u32,
signable_input: SignableInput<'_>,
) -> Vec<u8> {
signature_hash_data(tx, consensus_branch_id, hash_type, signable_input)
/// Identifies the input for which a signature hash is to be computed.
pub enum SignableInput<'a> {
    /// No specific transparent or TZE input; used when the signature covers a
    /// shielded spend.
    Shielded,
    /// A transparent input, identified by its index together with its script
    /// code and value.
    Transparent(TransparentInput<'a>),
    #[cfg(feature = "zfuture")]
    /// A TZE input (ZFuture transaction format only).
    Tze(TzeInput<'a>),
}
impl<'a> SignableInput<'a> {
    /// Constructs a `SignableInput` identifying a transparent input.
    pub fn transparent(index: usize, script_code: &'a Script, value: Amount) -> Self {
        SignableInput::Transparent(TransparentInput {
            index,
            script_code,
            value,
        })
    }
    /// Constructs a `SignableInput` identifying a TZE input.
    #[cfg(feature = "zfuture")]
    pub fn tze(index: usize, precondition: &'a Precondition, value: Amount) -> Self {
        SignableInput::Tze(TzeInput {
            index,
            precondition,
            value,
        })
    }
}
/// A 32-byte signature hash produced by [`signature_hash`].
pub struct SignatureHash(Blake2bHash);
impl AsRef<[u8; 32]> for SignatureHash {
    fn as_ref(&self) -> &[u8; 32] {
        // The sighash functions construct their BLAKE2b states with
        // `.hash_length(32)`, so this slice-to-array conversion cannot fail.
        self.0.as_ref().try_into().unwrap()
    }
}
/// Computes the signature hash for an input of `tx`, dispatching on the
/// transaction version: Sprout/Overwinter/Sapling versions use the v4 (legacy)
/// algorithm, while Zip225 (and ZFuture) versions use the v5 algorithm, which
/// draws on the precomputed `txid_parts` digests.
pub fn signature_hash<
    'a,
    SA: sapling::Authorization<Proof = GrothProofBytes>,
    A: Authorization<SaplingAuth = SA>,
>(
    tx: &TransactionData<A>,
    hash_type: u32,
    signable_input: &SignableInput<'a>,
    txid_parts: &TxDigests<Blake2bHash>,
) -> SignatureHash {
    SignatureHash(match tx.version {
        TxVersion::Sprout(_) | TxVersion::Overwinter | TxVersion::Sapling => {
            v4_signature_hash(tx, hash_type, signable_input)
        }
        TxVersion::Zip225 => v5_signature_hash(tx, hash_type, signable_input, txid_parts),
        #[cfg(feature = "zfuture")]
        TxVersion::ZFuture => v5_signature_hash(tx, hash_type, signable_input, txid_parts),
    })
}

View File

@ -0,0 +1,271 @@
use blake2b_simd::{Hash as Blake2bHash, Params as Blake2bParams};
use byteorder::{LittleEndian, WriteBytesExt};
use ff::PrimeField;
use group::GroupEncoding;
use crate::consensus::BranchId;
use super::{
components::{
sapling::{self, GrothProofBytes, OutputDescription, SpendDescription},
sprout::JsDescription,
transparent::{self, TxIn, TxOut},
},
sighash::{SignableInput, SIGHASH_ANYONECANPAY, SIGHASH_MASK, SIGHASH_NONE, SIGHASH_SINGLE},
Authorization, TransactionData,
};
const ZCASH_SIGHASH_PERSONALIZATION_PREFIX: &[u8; 12] = b"ZcashSigHash";
const ZCASH_PREVOUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashPrevoutHash";
const ZCASH_SEQUENCE_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashSequencHash";
const ZCASH_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashOutputsHash";
const ZCASH_JOINSPLITS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashJSplitsHash";
const ZCASH_SHIELDED_SPENDS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashSSpendsHash";
const ZCASH_SHIELDED_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZcashSOutputHash";
// Writes `$value` as 4 little-endian bytes into the scratch buffer `$tmp`
// and feeds those bytes into the hash state `$h`.
macro_rules! update_u32 {
    ($h:expr, $value:expr, $tmp:expr) => {
        (&mut $tmp[..4]).write_u32::<LittleEndian>($value).unwrap();
        $h.update(&$tmp[..4]);
    };
}
// If `$cond` holds, feeds the 32-byte digest `$value` into the hash state
// `$h`; otherwise feeds 32 zero bytes in its place.
macro_rules! update_hash {
    ($h:expr, $cond:expr, $value:expr) => {
        if $cond {
            $h.update(&$value.as_ref());
        } else {
            $h.update(&[0; 32]);
        }
    };
}
/// Hashes the serialized prevouts of all transparent inputs.
fn prevout_hash<TA: transparent::Authorization>(vin: &[TxIn<TA>]) -> Blake2bHash {
    // Each serialized prevout occupies 36 bytes (32-byte txid + 4-byte index).
    let mut buf = Vec::with_capacity(vin.len() * 36);
    vin.iter()
        .for_each(|txin| txin.prevout.write(&mut buf).unwrap());
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_PREVOUTS_HASH_PERSONALIZATION)
        .hash(&buf)
}
/// Hashes the little-endian sequence numbers of all transparent inputs.
fn sequence_hash<TA: transparent::Authorization>(vin: &[TxIn<TA>]) -> Blake2bHash {
    // Each sequence number contributes exactly 4 bytes.
    let mut buf = Vec::with_capacity(vin.len() * 4);
    for txin in vin {
        buf.write_u32::<LittleEndian>(txin.sequence).unwrap();
    }
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_SEQUENCE_HASH_PERSONALIZATION)
        .hash(&buf)
}
/// Hashes the serialized encodings of all transparent outputs.
fn outputs_hash(vout: &[TxOut]) -> Blake2bHash {
    let mut buf = Vec::with_capacity(vout.len() * (4 + 1));
    vout.iter()
        .for_each(|txout| txout.write(&mut buf).unwrap());
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_OUTPUTS_HASH_PERSONALIZATION)
        .hash(&buf)
}
/// Hashes a single serialized transparent output, as required by the
/// SIGHASH_SINGLE case.
fn single_output_hash(tx_out: &TxOut) -> Blake2bHash {
    let mut buf = Vec::new();
    tx_out.write(&mut buf).unwrap();
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_OUTPUTS_HASH_PERSONALIZATION)
        .hash(&buf)
}
/// Hashes the serialized JoinSplit descriptions together with the JoinSplit
/// public key.
///
/// The capacity estimate depends on the proof system in force for the given
/// branch: Groth16 and PHGR13 JoinSplit encodings have different fixed sizes.
fn joinsplits_hash(
    consensus_branch_id: BranchId,
    joinsplits: &[JsDescription],
    joinsplit_pubkey: &[u8; 32],
) -> Blake2bHash {
    let mut data = Vec::with_capacity(
        joinsplits.len()
            * if consensus_branch_id.sprout_uses_groth_proofs() {
                1698 // JSDescription with Groth16 proof
            } else {
                1802 // JsDescription with PHGR13 proof
            },
    );
    for js in joinsplits {
        js.write(&mut data).unwrap();
    }
    data.extend_from_slice(joinsplit_pubkey);
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_JOINSPLITS_HASH_PERSONALIZATION)
        .hash(&data)
}
/// Hashes the serialized fields of all Sapling spend descriptions.
fn shielded_spends_hash<A: sapling::Authorization<Proof = GrothProofBytes>>(
    shielded_spends: &[SpendDescription<A>],
) -> Blake2bHash {
    // Each serialized Sapling spend contributes 384 bytes.
    let mut data = Vec::with_capacity(shielded_spends.len() * 384);
    for s_spend in shielded_spends {
        data.extend_from_slice(&s_spend.cv.to_bytes());
        data.extend_from_slice(s_spend.anchor.to_repr().as_ref());
        // `as_ref()` already yields a byte slice; the extra `&` in the
        // original was a needless borrow.
        data.extend_from_slice(s_spend.nullifier.as_ref());
        s_spend.rk.write(&mut data).unwrap();
        data.extend_from_slice(&s_spend.zkproof);
    }
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_SHIELDED_SPENDS_HASH_PERSONALIZATION)
        .hash(&data)
}
/// Hashes the v4-serialized encodings of all Sapling output descriptions.
fn shielded_outputs_hash(shielded_outputs: &[OutputDescription<GrothProofBytes>]) -> Blake2bHash {
    // Each v4-serialized output description occupies 948 bytes.
    let mut buf = Vec::with_capacity(shielded_outputs.len() * 948);
    shielded_outputs
        .iter()
        .for_each(|output| output.write_v4(&mut buf).unwrap());
    Blake2bParams::new()
        .hash_length(32)
        .personal(ZCASH_SHIELDED_OUTPUTS_HASH_PERSONALIZATION)
        .hash(&buf)
}
/// Computes the legacy (pre-v5) signature hash for the given transaction and
/// signable input.
///
/// # Panics
///
/// Panics if the transaction version is pre-Overwinter, if a transparent
/// input is requested but the transaction has no transparent bundle, or if a
/// TZE input is requested (TZE inputs cannot appear in v4 transactions).
pub fn v4_signature_hash<
    SA: sapling::Authorization<Proof = GrothProofBytes>,
    A: Authorization<SaplingAuth = SA>,
>(
    tx: &TransactionData<A>,
    hash_type: u32,
    signable_input: &SignableInput<'_>,
) -> Blake2bHash {
    if tx.version.has_overwinter() {
        // The BLAKE2b personalization is "ZcashSigHash" followed by the
        // little-endian consensus branch ID.
        let mut personal = [0; 16];
        (&mut personal[..12]).copy_from_slice(ZCASH_SIGHASH_PERSONALIZATION_PREFIX);
        (&mut personal[12..])
            .write_u32::<LittleEndian>(tx.consensus_branch_id.into())
            .unwrap();
        let mut h = Blake2bParams::new()
            .hash_length(32)
            .personal(&personal)
            .to_state();
        let mut tmp = [0; 8];
        update_u32!(h, tx.version.header(), tmp);
        update_u32!(h, tx.version.version_group_id(), tmp);
        // hashPrevouts: zeroed when ANYONECANPAY is set.
        update_hash!(
            h,
            hash_type & SIGHASH_ANYONECANPAY == 0,
            prevout_hash(
                tx.transparent_bundle
                    .as_ref()
                    .map_or(&[], |b| b.vin.as_slice())
            )
        );
        // hashSequence: zeroed for ANYONECANPAY, SINGLE, or NONE.
        update_hash!(
            h,
            (hash_type & SIGHASH_ANYONECANPAY) == 0
                && (hash_type & SIGHASH_MASK) != SIGHASH_SINGLE
                && (hash_type & SIGHASH_MASK) != SIGHASH_NONE,
            sequence_hash(
                tx.transparent_bundle
                    .as_ref()
                    .map_or(&[], |b| b.vin.as_slice())
            )
        );
        // hashOutputs: all outputs normally; only the output matching the
        // signed input's index for SIGHASH_SINGLE; zeroed otherwise.
        if (hash_type & SIGHASH_MASK) != SIGHASH_SINGLE
            && (hash_type & SIGHASH_MASK) != SIGHASH_NONE
        {
            h.update(
                outputs_hash(
                    tx.transparent_bundle
                        .as_ref()
                        .map_or(&[], |b| b.vout.as_slice()),
                )
                .as_bytes(),
            );
        } else if (hash_type & SIGHASH_MASK) == SIGHASH_SINGLE {
            match (tx.transparent_bundle.as_ref(), signable_input) {
                (Some(b), SignableInput::Transparent(input)) if input.index() < b.vout.len() => {
                    h.update(single_output_hash(&b.vout[input.index()]).as_bytes())
                }
                _ => h.update(&[0; 32]),
            };
        } else {
            h.update(&[0; 32]);
        };
        update_hash!(
            h,
            !tx.sprout_bundle
                .as_ref()
                .map_or(true, |b| b.joinsplits.is_empty()),
            {
                // Safe to unwrap: the condition above established that the
                // bundle is present and non-empty.
                let bundle = tx.sprout_bundle.as_ref().unwrap();
                joinsplits_hash(
                    tx.consensus_branch_id,
                    &bundle.joinsplits,
                    &bundle.joinsplit_pubkey,
                )
            }
        );
        if tx.version.has_sapling() {
            update_hash!(
                h,
                !tx.sapling_bundle
                    .as_ref()
                    .map_or(true, |b| b.shielded_spends.is_empty()),
                shielded_spends_hash(&tx.sapling_bundle.as_ref().unwrap().shielded_spends)
            );
            update_hash!(
                h,
                !tx.sapling_bundle
                    .as_ref()
                    .map_or(true, |b| b.shielded_outputs.is_empty()),
                shielded_outputs_hash(&tx.sapling_bundle.as_ref().unwrap().shielded_outputs)
            );
        }
        update_u32!(h, tx.lock_time, tmp);
        update_u32!(h, tx.expiry_height.into(), tmp);
        if tx.version.has_sapling() {
            h.update(&tx.sapling_value_balance().to_i64_le_bytes());
        }
        update_u32!(h, hash_type, tmp);
        match signable_input {
            SignableInput::Shielded => (),
            SignableInput::Transparent(input) => {
                if let Some(bundle) = tx.transparent_bundle.as_ref() {
                    // Append the serialized form of the input being signed:
                    // prevout, script code, value, then sequence.
                    let mut data = vec![];
                    bundle.vin[input.index()].prevout.write(&mut data).unwrap();
                    input.script_code().write(&mut data).unwrap();
                    data.extend_from_slice(&input.value().to_i64_le_bytes());
                    (&mut data)
                        .write_u32::<LittleEndian>(bundle.vin[input.index()].sequence)
                        .unwrap();
                    h.update(&data);
                } else {
                    panic!(
                        "A request has been made to sign a transparent input, but none are present."
                    );
                }
            }
            #[cfg(feature = "zfuture")]
            SignableInput::Tze(_) => {
                panic!("A request has been made to sign a TZE input, but the transaction version is not ZFuture");
            }
        }
        h.finalize()
    } else {
        panic!("Signature hashing for pre-overwinter transactions is not supported.")
    }
}

View File

@ -0,0 +1,182 @@
use std::io::Write;
use blake2b_simd::{Hash as Blake2bHash, Params, State};
use byteorder::{LittleEndian, WriteBytesExt};
use crate::transaction::{
components::transparent::{self, TxOut},
sighash::{
SignableInput, TransparentInput, SIGHASH_ANYONECANPAY, SIGHASH_MASK, SIGHASH_NONE,
SIGHASH_SINGLE,
},
txid::{
to_hash, transparent_outputs_hash, transparent_prevout_hash, transparent_sequence_hash,
},
Authorization, TransactionData, TransparentDigests, TxDigests,
};
#[cfg(feature = "zfuture")]
use std::convert::TryInto;
#[cfg(feature = "zfuture")]
use crate::{
serialize::{CompactSize, Vector},
transaction::{components::tze, sighash::TzeInput, TzeDigests},
};
const ZCASH_TRANSPARENT_INPUT_HASH_PERSONALIZATION: &[u8; 16] = b"Zcash___TxInHash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_INPUT_HASH_PERSONALIZATION: &[u8; 16] = b"Zcash__TzeInHash";
/// Builds a BLAKE2b hashing state that produces 32-byte digests under the
/// given 16-byte personalization string.
fn hasher(personal: &[u8; 16]) -> State {
    let mut params = Params::new();
    params.hash_length(32);
    params.personal(personal);
    params.to_state()
}
/// Derives the transparent digests used for a v5 signature hash over a single
/// transparent input, starting from the txid-level digests and substituting
/// empty-list hashes according to the `hash_type` flags (ANYONECANPAY /
/// SINGLE / NONE), then computing the per-input digest.
fn transparent_input_sigdigests<A: transparent::Authorization>(
    bundle: &transparent::Bundle<A>,
    input: &TransparentInput<'_>,
    txid_digests: &TransparentDigests<Blake2bHash>,
    hash_type: u32,
) -> TransparentDigests<Blake2bHash> {
    let flag_anyonecanpay = hash_type & SIGHASH_ANYONECANPAY != 0;
    let flag_single = hash_type & SIGHASH_MASK == SIGHASH_SINGLE;
    let flag_none = hash_type & SIGHASH_MASK == SIGHASH_NONE;

    // ANYONECANPAY commits to no prevouts other than the signed input's own.
    let prevout_digest = if flag_anyonecanpay {
        transparent_prevout_hash::<A>(&[])
    } else {
        txid_digests.prevout_digest
    };

    // Sequence numbers are only committed to in the default (ALL) mode.
    let sequence_digest = if flag_anyonecanpay || flag_single || flag_none {
        transparent_sequence_hash::<A>(&[])
    } else {
        txid_digests.sequence_digest
    };

    // SINGLE commits to only the output at the signed input's index (or the
    // empty hash if out of range); NONE commits to no outputs at all.
    let outputs_digest = if flag_single {
        if input.index() < bundle.vout.len() {
            transparent_outputs_hash(&[&bundle.vout[input.index()]])
        } else {
            transparent_outputs_hash::<TxOut>(&[])
        }
    } else if flag_none {
        transparent_outputs_hash::<TxOut>(&[])
    } else {
        txid_digests.outputs_digest
    };

    // If we are serializing an input (i.e. this is not a JoinSplit signature hash):
    //   a. outpoint (32-byte hash + 4-byte little endian)
    //   b. scriptCode of the input (serialized as scripts inside CTxOuts)
    //   c. value of the output spent by this input (8-byte little endian)
    //   d. nSequence of the input (4-byte little endian)
    let mut ch = hasher(ZCASH_TRANSPARENT_INPUT_HASH_PERSONALIZATION);
    let txin = &bundle.vin[input.index()];
    txin.prevout.write(&mut ch).unwrap();
    input.script_code().write(&mut ch).unwrap();
    ch.write_all(&input.value().to_i64_le_bytes()).unwrap();
    ch.write_u32::<LittleEndian>(txin.sequence).unwrap();
    let per_input_digest = ch.finalize();

    TransparentDigests {
        prevout_digest,
        sequence_digest,
        outputs_digest,
        per_input_digest: Some(per_input_digest),
    }
}
/// Derives the TZE digests used for a v5 signature hash over a single TZE
/// input: the txid-level input/output digests are reused unchanged, and the
/// per-input digest commits to the input's prevout, precondition
/// (extension id, mode, payload), and value.
#[cfg(feature = "zfuture")]
fn tze_input_sigdigests<A: tze::Authorization>(
    bundle: &tze::Bundle<A>,
    input: &TzeInput<'_>,
    txid_digests: &TzeDigests<Blake2bHash>,
) -> TzeDigests<Blake2bHash> {
    let mut ch = hasher(ZCASH_TZE_INPUT_HASH_PERSONALIZATION);
    let tzein = &bundle.vin[input.index()];
    tzein.prevout.write(&mut ch).unwrap();
    // Precondition fields are serialized as compact sizes + length-prefixed
    // payload, matching the transaction wire format.
    CompactSize::write(
        &mut ch,
        input.precondition().extension_id.try_into().unwrap(),
    )
    .unwrap();
    CompactSize::write(&mut ch, input.precondition().mode.try_into().unwrap()).unwrap();
    Vector::write(&mut ch, &input.precondition().payload, |w, e| {
        w.write_u8(*e)
    })
    .unwrap();
    ch.write_all(&input.value().to_i64_le_bytes()).unwrap();
    let per_input_digest = ch.finalize();

    TzeDigests {
        inputs_digest: txid_digests.inputs_digest,
        outputs_digest: txid_digests.outputs_digest,
        per_input_digest: Some(per_input_digest),
    }
}
/// Computes the v5 (ZIP 244) signature hash for `tx`, reusing the
/// precomputed txid-level digests in `txid_parts` and substituting
/// signature-specific digests for the bundle containing the signed input.
///
/// For `SignableInput::Shielded` this is exactly the txid tree root; for
/// transparent (and, under `zfuture`, TZE) inputs, the corresponding branch
/// is replaced by per-input signature digests.
///
/// # Panics
///
/// Panics if `signable_input` names a transparent or TZE input but the
/// transaction carries no corresponding bundle or digests.
pub fn v5_signature_hash<A: Authorization>(
    tx: &TransactionData<A>,
    hash_type: u32,
    signable_input: &SignableInput<'_>,
    txid_parts: &TxDigests<Blake2bHash>,
) -> Blake2bHash {
    match signable_input {
        SignableInput::Shielded => to_hash(
            tx.version,
            tx.consensus_branch_id,
            txid_parts.header_digest,
            txid_parts.transparent_digests.as_ref(),
            txid_parts.sapling_digest,
            txid_parts.orchard_digest,
            #[cfg(feature = "zfuture")]
            txid_parts.tze_digests.as_ref(),
        ),
        SignableInput::Transparent(input) => {
            if let Some((bundle, txid_digests)) = tx
                .transparent_bundle
                .as_ref()
                .zip(txid_parts.transparent_digests.as_ref())
            {
                to_hash(
                    tx.version,
                    tx.consensus_branch_id,
                    txid_parts.header_digest,
                    // Only the transparent branch differs from the txid tree.
                    Some(&transparent_input_sigdigests(
                        bundle,
                        input,
                        txid_digests,
                        hash_type,
                    )),
                    txid_parts.sapling_digest,
                    txid_parts.orchard_digest,
                    #[cfg(feature = "zfuture")]
                    txid_parts.tze_digests.as_ref(),
                )
            } else {
                panic!("Transaction has no transparent inputs to sign.")
            }
        }
        #[cfg(feature = "zfuture")]
        SignableInput::Tze(input) => {
            if let Some((bundle, txid_digests)) =
                tx.tze_bundle.as_ref().zip(txid_parts.tze_digests.as_ref())
            {
                to_hash(
                    tx.version,
                    tx.consensus_branch_id,
                    txid_parts.header_digest,
                    txid_parts.transparent_digests.as_ref(),
                    txid_parts.sapling_digest,
                    txid_parts.orchard_digest,
                    #[cfg(feature = "zfuture")]
                    // Only the TZE branch differs from the txid tree.
                    Some(&tze_input_sigdigests(bundle, input, txid_digests)),
                )
            } else {
                panic!("Transaction has no TZE inputs to sign.")
            }
        }
    }
}

View File

@ -1,22 +1,23 @@
use ff::Field;
use rand_core::OsRng;
use std::ops::Deref;
use proptest::prelude::*;
use crate::{constants::SPENDING_KEY_GENERATOR, sapling::redjubjub::PrivateKey};
use crate::{consensus::BranchId, legacy::Script};
use super::{
components::Amount,
sighash::{signature_hash, SignableInput},
Transaction, TransactionData,
sighash::{SignableInput, SIGHASH_ALL, SIGHASH_ANYONECANPAY, SIGHASH_NONE, SIGHASH_SINGLE},
sighash_v4::v4_signature_hash,
sighash_v5::v5_signature_hash,
testing::arb_tx,
txid::TxIdDigester,
Transaction,
};
use super::testing::{arb_branch_id, arb_tx};
#[test]
fn tx_read_write() {
let data = &self::data::tx_read_write::TX_READ_WRITE;
let tx = Transaction::read(&data[..]).unwrap();
let tx = Transaction::read(&data[..], BranchId::Canopy).unwrap();
assert_eq!(
format!("{}", tx.txid()),
"64f0bd7fe30ce23753358fe3a2dc835b8fba9c0274c4e2c54a6f73114cb55639"
@ -27,94 +28,89 @@ fn tx_read_write() {
assert_eq!(&data[..], &encoded[..]);
}
#[test]
fn tx_write_rejects_unexpected_joinsplit_pubkey() {
// Succeeds without a JoinSplit pubkey
assert!(TransactionData::new().freeze().is_ok());
fn check_roundtrip(tx: Transaction) -> Result<(), TestCaseError> {
let mut txn_bytes = vec![];
tx.write(&mut txn_bytes).unwrap();
let txo = Transaction::read(&txn_bytes[..], tx.consensus_branch_id).unwrap();
// Fails with an unexpected JoinSplit pubkey
{
let mut tx = TransactionData::new();
tx.joinsplit_pubkey = Some([0; 32]);
assert!(tx.freeze().is_err());
}
prop_assert_eq!(tx.version, txo.version);
#[cfg(feature = "zfuture")]
prop_assert_eq!(tx.tze_bundle.as_ref(), txo.tze_bundle.as_ref());
prop_assert_eq!(tx.lock_time, txo.lock_time);
prop_assert_eq!(
tx.transparent_bundle.as_ref(),
txo.transparent_bundle.as_ref()
);
prop_assert_eq!(tx.sapling_value_balance(), txo.sapling_value_balance());
prop_assert_eq!(
tx.orchard_bundle.as_ref().map(|v| *v.value_balance()),
txo.orchard_bundle.as_ref().map(|v| *v.value_balance())
);
Ok(())
}
#[test]
fn tx_write_rejects_unexpected_joinsplit_sig() {
// Succeeds without a JoinSplit signature
assert!(TransactionData::new().freeze().is_ok());
// Fails with an unexpected JoinSplit signature
{
let mut tx = TransactionData::new();
tx.joinsplit_sig = Some([0; 64]);
assert!(tx.freeze().is_err());
}
}
#[test]
fn tx_write_rejects_unexpected_binding_sig() {
// Succeeds without a binding signature
assert!(TransactionData::new().freeze().is_ok());
// Fails with an unexpected binding signature
{
let mut rng = OsRng;
let sk = PrivateKey(jubjub::Fr::random(&mut rng));
let sig = sk.sign(b"Foo bar", &mut rng, SPENDING_KEY_GENERATOR);
let mut tx = TransactionData::new();
tx.binding_sig = Some(sig);
assert!(tx.freeze().is_err());
proptest! {
#[test]
#[ignore]
fn tx_serialization_roundtrip_sprout(tx in arb_tx(BranchId::Sprout)) {
check_roundtrip(tx)?;
}
}
proptest! {
#[test]
fn tx_serialization_roundtrip(tx in arb_branch_id().prop_flat_map(arb_tx)) {
let mut txn_bytes = vec![];
tx.write(&mut txn_bytes).unwrap();
let txo = Transaction::read(&txn_bytes[..]).unwrap();
assert_eq!(tx.version, txo.version);
assert_eq!(tx.vin, txo.vin);
assert_eq!(tx.vout, txo.vout);
#[cfg(feature = "zfuture")]
assert_eq!(tx.tze_inputs, txo.tze_inputs);
#[cfg(feature = "zfuture")]
assert_eq!(tx.tze_outputs, txo.tze_outputs);
assert_eq!(tx.lock_time, txo.lock_time);
assert_eq!(tx.value_balance, txo.value_balance);
#[ignore]
fn tx_serialization_roundtrip_overwinter(tx in arb_tx(BranchId::Overwinter)) {
check_roundtrip(tx)?;
}
}
proptest! {
#[test]
#[ignore]
fn tx_serialization_roundtrip_sapling(tx in arb_tx(BranchId::Sapling)) {
check_roundtrip(tx)?;
}
}
proptest! {
#[test]
#[ignore]
fn tx_serialization_roundtrip_blossom(tx in arb_tx(BranchId::Blossom)) {
check_roundtrip(tx)?;
}
}
proptest! {
#[test]
#[ignore]
fn tx_serialization_roundtrip_heartwood(tx in arb_tx(BranchId::Heartwood)) {
check_roundtrip(tx)?;
}
}
proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn tx_serialization_roundtrip_canopy(tx in arb_tx(BranchId::Canopy)) {
check_roundtrip(tx)?;
}
}
proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn tx_serialization_roundtrip_nu5(tx in arb_tx(BranchId::Nu5)) {
check_roundtrip(tx)?;
}
}
#[test]
#[cfg(feature = "zfuture")]
fn test_tze_tx_parse() {
let txn_bytes = vec![
0xFF, 0xFF, 0x00, 0x80, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x01, 0x52, 0x52, 0x52, 0x52,
0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52,
0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x52, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x20, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x30, 0x75, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x20, 0xd9, 0x81, 0x80, 0x87, 0xde, 0x72, 0x44, 0xab, 0xc1, 0xb5, 0xfc,
0xf2, 0x8e, 0x55, 0xe4, 0x2c, 0x7f, 0xf9, 0xc6, 0x78, 0xc0, 0x60, 0x51, 0x81, 0xf3, 0x7a,
0xc5, 0xd7, 0x41, 0x4a, 0x7b, 0x95, 0x00, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
];
let tx = Transaction::read(&txn_bytes[..]);
match tx {
Ok(tx) => assert!(!tx.tze_inputs.is_empty()),
Err(e) => panic!(
"An error occurred parsing a serialized TZE transaction: {}",
e
),
proptest! {
#[test]
#[ignore]
fn tx_serialization_roundtrip_future(tx in arb_tx(BranchId::ZFuture)) {
check_roundtrip(tx)?;
}
}
@ -122,7 +118,7 @@ mod data;
#[test]
fn zip_0143() {
for tv in self::data::zip_0143::make_test_vectors() {
let tx = Transaction::read(&tv.tx[..]).unwrap();
let tx = Transaction::read(&tv.tx[..], tv.consensus_branch_id).unwrap();
let signable_input = match tv.transparent_input {
Some(n) => SignableInput::transparent(
n as usize,
@ -133,7 +129,7 @@ fn zip_0143() {
};
assert_eq!(
signature_hash(&tx, tv.consensus_branch_id, tv.hash_type, signable_input),
v4_signature_hash(tx.deref(), tv.hash_type, &signable_input).as_ref(),
tv.sighash
);
}
@ -142,7 +138,7 @@ fn zip_0143() {
#[test]
fn zip_0243() {
for tv in self::data::zip_0243::make_test_vectors() {
let tx = Transaction::read(&tv.tx[..]).unwrap();
let tx = Transaction::read(&tv.tx[..], tv.consensus_branch_id).unwrap();
let signable_input = match tv.transparent_input {
Some(n) => SignableInput::transparent(
n as usize,
@ -153,8 +149,89 @@ fn zip_0243() {
};
assert_eq!(
signature_hash(&tx, tv.consensus_branch_id, tv.hash_type, signable_input),
v4_signature_hash(tx.deref(), tv.hash_type, &signable_input).as_ref(),
tv.sighash
);
}
}
/// Checks the ZIP 244 test vectors: for each vector, the parsed
/// transaction's txid and auth commitment, then the v5 signature hash under
/// every hash-type variant (for vectors with a transparent input) or under
/// SIGHASH_ALL only (shielded-only vectors).
#[test]
fn zip_0244() {
    for tv in self::data::zip_0244::make_test_vectors() {
        let tx = Transaction::read(&tv.tx[..], BranchId::Nu5).unwrap();
        assert_eq!(tx.txid.as_ref(), &tv.txid);
        assert_eq!(tx.auth_commitment().as_ref(), &tv.auth_digest);

        let txid_parts = tx.deref().digest(TxIdDigester);
        match tv.transparent_input {
            Some(n) => {
                let script = Script(tv.script_code.unwrap());
                let signable_input = SignableInput::transparent(
                    n as usize,
                    &script,
                    Amount::from_nonnegative_i64(tv.amount.unwrap()).unwrap(),
                );

                assert_eq!(
                    v5_signature_hash(tx.deref(), SIGHASH_ALL, &signable_input, &txid_parts)
                        .as_ref(),
                    &tv.sighash_all
                );

                assert_eq!(
                    v5_signature_hash(tx.deref(), SIGHASH_NONE, &signable_input, &txid_parts)
                        .as_ref(),
                    &tv.sighash_none.unwrap()
                );

                assert_eq!(
                    v5_signature_hash(tx.deref(), SIGHASH_SINGLE, &signable_input, &txid_parts)
                        .as_ref(),
                    &tv.sighash_single.unwrap()
                );

                assert_eq!(
                    v5_signature_hash(
                        tx.deref(),
                        SIGHASH_ALL | SIGHASH_ANYONECANPAY,
                        &signable_input,
                        &txid_parts,
                    )
                    .as_ref(),
                    &tv.sighash_all_anyone.unwrap()
                );

                assert_eq!(
                    v5_signature_hash(
                        tx.deref(),
                        SIGHASH_NONE | SIGHASH_ANYONECANPAY,
                        &signable_input,
                        &txid_parts,
                    )
                    .as_ref(),
                    &tv.sighash_none_anyone.unwrap()
                );

                assert_eq!(
                    v5_signature_hash(
                        tx.deref(),
                        SIGHASH_SINGLE | SIGHASH_ANYONECANPAY,
                        &signable_input,
                        &txid_parts,
                    )
                    .as_ref(),
                    &tv.sighash_single_anyone.unwrap()
                );
            }
            _ => {
                // Shielded-only: the signature hash equals the txid tree root.
                let signable_input = SignableInput::Shielded;

                assert_eq!(
                    v5_signature_hash(tx.deref(), SIGHASH_ALL, &signable_input, &txid_parts)
                        .as_ref(),
                    tv.sighash_all
                );
            }
        };
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,527 @@
use std::borrow::Borrow;
use std::convert::TryFrom;
use std::io::Write;
use blake2b_simd::{Hash as Blake2bHash, Params, State};
use byteorder::{LittleEndian, WriteBytesExt};
use ff::PrimeField;
use group::GroupEncoding;
use orchard::bundle::{self as orchard};
use crate::consensus::{BlockHeight, BranchId};
use super::{
components::{
amount::Amount,
sapling::{self, OutputDescription, SpendDescription},
transparent::{self, TxIn, TxOut},
},
Authorization, Authorized, TransactionDigest, TransparentDigests, TxDigests, TxId, TxVersion,
};
#[cfg(feature = "zfuture")]
use super::{
components::tze::{self, TzeIn, TzeOut},
TzeDigests,
};
/// TxId tree root personalization
const ZCASH_TX_PERSONALIZATION_PREFIX: &[u8; 12] = b"ZcashTxHash_";
// TxId level 1 node personalization
const ZCASH_HEADERS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdHeadersHash";
const ZCASH_TRANSPARENT_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdTranspaHash";
const ZCASH_SAPLING_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSaplingHash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdTZE____Hash";
// TxId transparent level 2 node personalization
const ZCASH_PREVOUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdPrevoutHash";
const ZCASH_SEQUENCE_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSequencHash";
const ZCASH_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdOutputsHash";
// TxId tze level 2 node personalization
#[cfg(feature = "zfuture")]
const ZCASH_TZE_INPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdTZEIns_Hash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdTZEOutsHash";
// TxId sapling level 2 node personalization
const ZCASH_SAPLING_SPENDS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSSpendsHash";
const ZCASH_SAPLING_SPENDS_COMPACT_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSSpendCHash";
const ZCASH_SAPLING_SPENDS_NONCOMPACT_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSSpendNHash";
const ZCASH_SAPLING_OUTPUTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSOutputHash";
const ZCASH_SAPLING_OUTPUTS_COMPACT_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSOutC__Hash";
const ZCASH_SAPLING_OUTPUTS_MEMOS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSOutM__Hash";
const ZCASH_SAPLING_OUTPUTS_NONCOMPACT_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxIdSOutN__Hash";
const ZCASH_AUTH_PERSONALIZATION_PREFIX: &[u8; 12] = b"ZTxAuthHash_";
const ZCASH_TRANSPARENT_SCRIPTS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxAuthTransHash";
const ZCASH_SAPLING_SIGS_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxAuthSapliHash";
#[cfg(feature = "zfuture")]
const ZCASH_TZE_WITNESSES_HASH_PERSONALIZATION: &[u8; 16] = b"ZTxAuthTZE__Hash";
/// Builds a BLAKE2b hashing state that produces 32-byte digests under the
/// given 16-byte personalization string.
fn hasher(personal: &[u8; 16]) -> State {
    let mut params = Params::new();
    params.hash_length(32);
    params.personal(personal);
    params.to_state()
}
/// Digests the serialized outpoint (`prevout`) of every transparent input,
/// in order, under `ZCASH_PREVOUTS_HASH_PERSONALIZATION`. An empty `vin`
/// produces the fixed hash of just the personalization string.
pub(crate) fn transparent_prevout_hash<TransparentAuth: transparent::Authorization>(
    vin: &[TxIn<TransparentAuth>],
) -> Blake2bHash {
    let mut state = hasher(ZCASH_PREVOUTS_HASH_PERSONALIZATION);
    vin.iter()
        .for_each(|txin| txin.prevout.write(&mut state).unwrap());
    state.finalize()
}
/// Digests the little-endian `sequence` field of every transparent input,
/// in order, under `ZCASH_SEQUENCE_HASH_PERSONALIZATION`.
pub(crate) fn transparent_sequence_hash<TransparentAuth: transparent::Authorization>(
    vin: &[TxIn<TransparentAuth>],
) -> Blake2bHash {
    let mut state = hasher(ZCASH_SEQUENCE_HASH_PERSONALIZATION);
    vin.iter().for_each(|txin| {
        state.write_u32::<LittleEndian>(txin.sequence).unwrap();
    });
    state.finalize()
}
/// Digests the full serialization of every transparent output, in order,
/// under `ZCASH_OUTPUTS_HASH_PERSONALIZATION`. An empty `vout` produces the
/// fixed hash of just the personalization string.
pub(crate) fn transparent_outputs_hash<T: Borrow<TxOut>>(vout: &[T]) -> Blake2bHash {
    let mut state = hasher(ZCASH_OUTPUTS_HASH_PERSONALIZATION);
    vout.iter()
        .for_each(|txout| txout.borrow().write(&mut state).unwrap());
    state.finalize()
}
/// Digests each TZE input — excluding its witness data — in order, under
/// `ZCASH_TZE_INPUTS_HASH_PERSONALIZATION`. An empty list produces the fixed
/// hash of just the personalization string.
#[cfg(feature = "zfuture")]
pub(crate) fn hash_tze_inputs<A>(tze_inputs: &[TzeIn<A>]) -> Blake2bHash {
    let mut state = hasher(ZCASH_TZE_INPUTS_HASH_PERSONALIZATION);
    tze_inputs
        .iter()
        .for_each(|tzein| tzein.write_without_witness(&mut state).unwrap());
    state.finalize()
}
/// Digests the full serialization of every TZE output, in order, under
/// `ZCASH_TZE_OUTPUTS_HASH_PERSONALIZATION`. An empty list produces the
/// fixed hash of just the personalization string.
#[cfg(feature = "zfuture")]
pub(crate) fn hash_tze_outputs(tze_outputs: &[TzeOut]) -> Blake2bHash {
    let mut state = hasher(ZCASH_TZE_OUTPUTS_HASH_PERSONALIZATION);
    tze_outputs
        .iter()
        .for_each(|tzeout| tzeout.write(&mut state).unwrap());
    state.finalize()
}
/// Write disjoint parts of each Sapling shielded spend to a pair of hashes:
/// * \[nullifier*\] - personalized with ZCASH_SAPLING_SPENDS_COMPACT_HASH_PERSONALIZATION
/// * \[(cv, anchor, rk, zkproof)*\] - personalized with ZCASH_SAPLING_SPENDS_NONCOMPACT_HASH_PERSONALIZATION
///
/// Then, hash these together personalized by ZCASH_SAPLING_SPENDS_HASH_PERSONALIZATION.
/// With no spends this is the hash of just the personalization string.
pub(crate) fn hash_sapling_spends<A: sapling::Authorization>(
    shielded_spends: &[SpendDescription<A>],
) -> Blake2bHash {
    let mut h = hasher(ZCASH_SAPLING_SPENDS_HASH_PERSONALIZATION);
    if !shielded_spends.is_empty() {
        let mut ch = hasher(ZCASH_SAPLING_SPENDS_COMPACT_HASH_PERSONALIZATION);
        let mut nh = hasher(ZCASH_SAPLING_SPENDS_NONCOMPACT_HASH_PERSONALIZATION);
        for s_spend in shielded_spends {
            // We build the hash of nullifiers separately for compact blocks.
            // Note: `as_ref()` / `as_bytes()` already yield references, so no
            // extra borrow is taken (fixes clippy::needless_borrow).
            ch.write_all(s_spend.nullifier.as_ref()).unwrap();

            nh.write_all(&s_spend.cv.to_bytes()).unwrap();
            nh.write_all(&s_spend.anchor.to_repr()).unwrap();
            s_spend.rk.write(&mut nh).unwrap();
        }

        let compact_digest = ch.finalize();
        h.write_all(compact_digest.as_bytes()).unwrap();
        let noncompact_digest = nh.finalize();
        h.write_all(noncompact_digest.as_bytes()).unwrap();
    }
    h.finalize()
}
/// Write disjoint parts of each Sapling shielded output as 3 separate hashes:
/// * \[(cmu, epk, enc_ciphertext\[..52\])*\] personalized with ZCASH_SAPLING_OUTPUTS_COMPACT_HASH_PERSONALIZATION
/// * \[enc_ciphertext\[52..564\]*\] (memo ciphertexts) personalized with ZCASH_SAPLING_OUTPUTS_MEMOS_HASH_PERSONALIZATION
/// * \[(cv, enc_ciphertext\[564..\], out_ciphertext, zkproof)*\] personalized with ZCASH_SAPLING_OUTPUTS_NONCOMPACT_HASH_PERSONALIZATION
///
/// Then, hash these together personalized with ZCASH_SAPLING_OUTPUTS_HASH_PERSONALIZATION.
/// With no outputs this is the hash of just the personalization string.
pub(crate) fn hash_sapling_outputs<A>(shielded_outputs: &[OutputDescription<A>]) -> Blake2bHash {
    let mut h = hasher(ZCASH_SAPLING_OUTPUTS_HASH_PERSONALIZATION);
    if !shielded_outputs.is_empty() {
        let mut ch = hasher(ZCASH_SAPLING_OUTPUTS_COMPACT_HASH_PERSONALIZATION);
        let mut mh = hasher(ZCASH_SAPLING_OUTPUTS_MEMOS_HASH_PERSONALIZATION);
        let mut nh = hasher(ZCASH_SAPLING_OUTPUTS_NONCOMPACT_HASH_PERSONALIZATION);
        for s_out in shielded_outputs {
            // `as_ref()` / `as_bytes()` already yield references, so no extra
            // borrow is taken (fixes clippy::needless_borrow).
            ch.write_all(s_out.cmu.to_repr().as_ref()).unwrap();
            ch.write_all(s_out.ephemeral_key.as_ref()).unwrap();
            ch.write_all(&s_out.enc_ciphertext[..52]).unwrap();

            mh.write_all(&s_out.enc_ciphertext[52..564]).unwrap();

            nh.write_all(&s_out.cv.to_bytes()).unwrap();
            nh.write_all(&s_out.enc_ciphertext[564..]).unwrap();
            nh.write_all(&s_out.out_ciphertext).unwrap();
        }

        h.write_all(ch.finalize().as_bytes()).unwrap();
        h.write_all(mh.finalize().as_bytes()).unwrap();
        h.write_all(nh.finalize().as_bytes()).unwrap();
    }
    h.finalize()
}
/// Assembles the transparent component digests that feed into the txid:
/// prevouts, sequences, and outputs hashes. `per_input_digest` is left
/// unset here; it is only populated when computing signature hashes.
fn transparent_digests<A: transparent::Authorization>(
    bundle: &transparent::Bundle<A>,
) -> TransparentDigests<Blake2bHash> {
    let prevout_digest = transparent_prevout_hash(&bundle.vin);
    let sequence_digest = transparent_sequence_hash(&bundle.vin);
    let outputs_digest = transparent_outputs_hash(&bundle.vout);

    TransparentDigests {
        prevout_digest,
        sequence_digest,
        outputs_digest,
        per_input_digest: None,
    }
}
/// Assembles the TZE component digests that feed into the txid: the inputs
/// and outputs hashes. `per_input_digest` is left unset here; it is only
/// populated when computing signature hashes.
#[cfg(feature = "zfuture")]
fn tze_digests<A: tze::Authorization>(bundle: &tze::Bundle<A>) -> TzeDigests<Blake2bHash> {
    let inputs_digest = hash_tze_inputs(&bundle.vin);
    let outputs_digest = hash_tze_outputs(&bundle.vout);

    TzeDigests {
        inputs_digest,
        outputs_digest,
        per_input_digest: None,
    }
}
/// Digests the transaction header fields — version header, version group ID,
/// consensus branch ID, lock time, and expiry height — as little-endian u32s
/// under `ZCASH_HEADERS_HASH_PERSONALIZATION`, in that order.
fn hash_header_txid_data(
    version: TxVersion,
    // we commit to the consensus branch ID with the header
    consensus_branch_id: BranchId,
    lock_time: u32,
    expiry_height: BlockHeight,
) -> Blake2bHash {
    let mut state = hasher(ZCASH_HEADERS_HASH_PERSONALIZATION);
    let fields: [u32; 5] = [
        version.header(),
        version.version_group_id(),
        consensus_branch_id.into(),
        lock_time,
        expiry_height.into(),
    ];
    for field in &fields {
        state.write_u32::<LittleEndian>(*field).unwrap();
    }
    state.finalize()
}
/// Combines the transparent component digests into the level-1 transparent
/// node of the digest tree. `None` (no transparent bundle) yields the hash
/// of just the personalization string.
fn hash_transparent_txid_data(t_digests: Option<&TransparentDigests<Blake2bHash>>) -> Blake2bHash {
    let mut state = hasher(ZCASH_TRANSPARENT_HASH_PERSONALIZATION);
    if let Some(d) = t_digests {
        state.write_all(d.prevout_digest.as_bytes()).unwrap();
        state.write_all(d.sequence_digest.as_bytes()).unwrap();
        state.write_all(d.outputs_digest.as_bytes()).unwrap();
        // Present only when this tree is being built for a signature hash.
        if let Some(per_input) = &d.per_input_digest {
            state.write_all(per_input.as_bytes()).unwrap();
        }
    }
    state.finalize()
}
/// Digests a Sapling bundle for the txid tree: the spends hash, outputs
/// hash, and little-endian value balance, under
/// `ZCASH_SAPLING_HASH_PERSONALIZATION`. A bundle with neither spends nor
/// outputs yields the hash of just the personalization string.
fn hash_sapling_txid_data<A: sapling::Authorization>(bundle: &sapling::Bundle<A>) -> Blake2bHash {
    let mut state = hasher(ZCASH_SAPLING_HASH_PERSONALIZATION);
    let bundle_is_empty =
        bundle.shielded_spends.is_empty() && bundle.shielded_outputs.is_empty();
    if !bundle_is_empty {
        state
            .write_all(hash_sapling_spends(&bundle.shielded_spends).as_bytes())
            .unwrap();
        state
            .write_all(hash_sapling_outputs(&bundle.shielded_outputs).as_bytes())
            .unwrap();
        state
            .write_all(&bundle.value_balance.to_i64_le_bytes())
            .unwrap();
    }
    state.finalize()
}
/// The digest committed to in place of the Sapling node when a transaction
/// carries no Sapling bundle: the hash of just the personalization string.
fn hash_sapling_txid_empty() -> Blake2bHash {
    let state = hasher(ZCASH_SAPLING_HASH_PERSONALIZATION);
    state.finalize()
}
/// Combines the TZE component digests into the level-1 TZE node of the
/// digest tree. `None` (no TZE bundle) yields the hash of just the
/// personalization string.
#[cfg(feature = "zfuture")]
fn hash_tze_txid_data(tze_digests: Option<&TzeDigests<Blake2bHash>>) -> Blake2bHash {
    let mut state = hasher(ZCASH_TZE_HASH_PERSONALIZATION);
    if let Some(d) = tze_digests {
        state.write_all(d.inputs_digest.as_bytes()).unwrap();
        state.write_all(d.outputs_digest.as_bytes()).unwrap();
        // Present only when this tree is being built for a signature hash.
        if let Some(per_input) = &d.per_input_digest {
            state.write_all(per_input.as_bytes()).unwrap();
        }
    }
    state.finalize()
}
/// A `TransactionDigest` implementation that commits to all of the effecting
/// data of a transaction to produce a nonmalleable transaction identifier.
///
/// This expects and relies upon the existence of canonical encodings for
/// each effecting component of a transaction.
pub struct TxIdDigester;

impl<A: Authorization> TransactionDigest<A> for TxIdDigester {
    type HeaderDigest = Blake2bHash;
    // Bundle digests are `None` when the corresponding bundle is absent.
    type TransparentDigest = Option<TransparentDigests<Blake2bHash>>;
    type SaplingDigest = Option<Blake2bHash>;
    type OrchardDigest = Option<Blake2bHash>;

    #[cfg(feature = "zfuture")]
    type TzeDigest = Option<TzeDigests<Blake2bHash>>;

    type Digest = TxDigests<Blake2bHash>;

    fn digest_header(
        &self,
        version: TxVersion,
        consensus_branch_id: BranchId,
        lock_time: u32,
        expiry_height: BlockHeight,
    ) -> Self::HeaderDigest {
        hash_header_txid_data(version, consensus_branch_id, lock_time, expiry_height)
    }

    fn digest_transparent(
        &self,
        transparent_bundle: Option<&transparent::Bundle<A::TransparentAuth>>,
    ) -> Self::TransparentDigest {
        transparent_bundle.map(transparent_digests)
    }

    fn digest_sapling(
        &self,
        sapling_bundle: Option<&sapling::Bundle<A::SaplingAuth>>,
    ) -> Self::SaplingDigest {
        sapling_bundle.map(hash_sapling_txid_data)
    }

    fn digest_orchard(
        &self,
        orchard_bundle: Option<&orchard::Bundle<A::OrchardAuth, Amount>>,
    ) -> Self::OrchardDigest {
        // The orchard crate computes its own bundle commitment.
        orchard_bundle.map(|b| b.commitment().0)
    }

    #[cfg(feature = "zfuture")]
    fn digest_tze(&self, tze_bundle: Option<&tze::Bundle<A::TzeAuth>>) -> Self::TzeDigest {
        tze_bundle.map(tze_digests)
    }

    fn combine(
        &self,
        header_digest: Self::HeaderDigest,
        transparent_digests: Self::TransparentDigest,
        sapling_digest: Self::SaplingDigest,
        orchard_digest: Self::OrchardDigest,
        #[cfg(feature = "zfuture")] tze_digests: Self::TzeDigest,
    ) -> Self::Digest {
        // Collects the per-bundle digests; the tree root is computed later by
        // `to_hash` / `to_txid`.
        TxDigests {
            header_digest,
            transparent_digests,
            sapling_digest,
            orchard_digest,
            #[cfg(feature = "zfuture")]
            tze_digests,
        }
    }
}
/// Combines the level-1 node digests into the root of the transaction digest
/// tree, personalized with `ZCASH_TX_PERSONALIZATION_PREFIX` followed by the
/// little-endian consensus branch ID.
///
/// Absent Sapling/Orchard bundles are replaced by their fixed "empty" digests;
/// the TZE node is only included (under `zfuture`) for TZE-capable versions.
pub(crate) fn to_hash(
    _txversion: TxVersion,
    consensus_branch_id: BranchId,
    header_digest: Blake2bHash,
    transparent_digests: Option<&TransparentDigests<Blake2bHash>>,
    sapling_digest: Option<Blake2bHash>,
    orchard_digest: Option<Blake2bHash>,
    #[cfg(feature = "zfuture")] tze_digests: Option<&TzeDigests<Blake2bHash>>,
) -> Blake2bHash {
    // Personalization = 12-byte prefix || little-endian consensus branch ID.
    let mut personal = [0; 16];
    (&mut personal[..12]).copy_from_slice(ZCASH_TX_PERSONALIZATION_PREFIX);
    (&mut personal[12..])
        .write_u32::<LittleEndian>(consensus_branch_id.into())
        .unwrap();

    let mut h = hasher(&personal);
    h.write_all(header_digest.as_bytes()).unwrap();
    h.write_all(hash_transparent_txid_data(transparent_digests).as_bytes())
        .unwrap();
    h.write_all(
        sapling_digest
            .unwrap_or_else(hash_sapling_txid_empty)
            .as_bytes(),
    )
    .unwrap();
    h.write_all(
        orchard_digest
            .unwrap_or_else(orchard::commitments::hash_bundle_txid_empty)
            .as_bytes(),
    )
    .unwrap();

    #[cfg(feature = "zfuture")]
    if _txversion.has_tze() {
        h.write_all(hash_tze_txid_data(tze_digests).as_bytes())
            .unwrap();
    }

    h.finalize()
}
/// Produces the transaction's [`TxId`] from its precomputed digest tree by
/// hashing the tree to its root via [`to_hash`] and taking the 32-byte
/// result.
pub fn to_txid(
    txversion: TxVersion,
    consensus_branch_id: BranchId,
    digests: &TxDigests<Blake2bHash>,
) -> TxId {
    let txid_digest = to_hash(
        txversion,
        consensus_branch_id,
        digests.header_digest,
        digests.transparent_digests.as_ref(),
        digests.sapling_digest,
        digests.orchard_digest,
        #[cfg(feature = "zfuture")]
        digests.tze_digests.as_ref(),
    );

    // The digest is always 32 bytes (hash_length(32)), so this cannot fail.
    TxId(<[u8; 32]>::try_from(txid_digest.as_bytes()).unwrap())
}
/// Digester which constructs a digest of only the witness data.
/// This does not internally commit to the txid, so if that is
/// desired it should be done using the result of this digest
/// function.
pub struct BlockTxCommitmentDigester;

impl TransactionDigest<Authorized> for BlockTxCommitmentDigester {
    /// We use the header digest to pass the transaction ID into
    /// where it needs to be used for personalization string construction.
    type HeaderDigest = BranchId;
    type TransparentDigest = Blake2bHash;
    type SaplingDigest = Blake2bHash;
    type OrchardDigest = Blake2bHash;

    #[cfg(feature = "zfuture")]
    type TzeDigest = Blake2bHash;

    type Digest = Blake2bHash;

    fn digest_header(
        &self,
        _version: TxVersion,
        consensus_branch_id: BranchId,
        _lock_time: u32,
        _expiry_height: BlockHeight,
    ) -> Self::HeaderDigest {
        // Only the branch ID is needed downstream, for the personalization
        // string in `combine`.
        consensus_branch_id
    }

    fn digest_transparent(
        &self,
        transparent_bundle: Option<&transparent::Bundle<transparent::Authorized>>,
    ) -> Blake2bHash {
        // Commits to each input's scriptSig; an absent bundle hashes to the
        // bare personalization string.
        let mut h = hasher(ZCASH_TRANSPARENT_SCRIPTS_HASH_PERSONALIZATION);
        if let Some(bundle) = transparent_bundle {
            for txin in &bundle.vin {
                txin.script_sig.write(&mut h).unwrap();
            }
        }
        h.finalize()
    }

    fn digest_sapling(
        &self,
        sapling_bundle: Option<&sapling::Bundle<sapling::Authorized>>,
    ) -> Blake2bHash {
        // Commits to spend proofs, then spend auth signatures, then output
        // proofs, then the binding signature — order matters for the digest.
        let mut h = hasher(ZCASH_SAPLING_SIGS_HASH_PERSONALIZATION);
        if let Some(bundle) = sapling_bundle {
            for spend in &bundle.shielded_spends {
                h.write_all(&spend.zkproof).unwrap();
            }

            for spend in &bundle.shielded_spends {
                spend.spend_auth_sig.write(&mut h).unwrap();
            }

            for output in &bundle.shielded_outputs {
                h.write_all(&output.zkproof).unwrap();
            }

            bundle.authorization.binding_sig.write(&mut h).unwrap();
        }
        h.finalize()
    }

    fn digest_orchard(
        &self,
        orchard_bundle: Option<&orchard::Bundle<orchard::Authorized, Amount>>,
    ) -> Self::OrchardDigest {
        // The orchard crate supplies both the authorizing commitment and the
        // fixed "empty bundle" digest.
        orchard_bundle.map_or_else(orchard::commitments::hash_bundle_auth_empty, |b| {
            b.authorizing_commitment().0
        })
    }

    #[cfg(feature = "zfuture")]
    fn digest_tze(&self, tze_bundle: Option<&tze::Bundle<tze::Authorized>>) -> Blake2bHash {
        // Commits to each TZE input's witness payload.
        let mut h = hasher(ZCASH_TZE_WITNESSES_HASH_PERSONALIZATION);
        if let Some(bundle) = tze_bundle {
            for tzein in &bundle.vin {
                h.write_all(&tzein.witness.payload.0).unwrap();
            }
        }
        h.finalize()
    }

    fn combine(
        &self,
        consensus_branch_id: Self::HeaderDigest,
        transparent_digest: Self::TransparentDigest,
        sapling_digest: Self::SaplingDigest,
        orchard_digest: Self::OrchardDigest,
        #[cfg(feature = "zfuture")] tze_digest: Self::TzeDigest,
    ) -> Self::Digest {
        let digests = [transparent_digest, sapling_digest, orchard_digest];

        // Personalization = 12-byte auth prefix || little-endian branch ID.
        let mut personal = [0; 16];
        (&mut personal[..12]).copy_from_slice(ZCASH_AUTH_PERSONALIZATION_PREFIX);
        (&mut personal[12..])
            .write_u32::<LittleEndian>(consensus_branch_id.into())
            .unwrap();

        let mut h = hasher(&personal);
        for digest in &digests {
            h.write_all(digest.as_bytes()).unwrap();
        }

        // The TZE digest only participates for TZE-capable versions.
        #[cfg(feature = "zfuture")]
        if TxVersion::suggested_for_branch(consensus_branch_id).has_tze() {
            h.write_all(tze_digest.as_bytes()).unwrap();
        }

        h.finalize()
    }
}

View File

@ -16,6 +16,10 @@ impl<R: Read> HashReader<R> {
}
}
pub fn into_base_reader(self) -> R {
self.reader
}
/// Destroy this reader and return the hash of what was read.
pub fn into_hash(self) -> Output<Sha256> {
Sha256::digest(&self.hasher.finalize())

View File

@ -1071,3 +1071,16 @@ mod tests {
}
}
}
#[cfg(any(test, feature = "test-dependencies"))]
pub mod testing {
use proptest::prelude::*;
use super::ExtendedSpendingKey;
prop_compose! {
pub fn arb_extended_spending_key()(seed in prop::array::uniform32(prop::num::u8::ANY)) -> ExtendedSpendingKey {
ExtendedSpendingKey::master(&seed)
}
}
}

View File

@ -8,11 +8,17 @@ and this library adheres to Rust's notion of
## [Unreleased]
### Changed
- MSRV is now 1.51.0.
- `zcash_proofs::sapling::SaplingVerificationContext::new` now takes a
`zip216_enabled` boolean; this is used to control how RedJubjub signatures are
validated.
- Renamed the following in `zcash_proofs::circuit::sprout` to use lower-case
abbreviations (matching Rust naming conventions):
- `JSInput` to `JsInput`
- `JSOutput` to `JsOutput`
### Removed
- `zcash_proofs::sapling::SaplingVerificationContext: Default`
## [0.5.0] - 2021-03-26
### Added
- `zcash_proofs::ZcashParameters`

View File

@ -15,23 +15,23 @@ edition = "2018"
all-features = true
[dependencies]
bellman = { version = "0.8", default-features = false, features = ["groth16"] }
bellman = { version = "0.10", default-features = false, features = ["groth16"] }
blake2b_simd = "0.5"
bls12_381 = "0.3.1"
bls12_381 = "0.5"
byteorder = "1"
directories = { version = "3", optional = true }
ff = "0.8"
group = "0.8"
jubjub = "0.5.1"
ff = "0.10"
group = "0.10"
jubjub = "0.7"
lazy_static = "1"
minreq = { version = "2", features = ["https"], optional = true }
rand_core = "0.5.1"
rand_core = "0.6"
wagyu-zcash-parameters = { version = "0.2", optional = true }
zcash_primitives = { version = "0.5", path = "../zcash_primitives" }
[dev-dependencies]
criterion = "0.3"
rand_xorshift = "0.2"
rand_xorshift = "0.3"
[features]
default = ["local-prover", "multicore"]
@ -40,6 +40,9 @@ download-params = ["minreq"]
local-prover = ["directories"]
multicore = ["bellman/multicore"]
[lib]
bench = false
[[bench]]
name = "sapling"
harness = false

View File

@ -620,7 +620,7 @@ impl MontgomeryPoint {
#[cfg(test)]
mod test {
use bellman::ConstraintSystem;
use ff::{Field, PrimeField};
use ff::{Field, PrimeField, PrimeFieldBits};
use group::{Curve, Group};
use rand_core::{RngCore, SeedableRng};
use rand_xorshift::XorShiftRng;
@ -737,9 +737,9 @@ mod test {
let s_bits = s
.to_le_bits()
.into_iter()
.iter()
.by_val()
.take(jubjub::Fr::NUM_BITS as usize)
.cloned()
.enumerate()
.map(|(i, b)| {
AllocatedBit::alloc(cs.namespace(|| format!("scalar bit {}", i)), Some(b))
@ -788,9 +788,9 @@ mod test {
let s_bits = s
.to_le_bits()
.into_iter()
.iter()
.by_val()
.take(jubjub::Fr::NUM_BITS as usize)
.cloned()
.enumerate()
.map(|(i, b)| {
AllocatedBit::alloc(cs.namespace(|| format!("scalar bit {}", i)), Some(b))

View File

@ -24,6 +24,9 @@ use bellman::gadgets::multipack;
use bellman::gadgets::num;
use bellman::gadgets::Assignment;
#[cfg(test)]
use ff::PrimeFieldBits;
pub const TREE_DEPTH: usize = SAPLING_COMMITMENT_TREE_DEPTH;
/// This is an instance of the `Spend` circuit.
@ -591,10 +594,14 @@ fn test_input_circuit_with_bls12_381() {
cur = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash(
pedersen_hash::Personalization::MerkleTree(i),
lhs.into_iter()
lhs.iter()
.by_val()
.take(bls12_381::Scalar::NUM_BITS as usize)
.chain(rhs.into_iter().take(bls12_381::Scalar::NUM_BITS as usize))
.cloned(),
.chain(
rhs.iter()
.by_val()
.take(bls12_381::Scalar::NUM_BITS as usize),
),
))
.to_affine()
.get_u();
@ -765,10 +772,14 @@ fn test_input_circuit_with_bls12_381_external_test_vectors() {
cur = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash(
pedersen_hash::Personalization::MerkleTree(i),
lhs.into_iter()
lhs.iter()
.by_val()
.take(bls12_381::Scalar::NUM_BITS as usize)
.chain(rhs.into_iter().take(bls12_381::Scalar::NUM_BITS as usize))
.cloned(),
.chain(
rhs.iter()
.by_val()
.take(bls12_381::Scalar::NUM_BITS as usize),
),
))
.to_affine()
.get_u();

View File

@ -16,19 +16,15 @@ use super::compute_value_balance;
pub struct SaplingVerificationContext {
// (sum of the Spend value commitments) - (sum of the Output value commitments)
cv_sum: jubjub::ExtendedPoint,
}
impl Default for SaplingVerificationContext {
fn default() -> Self {
SaplingVerificationContext::new()
}
zip216_enabled: bool,
}
impl SaplingVerificationContext {
/// Construct a new context to be used with a single transaction.
pub fn new() -> Self {
pub fn new(zip216_enabled: bool) -> Self {
SaplingVerificationContext {
cv_sum: jubjub::ExtendedPoint::identity(),
zip216_enabled,
}
}
@ -62,7 +58,12 @@ impl SaplingVerificationContext {
(&mut data_to_be_signed[32..64]).copy_from_slice(&sighash_value[..]);
// Verify the spend_auth_sig
if !rk.verify(&data_to_be_signed, &spend_auth_sig, SPENDING_KEY_GENERATOR) {
if !rk.verify_with_zip216(
&data_to_be_signed,
&spend_auth_sig,
SPENDING_KEY_GENERATOR,
self.zip216_enabled,
) {
return false;
}
@ -161,10 +162,11 @@ impl SaplingVerificationContext {
(&mut data_to_be_signed[32..64]).copy_from_slice(&sighash_value[..]);
// Verify the binding_sig
bvk.verify(
bvk.verify_with_zip216(
&data_to_be_signed,
&binding_sig,
VALUE_COMMITMENT_RANDOMNESS_GENERATOR,
self.zip216_enabled,
)
}
}