Implement manual serialization for incremental Merkle tree components.

This also adds parsing of the legacy CommitmentTree format,
to permit easy migration of persisted data.
This commit is contained in:
Kris Nuttycombe 2021-07-07 09:54:32 -06:00
parent e8d20f73f8
commit 2945905d52
7 changed files with 446 additions and 31 deletions

View File

@ -24,5 +24,6 @@ nom = { git = "https://github.com/myrrlyn/nom.git", rev = "d6b81f5303b0a347726e1
# In development.
halo2 = { git = "https://github.com/zcash/halo2.git", rev = "27c4187673a9c6ade13fbdbd4f20955530c22d7f" }
orchard = { git = "https://github.com/zcash/orchard.git", rev = "8454f86d423edbf0b53a1d5d32df1c691f8b7188" }
orchard = { git = "https://github.com/zcash/orchard.git", rev = "d0baa18fc6105df4a7847de2b6dc50c5919b3123" }
incrementalmerkletree = { git = "https://github.com/zcash/incrementalmerkletree.git", rev = "b7bd6246122a6e9ace8edb51553fbf5228906cbb" }
zcash_note_encryption = { path = "components/zcash_note_encryption" }

View File

@ -217,7 +217,7 @@ impl TryFrom<(&str, &[u8])> for Address {
let mut expected_padding = [0; PADDING_LEN];
expected_padding[0..hrp.len()].copy_from_slice(hrp.as_bytes());
let encoded = match encoded.split_at(encoded.len() - PADDING_LEN) {
(encoded, tail) if tail == &expected_padding => Ok(encoded),
(encoded, tail) if tail == expected_padding => Ok(encoded),
_ => Err(ParseError::InvalidEncoding),
}?;

View File

@ -28,6 +28,7 @@ ff = "0.10"
fpe = "0.4"
group = "0.10"
hex = "0.4"
incrementalmerkletree = "0.1"
jubjub = "0.7"
lazy_static = "1"
log = "0.4"

View File

@ -0,0 +1,7 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc f6df6e3a7a1641029b9f39a671046ba39745ded73de8d7444e7c27a8f73e1365 # shrinks to t = CommitmentTree { left: Some(Node { repr: [36, 96, 18, 1, 228, 118, 68, 158, 142, 67, 253, 219, 85, 192, 179, 142, 230, 218, 145, 73, 159, 211, 208, 58, 182, 136, 108, 95, 137, 166, 232, 10] }), right: Some(Node { repr: [10, 211, 222, 223, 94, 55, 180, 62, 79, 50, 38, 55, 73, 152, 245, 181, 157, 40, 89, 177, 51, 96, 154, 78, 185, 74, 118, 11, 54, 188, 151, 181] }), parents: [None, None, Some(Node { repr: [99, 240, 35, 62, 160, 23, 150, 46, 3, 226, 153, 214, 59, 25, 19, 85, 247, 234, 174, 75, 93, 165, 99, 116, 194, 243, 103, 155, 166, 131, 10, 68] }), Some(Node { repr: [106, 249, 220, 118, 49, 239, 102, 59, 121, 101, 110, 82, 194, 242, 72, 24, 209, 160, 24, 225, 124, 138, 138, 52, 157, 6, 43, 180, 212, 8, 117, 3] })] }

View File

@ -1,12 +1,16 @@
//! Implementation of a Merkle tree of commitments used to prove the existence of notes.
use byteorder::{LittleEndian, ReadBytesExt};
use incrementalmerkletree::bridgetree;
use std::collections::VecDeque;
use std::io::{self, Read, Write};
use crate::sapling::SAPLING_COMMITMENT_TREE_DEPTH;
use crate::sapling::SAPLING_COMMITMENT_TREE_DEPTH_U8;
use crate::serialize::{Optional, Vector};
pub mod incremental;
/// A hashable node within a Merkle tree.
pub trait Hashable: Clone + Copy {
/// Parses a node from the given byte source.
@ -48,13 +52,13 @@ impl<Node: Hashable> PathFiller<Node> {
/// The depth of the Merkle tree is fixed at 32, equal to the depth of the Sapling
/// commitment tree.
#[derive(Clone, Debug)]
pub struct CommitmentTree<Node: Hashable> {
left: Option<Node>,
right: Option<Node>,
parents: Vec<Option<Node>>,
pub struct CommitmentTree<Node> {
pub(crate) left: Option<Node>,
pub(crate) right: Option<Node>,
pub(crate) parents: Vec<Option<Node>>,
}
impl<Node: Hashable> CommitmentTree<Node> {
impl<Node> CommitmentTree<Node> {
/// Creates an empty tree.
pub fn empty() -> Self {
CommitmentTree {
@ -64,6 +68,59 @@ impl<Node: Hashable> CommitmentTree<Node> {
}
}
/// Converts this tree into its equivalent representation as an
/// `incrementalmerkletree` frontier: the rightmost filled leaf (or leaf
/// pair) together with the ommers needed to continue appending.
///
/// Returns the empty frontier when the tree contains no leaves.
pub fn to_frontier(&self) -> bridgetree::Frontier<Node, SAPLING_COMMITMENT_TREE_DEPTH_U8>
where
Node: incrementalmerkletree::Hashable + Clone,
{
if self.size() == 0 {
bridgetree::Frontier::empty()
} else {
// A nonempty tree has either only `left` filled (Leaf::Left) or
// both children filled (Leaf::Right); `right` is never occupied
// while `left` is empty, so that case is unreachable.
let leaf = match (self.left.as_ref(), self.right.as_ref()) {
(Some(a), None) => bridgetree::Leaf::Left(a.clone()),
(Some(a), Some(b)) => bridgetree::Leaf::Right(a.clone(), b.clone()),
_ => unreachable!(),
};
// Only the occupied parent slots become ommers; empty slots mark
// levels with no completed sibling subtree.
let ommers = self
.parents
.iter()
.filter_map(|v| v.as_ref())
.cloned()
.collect();
// If a frontier cannot be successfully constructed from the
// parts of a commitment tree, it is a programming error.
bridgetree::Frontier::from_parts((self.size() - 1).into(), leaf, ommers)
.expect("Frontier should be constructable from CommitmentTree.")
}
}
/// Returns the number of leaf nodes in the tree.
///
/// The occupancy of `left`/`right` contributes the low bit(s) of the
/// count, and each occupied slot in `parents` contributes a power of two
/// corresponding to its level.
pub fn size(&self) -> usize {
self.parents.iter().enumerate().fold(
// Leaf-level contribution: 0, 1, or 2 leaves. `right` is never
// occupied while `left` is empty.
match (self.left.as_ref(), self.right.as_ref()) {
(None, None) => 0,
(Some(_), None) => 1,
(Some(_), Some(_)) => 2,
(None, Some(_)) => unreachable!(),
},
|acc, (i, p)| {
// Treat occupation of parents array as a binary number
// (right-shifted by 1)
acc + if p.is_some() { 1 << (i + 1) } else { 0 }
},
)
}
/// Returns `true` when the tree is completely full for the given depth:
/// both leaf slots and every one of the `depth - 1` parent slots occupied.
fn is_complete(&self, depth: usize) -> bool {
    let leaves_full = self.left.is_some() && self.right.is_some();
    let parents_full =
        self.parents.len() == depth - 1 && !self.parents.iter().any(|p| p.is_none());
    leaves_full && parents_full
}
}
impl<Node: Hashable> CommitmentTree<Node> {
/// Reads a `CommitmentTree` from its serialized form.
#[allow(clippy::redundant_closure)]
pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
@ -87,30 +144,6 @@ impl<Node: Hashable> CommitmentTree<Node> {
})
}
/// Returns the number of leaf nodes in the tree.
pub fn size(&self) -> usize {
self.parents.iter().enumerate().fold(
match (self.left, self.right) {
(None, None) => 0,
(Some(_), None) => 1,
(Some(_), Some(_)) => 2,
(None, Some(_)) => unreachable!(),
},
|acc, (i, p)| {
// Treat occupation of parents array as a binary number
// (right-shifted by 1)
acc + if p.is_some() { 1 << (i + 1) } else { 0 }
},
)
}
fn is_complete(&self, depth: usize) -> bool {
self.left.is_some()
&& self.right.is_some()
&& self.parents.len() == depth - 1
&& self.parents.iter().all(|p| p.is_some())
}
/// Adds a leaf node to the tree.
///
/// Returns an error if the tree is full.

View File

@ -0,0 +1,341 @@
//! Implementations of serialization and parsing for Orchard note commitment trees.
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::convert::TryFrom;
use std::hash::Hash;
use std::io::{self, Read, Write};
use incrementalmerkletree::{
bridgetree::{
AuthFragment, BridgeTree, Checkpoint, Frontier, Leaf, MerkleBridge, NonEmptyFrontier,
},
Hashable, Position,
};
use orchard::tree::MerkleCrhOrchardOutput;
use super::CommitmentTree;
use crate::serialize::{Optional, Vector};
/// Version byte prepended by `write_tree` to identify the v1 format.
pub const SER_V1: u8 = 1;

/// A node value that can be serialized to, and parsed from, a byte stream.
///
/// Unlike `super::Hashable`, this trait only describes the (de)serialization
/// of a node, not how nodes are combined.
pub trait HashSer {
/// Parses a node from the given byte source.
fn read<R: Read>(reader: R) -> io::Result<Self>
where
Self: Sized;

/// Serializes this node.
fn write<W: Write>(&self, writer: W) -> io::Result<()>;
}
/// Reads a legacy (pre-incremental) `CommitmentTree` serialization and
/// converts it to a `Frontier`, permitting migration of persisted data.
pub fn read_frontier_v0<H: Hashable + super::Hashable, R: Read>(
    mut reader: R,
) -> io::Result<Frontier<H, 32>> {
    CommitmentTree::read(&mut reader).map(|tree| tree.to_frontier())
}
impl HashSer for MerkleCrhOrchardOutput {
    /// Reads a 32-byte encoding and rejects values that are not canonical
    /// encodings of a Pallas base field element.
    fn read<R: Read>(mut reader: R) -> io::Result<Self>
    where
        Self: Sized,
    {
        let mut repr = [0u8; 32];
        reader.read_exact(&mut repr)?;
        match Option::from(Self::from_bytes(&repr)) {
            Some(node) => Ok(node),
            None => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Non-canonical encoding of Pallas base field value.",
            )),
        }
    }

    /// Writes the canonical 32-byte encoding of this node.
    fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
        writer.write_all(&self.to_bytes())
    }
}
/// Writes a `NonEmptyFrontier` in the v1 serialization format.
///
/// Layout (order matters; `read_nonempty_frontier_v1` mirrors it):
/// little-endian u64 position, the left leaf, an optional right leaf,
/// then the vector of ommers.
pub fn write_nonempty_frontier_v1<H: HashSer, W: Write>(
mut writer: W,
frontier: &NonEmptyFrontier<H>,
) -> io::Result<()> {
writer.write_u64::<LittleEndian>(<u64>::from(frontier.position()))?;
match frontier.leaf() {
Leaf::Left(a) => {
// A lone left leaf is followed by an explicit "absent" marker
// for the right slot, keeping the layout uniform.
a.write(&mut writer)?;
Optional::write(&mut writer, &None, |w, n: &H| n.write(w))?;
}
Leaf::Right(a, b) => {
a.write(&mut writer)?;
Optional::write(&mut writer, &Some(b), |w, n| n.write(w))?;
}
}
Vector::write(&mut writer, &frontier.ommers(), |w, e| e.write(w))?;
Ok(())
}
#[allow(clippy::redundant_closure)]
pub fn read_nonempty_frontier_v1<H: HashSer + Clone, R: Read>(
mut reader: R,
) -> io::Result<NonEmptyFrontier<H>> {
let position = reader
.read_u64::<LittleEndian>()
.map(|v| <Position>::from(v as usize))?;
let left = H::read(&mut reader)?;
let right = Optional::read(&mut reader, |r| H::read(r))?;
let leaf = right.map_or_else(
|| Leaf::Left(left.clone()),
|r| Leaf::Right(left.clone(), r),
);
let ommers = Vector::read(&mut reader, |r| H::read(r))?;
NonEmptyFrontier::from_parts(position, leaf, ommers).map_err(|err| {
io::Error::new(
io::ErrorKind::InvalidInput,
format!("Parsing resulted in an invalid Merkle frontier: {:?}", err),
)
})
}
/// Writes a `Frontier` in the v1 format: a one-byte presence flag
/// (0 = empty, 1 = present), then the nonempty-frontier encoding if present.
pub fn write_frontier_v1<H: HashSer, W: Write>(
    mut writer: W,
    frontier: &Frontier<H, 32>,
) -> io::Result<()> {
    if let Some(f) = frontier.value() {
        writer.write_u8(1)?;
        write_nonempty_frontier_v1(&mut writer, f)
    } else {
        writer.write_u8(0)?;
        Ok(())
    }
}
/// Reads a `Frontier` in the v1 format written by `write_frontier_v1`.
///
/// The encoding begins with a one-byte presence flag: `0` for the empty
/// frontier, `1` for a nonempty frontier. Any other flag value is rejected,
/// consistent with the strict tag handling in `read_checkpoint_v1` and
/// `read_tree` (previously any nonzero byte was silently accepted as
/// "present").
#[allow(clippy::redundant_closure)]
pub fn read_frontier_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<Frontier<H, 32>> {
    match reader.read_u8()? {
        0 => Ok(Frontier::empty()),
        1 => read_nonempty_frontier_v1(reader).and_then(|f| {
            Frontier::try_from(f).map_err(|err| {
                io::Error::new(
                    io::ErrorKind::InvalidInput,
                    format!("Parsing resulted in an invalid Merkle frontier: {:?}", err),
                )
            })
        }),
        flag => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("Unrecognized frontier presence flag: {:?}", flag),
        )),
    }
}
/// Writes an `AuthFragment` in the v1 serialization format.
///
/// Layout: little-endian u64 position, the count of altitudes observed,
/// then the vector of fragment values.
pub fn write_auth_fragment_v1<H: HashSer, W: Write>(
mut writer: W,
fragment: &AuthFragment<H>,
) -> io::Result<()> {
writer.write_u64::<LittleEndian>(<u64>::from(fragment.position()))?;
writer.write_u64::<LittleEndian>(fragment.altitudes_observed() as u64)?;
Vector::write(&mut writer, fragment.values(), |w, a| a.write(w))?;
Ok(())
}
/// Reads a tree `Position` encoded as a little-endian `u64`.
pub fn read_position<R: Read>(mut reader: R) -> io::Result<Position> {
    let bits = reader.read_u64::<LittleEndian>()?;
    Ok(Position::from(bits as usize))
}
/// Reads an `AuthFragment` in the v1 serialization format.
///
/// Field order mirrors `write_auth_fragment_v1`: position, altitudes
/// observed, then the fragment values.
#[allow(clippy::redundant_closure)]
pub fn read_auth_fragment_v1<H: HashSer, R: Read>(mut reader: R) -> io::Result<AuthFragment<H>> {
let position = read_position(&mut reader)?;
let alts_observed = reader.read_u64::<LittleEndian>()? as usize;
let values = Vector::read(&mut reader, |r| H::read(r))?;
Ok(AuthFragment::from_parts(position, alts_observed, values))
}
/// Writes a `MerkleBridge` in the v1 serialization format.
///
/// Layout: optional prior position, the `(leaf index, auth fragment)` pairs,
/// then the nonempty frontier.
///
/// The auth fragments are written sorted by leaf index so that the
/// serialized bytes are deterministic; the keyed collection they are stored
/// in does not guarantee a stable iteration order. `read_bridge_v1` is
/// order-insensitive, so previously-written data remains readable.
pub fn write_bridge_v1<H: HashSer, W: Write>(
    mut writer: W,
    bridge: &MerkleBridge<H>,
) -> io::Result<()> {
    Optional::write(
        &mut writer,
        &bridge.prior_position().map(<u64>::from),
        |w, n| w.write_u64::<LittleEndian>(*n),
    )?;
    let mut auth_fragments = bridge.auth_fragments().iter().collect::<Vec<_>>();
    auth_fragments.sort_by_key(|(i, _)| **i);
    Vector::write(&mut writer, &auth_fragments, |w, (i, a)| {
        w.write_u64::<LittleEndian>(**i as u64)?;
        write_auth_fragment_v1(w, a)
    })?;
    write_nonempty_frontier_v1(&mut writer, bridge.frontier())?;
    Ok(())
}
/// Reads a `MerkleBridge` in the v1 serialization format.
///
/// Field order mirrors `write_bridge_v1`: optional prior position, the
/// `(leaf index, auth fragment)` pairs, then the nonempty frontier.
#[allow(clippy::redundant_closure)]
pub fn read_bridge_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<MerkleBridge<H>> {
let prior_position = Optional::read(&mut reader, |r| read_position(r))?;
// Collect the serialized pairs back into the collection expected by
// `MerkleBridge::from_parts`.
let auth_fragments = Vector::read(&mut reader, |r| {
Ok((
r.read_u64::<LittleEndian>()? as usize,
read_auth_fragment_v1(r)?,
))
})?
.into_iter()
.collect();
let frontier = read_nonempty_frontier_v1(&mut reader)?;
Ok(MerkleBridge::from_parts(
prior_position,
auth_fragments,
frontier,
))
}
/// Tag byte identifying the `Checkpoint::Empty` variant.
pub const EMPTY_CHECKPOINT: u8 = 0;
/// Tag byte identifying the `Checkpoint::AtIndex` variant.
pub const BRIDGE_CHECKPOINT: u8 = 1;

/// Writes a `Checkpoint` in the v1 serialization format: a one-byte variant
/// tag, followed for `AtIndex` by the little-endian u64 index and the bridge.
pub fn write_checkpoint_v1<H: HashSer, W: Write>(
mut writer: W,
checkpoint: &Checkpoint<H>,
) -> io::Result<()> {
match checkpoint {
Checkpoint::Empty => {
writer.write_u8(EMPTY_CHECKPOINT)?;
}
Checkpoint::AtIndex(i, b) => {
writer.write_u8(BRIDGE_CHECKPOINT)?;
writer.write_u64::<LittleEndian>(*i as u64)?;
write_bridge_v1(&mut writer, b)?;
}
}
Ok(())
}
pub fn read_checkpoint_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<Checkpoint<H>> {
match reader.read_u8()? {
EMPTY_CHECKPOINT => Ok(Checkpoint::Empty),
BRIDGE_CHECKPOINT => Ok(Checkpoint::AtIndex(
reader.read_u64::<LittleEndian>()? as usize,
read_bridge_v1(&mut reader)?,
)),
flag => Err(io::Error::new(
io::ErrorKind::InvalidInput,
format!("Unrecognized checkpoint variant identifier: {:?}", flag),
)),
}
}
/// Writes a `BridgeTree` in the v1 serialization format.
///
/// Layout: bridges, witnessable leaves (as `(leaf, index)` pairs),
/// checkpoints, then the maximum checkpoint count.
///
/// The witnessable leaves are written sorted by leaf index so that the
/// serialized bytes are deterministic; the keyed collection they are stored
/// in does not guarantee a stable iteration order. `read_tree_v1` is
/// order-insensitive, so previously-written data remains readable.
pub fn write_tree_v1<H: HashSer + Hash + Eq, W: Write>(
    mut writer: W,
    tree: &BridgeTree<H, 32>,
) -> io::Result<()> {
    Vector::write(&mut writer, tree.bridges(), |w, b| write_bridge_v1(w, b))?;
    let mut leaves = tree.witnessable_leaves().iter().collect::<Vec<_>>();
    leaves.sort_by_key(|(_, i)| **i);
    Vector::write(&mut writer, &leaves, |mut w, (a, i)| {
        a.write(&mut w)?;
        w.write_u64::<LittleEndian>(**i as u64)?;
        Ok(())
    })?;
    Vector::write(&mut writer, tree.checkpoints(), |w, c| {
        write_checkpoint_v1(w, c)
    })?;
    writer.write_u64::<LittleEndian>(tree.max_checkpoints() as u64)?;
    Ok(())
}
/// Reads a `BridgeTree` in the v1 serialization format.
///
/// Field order mirrors `write_tree_v1`: bridges, witnessable leaves,
/// checkpoints, then the maximum checkpoint count. Rust evaluates these
/// arguments left to right, so the reads occur in exactly that order.
#[allow(clippy::redundant_closure)]
pub fn read_tree_v1<H: Hashable + HashSer + Hash + Eq + Clone, R: Read>(
mut reader: R,
) -> io::Result<BridgeTree<H, 32>> {
BridgeTree::from_parts(
Vector::read(&mut reader, |r| read_bridge_v1(r))?,
Vector::read(&mut reader, |mut r| {
Ok((H::read(&mut r)?, r.read_u64::<LittleEndian>()? as usize))
})?
.into_iter()
.collect(),
Vector::read(&mut reader, |r| read_checkpoint_v1(r))?,
reader.read_u64::<LittleEndian>()? as usize,
)
.map_err(|err| {
// `from_parts` checks consistency of the deserialized components;
// surface any violation as an InvalidInput error.
io::Error::new(
io::ErrorKind::InvalidInput,
format!(
"Consistency violation found when attempting to deserialize Merkle tree: {:?}",
err
),
)
})
}
/// Serializes a `BridgeTree`, prefixed with the serialization format
/// version byte (currently `SER_V1`).
pub fn write_tree<H: HashSer + Hash + Eq, W: Write>(
mut writer: W,
tree: &BridgeTree<H, 32>,
) -> io::Result<()> {
writer.write_u8(SER_V1)?;
write_tree_v1(&mut writer, tree)
}
pub fn read_tree<H: Hashable + HashSer + Hash + Eq + Clone, R: Read>(
mut reader: R,
) -> io::Result<BridgeTree<H, 32>> {
match reader.read_u8()? {
SER_V1 => read_tree_v1(&mut reader),
flag => Err(io::Error::new(
io::ErrorKind::InvalidInput,
format!("Unrecognized tree serialization version: {:?}", flag),
)),
}
}
#[cfg(test)]
mod tests {
use proptest::prelude::*;
use incrementalmerkletree::bridgetree::Frontier;
use super::*;
use crate::{
merkle_tree::testing::arb_commitment_tree,
sapling::{testing as sapling, Node},
};
proptest! {
// Migration path: reading a legacy CommitmentTree serialization via
// `read_frontier_v0` must agree with direct `to_frontier` conversion.
#[test]
fn frontier_serialization_v0(t in arb_commitment_tree(0, sapling::arb_node()))
{
let mut buffer = vec![];
t.write(&mut buffer).unwrap();
let frontier: Frontier<Node, 32> = read_frontier_v0(&buffer[..]).unwrap();
let expected: Frontier<Node, 32> = t.to_frontier();
assert_eq!(frontier, expected);
}
// Round trip: a v1 write followed by a v1 read reproduces the frontier.
#[test]
fn frontier_serialization_v1(t in arb_commitment_tree(1, sapling::arb_node()))
{
let original: Frontier<Node, 32> = t.to_frontier();
let mut buffer = vec![];
write_frontier_v1(&mut buffer, &original).unwrap();
let read: Frontier<Node, 32> = read_frontier_v1(&buffer[..]).unwrap();
assert_eq!(read, original);
}
}
}

View File

@ -13,6 +13,7 @@ use blake2s_simd::Params as Blake2sParams;
use byteorder::{LittleEndian, WriteBytesExt};
use ff::{Field, PrimeField};
use group::{Curve, Group, GroupEncoding};
use incrementalmerkletree::{self, Altitude};
use lazy_static::lazy_static;
use rand_core::{CryptoRng, RngCore};
use std::array::TryFromSliceError;
@ -34,6 +35,7 @@ use self::{
};
pub const SAPLING_COMMITMENT_TREE_DEPTH: usize = 32;
pub const SAPLING_COMMITMENT_TREE_DEPTH_U8: u8 = 32;
/// Compute a parent node in the Sapling commitment tree given its two children.
pub fn merkle_hash(depth: usize, lhs: &[u8; 32], rhs: &[u8; 32]) -> [u8; 32] {
@ -109,6 +111,36 @@ impl Hashable for Node {
}
}
impl incrementalmerkletree::Hashable for Node {
/// Returns the value of an empty leaf: the uncommitted note representation.
fn empty_leaf() -> Self {
Node {
repr: Note::uncommitted().to_repr(),
}
}

/// Combines two child nodes into their parent using the Sapling Merkle
/// hash at the given altitude.
fn combine(altitude: Altitude, lhs: &Self, rhs: &Self) -> Self {
Node {
repr: merkle_hash(altitude.into(), &lhs.repr, &rhs.repr),
}
}

/// Looks up the precomputed root of an empty subtree at the given altitude.
fn empty_root(altitude: Altitude) -> Self {
EMPTY_ROOTS[<usize>::from(altitude)]
}
}
impl crate::merkle_tree::incremental::HashSer for Node {
/// Reads a node from its raw 32-byte representation.
fn read<R: Read>(mut reader: R) -> io::Result<Self> {
let mut repr = [0u8; 32];
reader.read_exact(&mut repr)?;
Ok(Node::new(repr))
}

/// Writes the node's raw 32-byte representation.
fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
writer.write_all(self.repr.as_ref())
}
}
impl From<Node> for bls12_381::Scalar {
fn from(node: Node) -> Self {
bls12_381::Scalar::from_repr(node.repr).expect("Tree nodes should be in the prime field")