Merge pull request #417 from nuttycom/incremental_merkle_tree_ser
Add manual serialization for bridgetree::Frontier
commit 9be36f3e54
@@ -24,7 +24,8 @@ nom = { git = "https://github.com/myrrlyn/nom.git", rev = "d6b81f5303b0a347726e1
 # In development.
 halo2 = { git = "https://github.com/zcash/halo2.git", rev = "27c4187673a9c6ade13fbdbd4f20955530c22d7f" }
-orchard = { git = "https://github.com/zcash/orchard.git", rev = "8454f86d423edbf0b53a1d5d32df1c691f8b7188" }
+orchard = { git = "https://github.com/zcash/orchard.git", rev = "d0baa18fc6105df4a7847de2b6dc50c5919b3123" }
+incrementalmerkletree = { git = "https://github.com/zcash/incrementalmerkletree.git", rev = "b7bd6246122a6e9ace8edb51553fbf5228906cbb" }
 zcash_note_encryption = { path = "components/zcash_note_encryption" }

 # Unreleased
@@ -217,7 +217,7 @@ impl TryFrom<(&str, &[u8])> for Address {
         let mut expected_padding = [0; PADDING_LEN];
         expected_padding[0..hrp.len()].copy_from_slice(hrp.as_bytes());
         let encoded = match encoded.split_at(encoded.len() - PADDING_LEN) {
-            (encoded, tail) if tail == &expected_padding => Ok(encoded),
+            (encoded, tail) if tail == expected_padding => Ok(encoded),
             _ => Err(ParseError::InvalidEncoding),
         }?;

@@ -29,6 +29,7 @@ ff = "0.10"
 fpe = "0.4"
 group = "0.10"
 hex = "0.4"
+incrementalmerkletree = "0.1"
 jubjub = "0.7"
 lazy_static = "1"
 log = "0.4"
@@ -0,0 +1,7 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc f6df6e3a7a1641029b9f39a671046ba39745ded73de8d7444e7c27a8f73e1365 # shrinks to t = CommitmentTree { left: Some(Node { repr: [36, 96, 18, 1, 228, 118, 68, 158, 142, 67, 253, 219, 85, 192, 179, 142, 230, 218, 145, 73, 159, 211, 208, 58, 182, 136, 108, 95, 137, 166, 232, 10] }), right: Some(Node { repr: [10, 211, 222, 223, 94, 55, 180, 62, 79, 50, 38, 55, 73, 152, 245, 181, 157, 40, 89, 177, 51, 96, 154, 78, 185, 74, 118, 11, 54, 188, 151, 181] }), parents: [None, None, Some(Node { repr: [99, 240, 35, 62, 160, 23, 150, 46, 3, 226, 153, 214, 59, 25, 19, 85, 247, 234, 174, 75, 93, 165, 99, 116, 194, 243, 103, 155, 166, 131, 10, 68] }), Some(Node { repr: [106, 249, 220, 118, 49, 239, 102, 59, 121, 101, 110, 82, 194, 242, 72, 24, 209, 160, 24, 225, 124, 138, 138, 52, 157, 6, 43, 180, 212, 8, 117, 3] })] }
@@ -1,12 +1,17 @@
 //! Implementation of a Merkle tree of commitments used to prove the existence of notes.

 use byteorder::{LittleEndian, ReadBytesExt};
+use incrementalmerkletree::{self, bridgetree, Altitude};
 use std::collections::VecDeque;
+use std::convert::TryFrom;
 use std::io::{self, Read, Write};

 use crate::sapling::SAPLING_COMMITMENT_TREE_DEPTH;
+use crate::sapling::SAPLING_COMMITMENT_TREE_DEPTH_U8;
 use crate::serialize::{Optional, Vector};

+pub mod incremental;
+
 /// A hashable node within a Merkle tree.
 pub trait Hashable: Clone + Copy {
     /// Parses a node from the given byte source.
@@ -25,6 +30,55 @@ pub trait Hashable: Clone + Copy {
     fn empty_root(_: usize) -> Self;
 }

+/// A hashable node within a Merkle tree.
+pub trait HashSer {
+    /// Parses a node from the given byte source.
+    fn read<R: Read>(reader: R) -> io::Result<Self>
+    where
+        Self: Sized;
+
+    /// Serializes this node.
+    fn write<W: Write>(&self, writer: W) -> io::Result<()>;
+}
+
+impl<T> Hashable for T
+where
+    T: incrementalmerkletree::Hashable + HashSer + Copy,
+{
+    /// Parses a node from the given byte source.
+    fn read<R: Read>(reader: R) -> io::Result<Self> {
+        <Self as HashSer>::read(reader)
+    }
+
+    /// Serializes this node.
+    fn write<W: Write>(&self, writer: W) -> io::Result<()> {
+        <Self as HashSer>::write(self, writer)
+    }
+
+    /// Returns the parent node within the tree of the two given nodes.
+    fn combine(alt: usize, lhs: &Self, rhs: &Self) -> Self {
+        <Self as incrementalmerkletree::Hashable>::combine(
+            Altitude::from(
+                u8::try_from(alt).expect("Tree heights greater than 255 are unsupported."),
+            ),
+            lhs,
+            rhs,
+        )
+    }
+
+    /// Returns a blank leaf node.
+    fn blank() -> Self {
+        <Self as incrementalmerkletree::Hashable>::empty_leaf()
+    }
+
+    /// Returns the empty root for the given depth.
+    fn empty_root(alt: usize) -> Self {
+        <Self as incrementalmerkletree::Hashable>::empty_root(Altitude::from(
+            u8::try_from(alt).expect("Tree heights greater than 255 are unsupported."),
+        ))
+    }
+}
+
 struct PathFiller<Node: Hashable> {
     queue: VecDeque<Node>,
 }
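For orientation, here is a minimal sketch of what a HashSer implementation looks like for an invented 32-byte node type (the type name and crate paths are assumptions, not part of this commit). If such a type also implements incrementalmerkletree::Hashable and Copy, the blanket impl above supplies the legacy Hashable trait for it automatically; the Sapling Node changes later in this diff follow exactly this shape.

use std::io::{self, Read, Write};
use zcash_primitives::merkle_tree::HashSer;

// Hypothetical 32-byte node type; not taken from this commit.
#[derive(Clone, Copy)]
struct ExampleNode([u8; 32]);

impl HashSer for ExampleNode {
    fn read<R: Read>(mut reader: R) -> io::Result<Self> {
        let mut repr = [0u8; 32];
        reader.read_exact(&mut repr)?;
        Ok(ExampleNode(repr))
    }

    fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
        writer.write_all(&self.0)
    }
}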
@@ -48,13 +102,13 @@ impl<Node: Hashable> PathFiller<Node> {
 /// The depth of the Merkle tree is fixed at 32, equal to the depth of the Sapling
 /// commitment tree.
 #[derive(Clone, Debug)]
-pub struct CommitmentTree<Node: Hashable> {
-    left: Option<Node>,
-    right: Option<Node>,
-    parents: Vec<Option<Node>>,
+pub struct CommitmentTree<Node> {
+    pub(crate) left: Option<Node>,
+    pub(crate) right: Option<Node>,
+    pub(crate) parents: Vec<Option<Node>>,
 }

-impl<Node: Hashable> CommitmentTree<Node> {
+impl<Node> CommitmentTree<Node> {
     /// Creates an empty tree.
     pub fn empty() -> Self {
         CommitmentTree {
@@ -64,33 +118,37 @@ impl<Node: Hashable> CommitmentTree<Node> {
         }
     }

-    /// Reads a `CommitmentTree` from its serialized form.
-    #[allow(clippy::redundant_closure)]
-    pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
-        let left = Optional::read(&mut reader, |r| Node::read(r))?;
-        let right = Optional::read(&mut reader, |r| Node::read(r))?;
-        let parents = Vector::read(&mut reader, |r| Optional::read(r, |r| Node::read(r)))?;
-
-        Ok(CommitmentTree {
-            left,
-            right,
-            parents,
-        })
-    }
-
-    /// Serializes this tree as an array of bytes.
-    pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
-        Optional::write(&mut writer, &self.left, |w, n| n.write(w))?;
-        Optional::write(&mut writer, &self.right, |w, n| n.write(w))?;
-        Vector::write(&mut writer, &self.parents, |w, e| {
-            Optional::write(w, e, |w, n| n.write(w))
-        })
-    }
+    pub fn to_frontier(&self) -> bridgetree::Frontier<Node, SAPLING_COMMITMENT_TREE_DEPTH_U8>
+    where
+        Node: incrementalmerkletree::Hashable + Clone,
+    {
+        if self.size() == 0 {
+            bridgetree::Frontier::empty()
+        } else {
+            let leaf = match (self.left.as_ref(), self.right.as_ref()) {
+                (Some(a), None) => bridgetree::Leaf::Left(a.clone()),
+                (Some(a), Some(b)) => bridgetree::Leaf::Right(a.clone(), b.clone()),
+                _ => unreachable!(),
+            };
+
+            let ommers = self
+                .parents
+                .iter()
+                .filter_map(|v| v.as_ref())
+                .cloned()
+                .collect();
+
+            // If a frontier cannot be successfully constructed from the
+            // parts of a commitment tree, it is a programming error.
+            bridgetree::Frontier::from_parts((self.size() - 1).into(), leaf, ommers)
+                .expect("Frontier should be constructable from CommitmentTree.")
+        }
+    }

     /// Returns the number of leaf nodes in the tree.
     pub fn size(&self) -> usize {
         self.parents.iter().enumerate().fold(
-            match (self.left, self.right) {
+            match (self.left.as_ref(), self.right.as_ref()) {
                 (None, None) => 0,
                 (Some(_), None) => 1,
                 (Some(_), Some(_)) => 2,
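To make the field-to-frontier mapping concrete, a hedged sketch of hypothetical caller code (assuming the crate paths below and that CommitmentTree::append has its usual Result return type and succeeds while the tree is not full): after three appends the tree holds left = Some(c), right = None, parents = [Some(parent(a, b))], so the resulting frontier is Leaf::Left(c) with ommers [parent(a, b)] at position size() - 1 == 2.

use incrementalmerkletree::bridgetree::Frontier;
use zcash_primitives::merkle_tree::CommitmentTree;
use zcash_primitives::sapling::Node;

// Hypothetical illustration, not part of this commit.
fn frontier_of(a: Node, b: Node, c: Node) -> Frontier<Node, 32> {
    let mut tree = CommitmentTree::empty();
    tree.append(a).unwrap();
    tree.append(b).unwrap();
    tree.append(c).unwrap();
    // left/right become the frontier leaf; the Some entries of `parents`
    // become the ommers, in order from lowest to highest altitude.
    tree.to_frontier()
}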
@@ -110,6 +168,30 @@ impl<Node: Hashable> CommitmentTree<Node> {
             && self.parents.len() == depth - 1
             && self.parents.iter().all(|p| p.is_some())
     }
+}
+
+impl<Node: Hashable> CommitmentTree<Node> {
+    /// Reads a `CommitmentTree` from its serialized form.
+    pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
+        let left = Optional::read(&mut reader, Node::read)?;
+        let right = Optional::read(&mut reader, Node::read)?;
+        let parents = Vector::read(&mut reader, |r| Optional::read(r, Node::read))?;
+
+        Ok(CommitmentTree {
+            left,
+            right,
+            parents,
+        })
+    }
+
+    /// Serializes this tree as an array of bytes.
+    pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
+        Optional::write(&mut writer, self.left, |w, n| n.write(w))?;
+        Optional::write(&mut writer, self.right, |w, n| n.write(w))?;
+        Vector::write(&mut writer, &self.parents, |w, e| {
+            Optional::write(w, *e, |w, n| n.write(w))
+        })
+    }

     /// Adds a leaf node to the tree.
     ///
@@ -241,7 +323,7 @@ impl<Node: Hashable> IncrementalWitness<Node> {
     pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {
         let tree = CommitmentTree::read(&mut reader)?;
         let filled = Vector::read(&mut reader, |r| Node::read(r))?;
-        let cursor = Optional::read(&mut reader, |r| CommitmentTree::read(r))?;
+        let cursor = Optional::read(&mut reader, CommitmentTree::read)?;

         let mut witness = IncrementalWitness {
             tree,
@@ -259,7 +341,7 @@ impl<Node: Hashable> IncrementalWitness<Node> {
     pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
         self.tree.write(&mut writer)?;
         Vector::write(&mut writer, &self.filled, |w, n| n.write(w))?;
-        Optional::write(&mut writer, &self.cursor, |w, t| t.write(w))
+        Optional::write(&mut writer, self.cursor.as_ref(), |w, t| t.write(w))
     }

     /// Returns the position of the witnessed leaf node in the commitment tree.
@@ -0,0 +1,312 @@
+//! Implementations of serialization and parsing for Orchard note commitment trees.
+
+use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
+use std::convert::TryFrom;
+use std::hash::Hash;
+use std::io::{self, Read, Write};
+
+use incrementalmerkletree::{
+    bridgetree::{
+        AuthFragment, BridgeTree, Checkpoint, Frontier, Leaf, MerkleBridge, NonEmptyFrontier,
+    },
+    Hashable, Position,
+};
+use orchard::tree::MerkleCrhOrchardOutput;
+
+use super::{CommitmentTree, HashSer};
+use crate::serialize::{Optional, Vector};
+
+pub const SER_V1: u8 = 1;
+
+pub fn read_frontier_v0<H: Hashable + super::Hashable, R: Read>(
+    mut reader: R,
+) -> io::Result<Frontier<H, 32>> {
+    let tree = CommitmentTree::read(&mut reader)?;
+
+    Ok(tree.to_frontier())
+}
+
+impl HashSer for MerkleCrhOrchardOutput {
+    fn read<R: Read>(mut reader: R) -> io::Result<Self>
+    where
+        Self: Sized,
+    {
+        let mut repr = [0u8; 32];
+        reader.read_exact(&mut repr)?;
+        <Option<_>>::from(Self::from_bytes(&repr)).ok_or_else(|| {
+            io::Error::new(
+                io::ErrorKind::InvalidInput,
+                "Non-canonical encoding of Pallas base field value.",
+            )
+        })
+    }
+
+    fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
+        writer.write_all(&self.to_bytes())
+    }
+}
+
+pub fn write_nonempty_frontier_v1<H: HashSer, W: Write>(
+    mut writer: W,
+    frontier: &NonEmptyFrontier<H>,
+) -> io::Result<()> {
+    writer.write_u64::<LittleEndian>(<u64>::from(frontier.position()))?;
+    match frontier.leaf() {
+        Leaf::Left(a) => {
+            a.write(&mut writer)?;
+            Optional::write(&mut writer, None, |w, n: &H| n.write(w))?;
+        }
+        Leaf::Right(a, b) => {
+            a.write(&mut writer)?;
+            Optional::write(&mut writer, Some(b), |w, n| n.write(w))?;
+        }
+    }
+    Vector::write(&mut writer, &frontier.ommers(), |w, e| e.write(w))?;
+
+    Ok(())
+}
+
+#[allow(clippy::redundant_closure)]
+pub fn read_nonempty_frontier_v1<H: HashSer + Clone, R: Read>(
+    mut reader: R,
+) -> io::Result<NonEmptyFrontier<H>> {
+    let position = read_position(&mut reader)?;
+    let left = H::read(&mut reader)?;
+    let right = Optional::read(&mut reader, H::read)?;
+
+    let leaf = right.map_or_else(
+        || Leaf::Left(left.clone()),
+        |r| Leaf::Right(left.clone(), r),
+    );
+    let ommers = Vector::read(&mut reader, |r| H::read(r))?;
+
+    NonEmptyFrontier::from_parts(position, leaf, ommers).map_err(|err| {
+        io::Error::new(
+            io::ErrorKind::InvalidInput,
+            format!("Parsing resulted in an invalid Merkle frontier: {:?}", err),
+        )
+    })
+}
+
+pub fn write_frontier_v1<H: HashSer, W: Write>(
+    writer: W,
+    frontier: &Frontier<H, 32>,
+) -> io::Result<()> {
+    Optional::write(writer, frontier.value(), write_nonempty_frontier_v1)
+}
+
+#[allow(clippy::redundant_closure)]
+pub fn read_frontier_v1<H: HashSer + Clone, R: Read>(reader: R) -> io::Result<Frontier<H, 32>> {
+    match Optional::read(reader, read_nonempty_frontier_v1)? {
+        None => Ok(Frontier::empty()),
+        Some(f) => Frontier::try_from(f).map_err(|err| {
+            io::Error::new(
+                io::ErrorKind::InvalidInput,
+                format!("Parsing resulted in an invalid Merkle frontier: {:?}", err),
+            )
+        }),
+    }
+}
+
+pub fn write_auth_fragment_v1<H: HashSer, W: Write>(
+    mut writer: W,
+    fragment: &AuthFragment<H>,
+) -> io::Result<()> {
+    writer.write_u64::<LittleEndian>(<u64>::from(fragment.position()))?;
+    writer.write_u64::<LittleEndian>(fragment.altitudes_observed() as u64)?;
+    Vector::write(&mut writer, fragment.values(), |w, a| a.write(w))
+}
+
+pub fn read_position<R: Read>(mut reader: R) -> io::Result<Position> {
+    Ok(Position::from(reader.read_u64::<LittleEndian>()? as usize))
+}
+
+#[allow(clippy::redundant_closure)]
+pub fn read_auth_fragment_v1<H: HashSer, R: Read>(mut reader: R) -> io::Result<AuthFragment<H>> {
+    let position = read_position(&mut reader)?;
+    let alts_observed = reader.read_u64::<LittleEndian>()? as usize;
+    let values = Vector::read(&mut reader, |r| H::read(r))?;
+
+    Ok(AuthFragment::from_parts(position, alts_observed, values))
+}
+
+pub fn write_bridge_v1<H: HashSer, W: Write>(
+    mut writer: W,
+    bridge: &MerkleBridge<H>,
+) -> io::Result<()> {
+    Optional::write(
+        &mut writer,
+        bridge.prior_position().map(<u64>::from),
+        |w, n| w.write_u64::<LittleEndian>(n),
+    )?;
+    Vector::write(
+        &mut writer,
+        &bridge.auth_fragments().iter().collect::<Vec<_>>(),
+        |w, (i, a)| {
+            w.write_u64::<LittleEndian>(**i as u64)?;
+            write_auth_fragment_v1(w, a)
+        },
+    )?;
+    write_nonempty_frontier_v1(&mut writer, bridge.frontier())?;
+
+    Ok(())
+}
+
+pub fn read_bridge_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<MerkleBridge<H>> {
+    let prior_position = Optional::read(&mut reader, read_position)?;
+    let auth_fragments = Vector::read(&mut reader, |r| {
+        Ok((
+            r.read_u64::<LittleEndian>()? as usize,
+            read_auth_fragment_v1(r)?,
+        ))
+    })?
+    .into_iter()
+    .collect();
+    let frontier = read_nonempty_frontier_v1(&mut reader)?;
+
+    Ok(MerkleBridge::from_parts(
+        prior_position,
+        auth_fragments,
+        frontier,
+    ))
+}
+
+pub const EMPTY_CHECKPOINT: u8 = 0;
+pub const BRIDGE_CHECKPOINT: u8 = 1;
+
+pub fn write_checkpoint_v1<H: HashSer, W: Write>(
+    mut writer: W,
+    checkpoint: &Checkpoint<H>,
+) -> io::Result<()> {
+    match checkpoint {
+        Checkpoint::Empty => {
+            writer.write_u8(EMPTY_CHECKPOINT)?;
+        }
+        Checkpoint::AtIndex(i, b) => {
+            writer.write_u8(BRIDGE_CHECKPOINT)?;
+            writer.write_u64::<LittleEndian>(*i as u64)?;
+            write_bridge_v1(&mut writer, b)?;
+        }
+    }
+
+    Ok(())
+}
+
+pub fn read_checkpoint_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<Checkpoint<H>> {
+    match reader.read_u8()? {
+        EMPTY_CHECKPOINT => Ok(Checkpoint::Empty),
+        BRIDGE_CHECKPOINT => Ok(Checkpoint::AtIndex(
+            reader.read_u64::<LittleEndian>()? as usize,
+            read_bridge_v1(&mut reader)?,
+        )),
+        flag => Err(io::Error::new(
+            io::ErrorKind::InvalidInput,
+            format!("Unrecognized checkpoint variant identifier: {:?}", flag),
+        )),
+    }
+}
+
+pub fn write_tree_v1<H: HashSer + Hash + Eq, W: Write>(
+    mut writer: W,
+    tree: &BridgeTree<H, 32>,
+) -> io::Result<()> {
+    Vector::write(&mut writer, tree.bridges(), |w, b| write_bridge_v1(w, b))?;
+    Vector::write(
+        &mut writer,
+        &tree.witnessable_leaves().iter().collect::<Vec<_>>(),
+        |mut w, (a, i)| {
+            a.write(&mut w)?;
+            w.write_u64::<LittleEndian>(**i as u64)?;
+            Ok(())
+        },
+    )?;
+    Vector::write(&mut writer, tree.checkpoints(), |w, c| {
+        write_checkpoint_v1(w, c)
+    })?;
+    writer.write_u64::<LittleEndian>(tree.max_checkpoints() as u64)?;
+
+    Ok(())
+}
+
+#[allow(clippy::redundant_closure)]
+pub fn read_tree_v1<H: Hashable + HashSer + Hash + Eq + Clone, R: Read>(
+    mut reader: R,
+) -> io::Result<BridgeTree<H, 32>> {
+    BridgeTree::from_parts(
+        Vector::read(&mut reader, |r| read_bridge_v1(r))?,
+        Vector::read(&mut reader, |mut r| {
+            Ok((H::read(&mut r)?, r.read_u64::<LittleEndian>()? as usize))
+        })?
+        .into_iter()
+        .collect(),
+        Vector::read(&mut reader, |r| read_checkpoint_v1(r))?,
+        reader.read_u64::<LittleEndian>()? as usize,
+    )
+    .map_err(|err| {
+        io::Error::new(
+            io::ErrorKind::InvalidInput,
+            format!(
+                "Consistency violation found when attempting to deserialize Merkle tree: {:?}",
+                err
+            ),
+        )
+    })
+}
+
+pub fn write_tree<H: HashSer + Hash + Eq, W: Write>(
+    mut writer: W,
+    tree: &BridgeTree<H, 32>,
+) -> io::Result<()> {
+    writer.write_u8(SER_V1)?;
+    write_tree_v1(&mut writer, tree)
+}
+
+pub fn read_tree<H: Hashable + HashSer + Hash + Eq + Clone, R: Read>(
+    mut reader: R,
+) -> io::Result<BridgeTree<H, 32>> {
+    match reader.read_u8()? {
+        SER_V1 => read_tree_v1(&mut reader),
+        flag => Err(io::Error::new(
+            io::ErrorKind::InvalidInput,
+            format!("Unrecognized tree serialization version: {:?}", flag),
+        )),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use proptest::prelude::*;
+
+    use incrementalmerkletree::bridgetree::Frontier;
+
+    use super::*;
+    use crate::{
+        merkle_tree::testing::arb_commitment_tree,
+        sapling::{testing as sapling, Node},
+    };
+
+    proptest! {
+        #[test]
+        fn frontier_serialization_v0(t in arb_commitment_tree(0, sapling::arb_node()))
+        {
+            let mut buffer = vec![];
+            t.write(&mut buffer).unwrap();
+            let frontier: Frontier<Node, 32> = read_frontier_v0(&buffer[..]).unwrap();
+
+            let expected: Frontier<Node, 32> = t.to_frontier();
+            assert_eq!(frontier, expected);
+        }
+
+        #[test]
+        fn frontier_serialization_v1(t in arb_commitment_tree(1, sapling::arb_node()))
+        {
+            let original: Frontier<Node, 32> = t.to_frontier();
+
+            let mut buffer = vec![];
+            write_frontier_v1(&mut buffer, &original).unwrap();
+            let read: Frontier<Node, 32> = read_frontier_v1(&buffer[..]).unwrap();
+
+            assert_eq!(read, original);
+        }
+    }
+}
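As a usage sketch of the versioned entry points above (hypothetical caller code; the crate paths are assumptions, the trait bounds are copied from write_tree and read_tree in this diff):

use std::hash::Hash;
use std::io;

use incrementalmerkletree::{bridgetree::BridgeTree, Hashable};
use zcash_primitives::merkle_tree::incremental::{read_tree, write_tree};
use zcash_primitives::merkle_tree::HashSer;

// Hypothetical round-trip helper, generic over any node type that satisfies
// the bounds required by `write_tree` / `read_tree`.
fn roundtrip<H: Hashable + HashSer + Hash + Eq + Clone>(
    tree: &BridgeTree<H, 32>,
) -> io::Result<BridgeTree<H, 32>> {
    let mut buf = vec![];
    write_tree(&mut buf, tree)?; // writes the SER_V1 tag, then the v1 body
    read_tree(&buf[..]) // checks the tag and dispatches to read_tree_v1
}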
@@ -13,6 +13,7 @@ use blake2s_simd::Params as Blake2sParams;
 use byteorder::{LittleEndian, WriteBytesExt};
 use ff::{Field, PrimeField};
 use group::{Curve, Group, GroupEncoding};
+use incrementalmerkletree::{self, Altitude};
 use lazy_static::lazy_static;
 use rand_core::{CryptoRng, RngCore};
 use std::array::TryFromSliceError;
@@ -22,7 +23,7 @@ use subtle::{Choice, ConstantTimeEq};

 use crate::{
     constants::{self, SPENDING_KEY_GENERATOR},
-    merkle_tree::Hashable,
+    merkle_tree::{HashSer, Hashable},
     transaction::components::amount::MAX_MONEY,
 };

@@ -34,6 +35,7 @@ use self::{
 };

 pub const SAPLING_COMMITMENT_TREE_DEPTH: usize = 32;
+pub const SAPLING_COMMITMENT_TREE_DEPTH_U8: u8 = 32;

 /// Compute a parent node in the Sapling commitment tree given its two children.
 pub fn merkle_hash(depth: usize, lhs: &[u8; 32], rhs: &[u8; 32]) -> [u8; 32] {
@@ -81,7 +83,25 @@ impl Node {
     }
 }

-impl Hashable for Node {
+impl incrementalmerkletree::Hashable for Node {
+    fn empty_leaf() -> Self {
+        Node {
+            repr: Note::uncommitted().to_repr(),
+        }
+    }
+
+    fn combine(altitude: Altitude, lhs: &Self, rhs: &Self) -> Self {
+        Node {
+            repr: merkle_hash(altitude.into(), &lhs.repr, &rhs.repr),
+        }
+    }
+
+    fn empty_root(altitude: Altitude) -> Self {
+        EMPTY_ROOTS[<usize>::from(altitude)]
+    }
+}
+
+impl HashSer for Node {
     fn read<R: Read>(mut reader: R) -> io::Result<Self> {
         let mut repr = [0u8; 32];
         reader.read_exact(&mut repr)?;
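With this split, Node implements incrementalmerkletree::Hashable and HashSer directly, and (being Copy) gets the legacy merkle_tree::Hashable trait back through the blanket impl added earlier in this diff. A hedged sketch of calling the legacy usize-based API, with crate paths assumed:

use zcash_primitives::merkle_tree::Hashable;
use zcash_primitives::sapling::Node;

// Hypothetical: the legacy usize-based calls still work for Node; the blanket
// impl forwards them to incrementalmerkletree::Hashable at Altitude::from(1u8).
fn parent_of_empty_subtrees() -> Node {
    let a = <Node as Hashable>::empty_root(1);
    let b = <Node as Hashable>::empty_root(1);
    <Node as Hashable>::combine(1, &a, &b)
}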
@@ -91,22 +111,6 @@ impl Hashable for Node {
     fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
         writer.write_all(self.repr.as_ref())
     }
-
-    fn combine(depth: usize, lhs: &Self, rhs: &Self) -> Self {
-        Node {
-            repr: merkle_hash(depth, &lhs.repr, &rhs.repr),
-        }
-    }
-
-    fn blank() -> Self {
-        Node {
-            repr: Note::uncommitted().to_repr(),
-        }
-    }
-
-    fn empty_root(depth: usize) -> Self {
-        EMPTY_ROOTS[depth]
-    }
 }

 impl From<Node> for bls12_381::Scalar {
@@ -122,11 +122,11 @@ pub struct Optional;
 impl Optional {
     pub fn read<R: Read, T, F>(mut reader: R, func: F) -> io::Result<Option<T>>
     where
-        F: Fn(&mut R) -> io::Result<T>,
+        F: Fn(R) -> io::Result<T>,
     {
         match reader.read_u8()? {
             0 => Ok(None),
-            1 => Ok(Some(func(&mut reader)?)),
+            1 => Ok(Some(func(reader)?)),
             _ => Err(io::Error::new(
                 io::ErrorKind::InvalidInput,
                 "non-canonical Option<T>",
@@ -134,15 +134,15 @@ impl Optional {
         }
     }

-    pub fn write<W: Write, T, F>(mut writer: W, val: &Option<T>, func: F) -> io::Result<()>
+    pub fn write<W: Write, T, F>(mut writer: W, val: Option<T>, func: F) -> io::Result<()>
     where
-        F: Fn(&mut W, &T) -> io::Result<()>,
+        F: Fn(W, T) -> io::Result<()>,
    {
         match val {
             None => writer.write_u8(0),
             Some(e) => {
                 writer.write_u8(1)?;
-                func(&mut writer, e)
+                func(writer, e)
             }
         }
     }
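For orientation, a sketch of the new by-value calling convention for these helpers (hypothetical wrappers; the crate paths and helper names are assumptions, and the call patterns mirror the IncrementalWitness and CommitmentTree changes earlier in this diff):

use std::io::{self, Read, Write};
use zcash_primitives::merkle_tree::HashSer;
use zcash_primitives::serialize::Optional;

// Hypothetical helpers showing the revised signatures in use.
fn write_opt_node<N: HashSer, W: Write>(mut w: W, node: Option<&N>) -> io::Result<()> {
    // `val` is now an Option<T> taken by value, so an Option<&N> (for example
    // the result of `.as_ref()` on an owned Option) is passed straight through.
    Optional::write(&mut w, node, |w, n| n.write(w))
}

fn read_opt_node<N: HashSer, R: Read>(mut r: R) -> io::Result<Option<N>> {
    // The parse function now receives the reader by value, so a plain
    // function reference such as `N::read` can be used directly.
    Optional::read(&mut r, N::read)
}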
@@ -226,7 +226,7 @@ mod tests {
     macro_rules! eval {
         ($value:expr, $expected:expr, $write:expr, $read:expr) => {
             let mut data = vec![];
-            Optional::write(&mut data, &$value, $write).unwrap();
+            Optional::write(&mut data, $value, $write).unwrap();
             assert_eq!(&data[..], &$expected[..]);
             match Optional::read(&data[..], $read) {
                 Ok(v) => assert_eq!(v, $value),
@@ -237,7 +237,7 @@ mod tests {

     macro_rules! eval_u8 {
         ($value:expr, $expected:expr) => {
-            eval!($value, $expected, |w, e| w.write_u8(*e), |r| r.read_u8())
+            eval!($value, $expected, |w, e| w.write_u8(e), |mut r| r.read_u8())
         };
     }

@@ -246,7 +246,7 @@ mod tests {
             eval!(
                 $value,
                 $expected,
-                |w, v| Vector::write(w, v, |w, e| w.write_u8(*e)),
+                |w, v| Vector::write(w, &v, |w, e| w.write_u8(*e)),
                 |r| Vector::read(r, |r| r.read_u8())
             )
         };