Merge pull request #525 from zcash/merge-non-consensus-changes-and-orchard

Merge non-consensus changes and `orchard` beta release
str4d 2022-03-23 00:34:29 +00:00 committed by GitHub
commit cda01fb954
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 93 additions and 43 deletions

View File

@@ -20,7 +20,5 @@ codegen-units = 1
 [patch.crates-io]
 hdwallet = { git = "https://github.com/nuttycom/hdwallet", rev = "576683b9f2865f1118c309017ff36e01f84420c9" }
-incrementalmerkletree = { git = "https://github.com/zcash/incrementalmerkletree.git", rev = "dd57b430dee7c0b163f4035fef2280cd1935036c" }
-orchard = { git = "https://github.com/zcash/orchard.git", rev = "3ddf6c49f7484ed1295bd5351317bbfe49e14472" }
 zcash_encoding = { path = "components/zcash_encoding" }
 zcash_note_encryption = { path = "components/zcash_note_encryption" }

View File

@@ -12,6 +12,7 @@ use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
 use nonempty::NonEmpty;
 use std::convert::TryFrom;
 use std::io::{self, Read, Write};
+use std::iter::FromIterator;

 /// The maximum allowed value representable as a `[CompactSize]`
 pub const MAX_COMPACT_SIZE: u32 = 0x02000000;
@@ -104,23 +105,34 @@ pub struct Vector;
 impl Vector {
     /// Reads a vector, assuming the encoding written by [`Vector::write`], using the provided
     /// function to decode each element of the vector.
-    pub fn read<R: Read, E, F>(mut reader: R, func: F) -> io::Result<Vec<E>>
+    pub fn read<R: Read, E, F>(reader: R, func: F) -> io::Result<Vec<E>>
+    where
+        F: Fn(&mut R) -> io::Result<E>,
+    {
+        Self::read_collected(reader, func)
+    }
+
+    /// Reads a CompactSize-prefixed series of elements into a collection, assuming the encoding
+    /// written by [`Vector::write`], using the provided function to decode each element.
+    pub fn read_collected<R: Read, E, F, O: FromIterator<E>>(
+        mut reader: R,
+        func: F,
+    ) -> io::Result<O>
     where
         F: Fn(&mut R) -> io::Result<E>,
     {
         let count: usize = CompactSize::read_t(&mut reader)?;
-        Array::read(reader, count, func)
+        Array::read_collected(reader, count, func)
     }

-    /// Writes a slice of values by writing [`CompactSize`]-encoded integer specifying the length of
-    /// the slice to the stream, followed by the encoding of each element of the slice as performed
-    /// by the provided function.
-    pub fn write<W: Write, E, F>(mut writer: W, vec: &[E], func: F) -> io::Result<()>
+    /// Writes a slice of values by writing [`CompactSize`]-encoded integer specifying the length
+    /// of the slice to the stream, followed by the encoding of each element of the slice as
+    /// performed by the provided function.
+    pub fn write<W: Write, E, F>(writer: W, vec: &[E], func: F) -> io::Result<()>
     where
         F: Fn(&mut W, &E) -> io::Result<()>,
     {
-        CompactSize::write(&mut writer, vec.len())?;
-        vec.iter().try_for_each(|e| func(&mut writer, e))
+        Self::write_sized(writer, vec.iter(), func)
     }

     /// Writes a NonEmpty container of values to the stream using the same encoding as
@@ -136,6 +148,21 @@ impl Vector {
         CompactSize::write(&mut writer, vec.len())?;
         vec.iter().try_for_each(|e| func(&mut writer, e))
     }
+
+    /// Writes an iterator of values by writing [`CompactSize`]-encoded integer specifying
+    /// the length of the iterator to the stream, followed by the encoding of each element
+    /// of the iterator as performed by the provided function.
+    pub fn write_sized<W: Write, E, F, I: Iterator<Item = E> + ExactSizeIterator>(
+        mut writer: W,
+        mut items: I,
+        func: F,
+    ) -> io::Result<()>
+    where
+        F: Fn(&mut W, E) -> io::Result<()>,
+    {
+        CompactSize::write(&mut writer, items.len())?;
+        items.try_for_each(|e| func(&mut writer, e))
+    }
 }

 /// Namespace for functions that perform encoding of array contents.
@@ -146,9 +173,22 @@ impl Vector {
 pub struct Array;

 impl Array {
-    /// Reads a vector, assuming the encoding written by [`Array::write`], using the provided
-    /// function to decode each element of the vector.
-    pub fn read<R: Read, E, F>(mut reader: R, count: usize, func: F) -> io::Result<Vec<E>>
+    /// Reads `count` elements from a stream into a vector, assuming the encoding written by
+    /// [`Array::write`], using the provided function to decode each element.
+    pub fn read<R: Read, E, F>(reader: R, count: usize, func: F) -> io::Result<Vec<E>>
+    where
+        F: Fn(&mut R) -> io::Result<E>,
+    {
+        Self::read_collected(reader, count, func)
+    }
+
+    /// Reads `count` elements into a collection, assuming the encoding written by
+    /// [`Array::write`], using the provided function to decode each element.
+    pub fn read_collected<R: Read, E, F, O: FromIterator<E>>(
+        mut reader: R,
+        count: usize,
+        func: F,
+    ) -> io::Result<O>
     where
         F: Fn(&mut R) -> io::Result<E>,
     {
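
The net effect of this refactor is that `Vector::read`, `Vector::write`, and `Array::read` keep their existing signatures while delegating to the new `read_collected`/`write_sized` generics. A minimal sketch of how callers can use the new entry points; the single-byte element codec and the `roundtrip` helper name are illustrative only, not part of the crate:

```rust
use std::collections::BTreeSet;
use std::io::{Cursor, Read, Write};

use zcash_encoding::Vector;

fn roundtrip() -> std::io::Result<()> {
    // Serialize a slice with a CompactSize length prefix (the unchanged `write` API).
    let mut buf = Vec::new();
    Vector::write(&mut buf, &[3u8, 1, 2, 2], |w, e| w.write_all(&[*e]))?;

    // `read` still produces a Vec<E>, preserving order and duplicates.
    let as_vec: Vec<u8> = Vector::read(Cursor::new(&buf), |r| {
        let mut b = [0u8; 1];
        r.read_exact(&mut b)?;
        Ok(b[0])
    })?;
    assert_eq!(as_vec, vec![3, 1, 2, 2]);

    // `read_collected` can target any FromIterator collection instead,
    // here deduplicating into a BTreeSet without an intermediate Vec.
    let as_set: BTreeSet<u8> = Vector::read_collected(Cursor::new(&buf), |r| {
        let mut b = [0u8; 1];
        r.read_exact(&mut b)?;
        Ok(b[0])
    })?;
    assert_eq!(as_set.len(), 3);

    // `write_sized` accepts any ExactSizeIterator, so a mapped iterator can be
    // written directly without collecting it first.
    let mut mapped = Vec::new();
    Vector::write_sized(&mut mapped, as_vec.iter().map(|e| e.wrapping_add(1)), |w, e| {
        w.write_all(&[e])
    })?;
    assert_eq!(mapped.len(), 1 + as_vec.len());

    Ok(())
}
```

The `FromIterator` bound lets callers deserialize straight into a set or map, while the `ExactSizeIterator` bound is what allows `write_sized` to emit the `CompactSize` length before consuming the iterator.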

View File

@@ -31,12 +31,12 @@ fpe = "0.5"
 group = "0.11"
 hdwallet = { version = "0.3.0", optional = true }
 hex = "0.4"
-incrementalmerkletree = "0.2"
+incrementalmerkletree = "=0.3.0-beta.1"
 jubjub = "0.8"
 lazy_static = "1"
 memuse = "0.2"
 nonempty = "0.7"
-orchard = "=0.1.0-beta.1"
+orchard = "=0.1.0-beta.2"
 proptest = { version = "1.0.0", optional = true }
 rand = "0.8"
 rand_core = "0.6"
@@ -55,7 +55,7 @@ features = ["pre-zip-212"]
 criterion = "0.3"
 proptest = "1.0.0"
 rand_xorshift = "0.3"
-orchard = { version = "=0.1.0-beta.1", features = ["test-dependencies"] }
+orchard = { version = "=0.1.0-beta.2", features = ["test-dependencies"] }

 [target.'cfg(unix)'.dev-dependencies]
 pprof = { version = "=0.6.1", features = ["criterion", "flamegraph"] }

View File

@@ -35,6 +35,37 @@ impl HashSer for MerkleHashOrchard {
     }
 }

+/// Writes a usize value encoded as a u64 in little-endian order. Since usize
+/// is platform-dependent, we consistently represent it as u64 in serialized
+/// formats.
+pub fn write_usize_leu64<W: Write>(mut writer: W, value: usize) -> io::Result<()> {
+    // Panic if we get a usize value that can't fit into a u64.
+    writer.write_u64::<LittleEndian>(value.try_into().unwrap())
+}
+
+/// Reads a usize value encoded as a u64 in little-endian order. Since usize
+/// is platform-dependent, we consistently represent it as u64 in serialized
+/// formats.
+pub fn read_leu64_usize<R: Read>(mut reader: R) -> io::Result<usize> {
+    reader.read_u64::<LittleEndian>()?.try_into().map_err(|e| {
+        io::Error::new(
+            io::ErrorKind::InvalidInput,
+            format!(
+                "usize could not be decoded from a 64-bit value on this platform: {:?}",
+                e
+            ),
+        )
+    })
+}
+
+pub fn write_position<W: Write>(mut writer: W, position: Position) -> io::Result<()> {
+    write_usize_leu64(&mut writer, position.into())
+}
+
+pub fn read_position<R: Read>(mut reader: R) -> io::Result<Position> {
+    read_leu64_usize(&mut reader).map(Position::from)
+}
+
 pub fn read_frontier_v0<H: Hashable + super::Hashable, R: Read>(
     mut reader: R,
 ) -> io::Result<Frontier<H, 32>> {
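
These helpers pin the serialized width of `usize` values to 8 little-endian bytes, so the on-disk format does not vary with the platform's pointer width; decoding a value that does not fit in the local `usize` returns `InvalidInput` rather than panicking. A minimal round-trip sketch, assuming the helpers above and `incrementalmerkletree::Position` are in scope; the `position_roundtrip` name is illustrative only:

```rust
use std::io::{self, Cursor};

use incrementalmerkletree::Position;

fn position_roundtrip() -> io::Result<()> {
    // A usize is always persisted as 8 little-endian bytes.
    let mut buf = Vec::new();
    write_usize_leu64(&mut buf, 12345)?;
    assert_eq!(buf, 12345u64.to_le_bytes());
    assert_eq!(read_leu64_usize(Cursor::new(&buf))?, 12345);

    // Positions go through the same representation.
    let mut encoded = Vec::new();
    write_position(&mut encoded, Position::from(12345usize))?;
    let decoded = read_position(Cursor::new(&encoded))?;

    // Re-encoding the decoded position reproduces the original bytes.
    let mut reencoded = Vec::new();
    write_position(&mut reencoded, decoded)?;
    assert_eq!(encoded, reencoded);
    Ok(())
}
```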
@@ -47,7 +78,7 @@ pub fn write_nonempty_frontier_v1<H: HashSer, W: Write>(
     mut writer: W,
     frontier: &NonEmptyFrontier<H>,
 ) -> io::Result<()> {
-    writer.write_u64::<LittleEndian>(<u64>::from(frontier.position()))?;
+    write_position(&mut writer, frontier.position())?;
     match frontier.leaf() {
         Leaf::Left(a) => {
             a.write(&mut writer)?;
@@ -105,36 +136,19 @@ pub fn read_frontier_v1<H: HashSer + Clone, R: Read>(reader: R) -> io::Result<Fr
     }
 }

-pub fn write_position<W: Write>(mut writer: W, position: Position) -> io::Result<()> {
-    writer.write_u64::<LittleEndian>(position.try_into().unwrap())
-}
-
-pub fn read_position<R: Read>(mut reader: R) -> io::Result<Position> {
-    let p = reader.read_u64::<LittleEndian>()?;
-    <usize>::try_from(p).map(Position::from).map_err(|err| {
-        io::Error::new(
-            io::ErrorKind::Unsupported,
-            format!(
-                "usize could not be decoded to a 64-bit value on this platform: {:?}",
-                err
-            ),
-        )
-    })
-}
-
 pub fn write_auth_fragment_v1<H: HashSer, W: Write>(
     mut writer: W,
     fragment: &AuthFragment<H>,
 ) -> io::Result<()> {
     write_position(&mut writer, fragment.position())?;
-    writer.write_u64::<LittleEndian>(fragment.altitudes_observed().try_into().unwrap())?;
+    write_usize_leu64(&mut writer, fragment.altitudes_observed())?;
     Vector::write(&mut writer, fragment.values(), |w, a| a.write(w))
 }

 #[allow(clippy::redundant_closure)]
 pub fn read_auth_fragment_v1<H: HashSer, R: Read>(mut reader: R) -> io::Result<AuthFragment<H>> {
     let position = read_position(&mut reader)?;
-    let alts_observed = reader.read_u64::<LittleEndian>()? as usize;
+    let alts_observed = read_leu64_usize(&mut reader)?;
     let values = Vector::read(&mut reader, |r| H::read(r))?;

     Ok(AuthFragment::from_parts(position, alts_observed, values))
@@ -144,16 +158,14 @@ pub fn write_bridge_v1<H: HashSer + Ord, W: Write>(
     mut writer: W,
     bridge: &MerkleBridge<H>,
 ) -> io::Result<()> {
-    Optional::write(
-        &mut writer,
-        bridge.prior_position().map(<u64>::from),
-        |w, n| w.write_u64::<LittleEndian>(n),
-    )?;
+    Optional::write(&mut writer, bridge.prior_position(), |w, pos| {
+        write_position(w, pos)
+    })?;
     Vector::write(
         &mut writer,
         &bridge.auth_fragments().iter().collect::<Vec<_>>(),
-        |w, (i, a)| {
-            w.write_u64::<LittleEndian>(u64::from(**i))?;
+        |mut w, (pos, a)| {
+            write_position(&mut w, **pos)?;
             write_auth_fragment_v1(w, a)
         },
     )?;
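
The bridge's optional prior position now also goes through `write_position` instead of being converted to a raw `u64` at the call site. A hypothetical round-trip sketch of that pattern; the `prior_position_roundtrip` helper is not part of the codebase, and it assumes `zcash_encoding::Optional::read` is the reading counterpart of the `Optional::write` call shown above:

```rust
use std::io::{self, Cursor};

use incrementalmerkletree::Position;
use zcash_encoding::Optional;

fn prior_position_roundtrip(prior_position: Option<Position>) -> io::Result<()> {
    // Serialize the optional position through write_position, as write_bridge_v1 now does.
    let mut buf = Vec::new();
    Optional::write(&mut buf, prior_position, |w, pos| write_position(w, pos))?;

    // Read it back and re-serialize; the byte streams should be identical.
    let decoded: Option<Position> = Optional::read(Cursor::new(&buf), |r| read_position(r))?;
    let mut reencoded = Vec::new();
    Optional::write(&mut reencoded, decoded, |w, pos| write_position(w, pos))?;
    assert_eq!(buf, reencoded);
    Ok(())
}
```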