diff --git a/bridgetree/src/lib.rs b/bridgetree/src/lib.rs
index acb815d..7b01b34 100644
--- a/bridgetree/src/lib.rs
+++ b/bridgetree/src/lib.rs
@@ -1093,15 +1093,14 @@ mod tests {
     where
         G::Value: Hashable + Clone + Ord + Debug + 'static,
     {
-        proptest::collection::vec(arb_operation(item_gen, 0..max_count), 0..max_count).prop_map(
-            |ops| {
-                let mut tree: BridgeTree = BridgeTree::new(10, 0);
-                for (i, op) in ops.into_iter().enumerate() {
-                    apply_operation(&mut tree, op.map_checkpoint_id(|_| i));
-                }
-                tree
-            },
-        )
+        let pos_gen = (0..max_count).prop_map(|p| Position::try_from(p).unwrap());
+        proptest::collection::vec(arb_operation(item_gen, pos_gen), 0..max_count).prop_map(|ops| {
+            let mut tree: BridgeTree = BridgeTree::new(10, 0);
+            for (i, op) in ops.into_iter().enumerate() {
+                apply_operation(&mut tree, op.map_checkpoint_id(|_| i));
+            }
+            tree
+        })
     }
 
     proptest! {
@@ -1171,11 +1170,11 @@ mod tests {
         let mut t = BridgeTree::::new(10, 0);
         let mut to_unmark = vec![];
         let mut has_witness = vec![];
-        for i in 0usize..100 {
+        for i in 0u64..100 {
             let elem: String = format!("{},", i);
             assert!(t.append(elem), "Append should succeed.");
             if i % 5 == 0 {
-                t.checkpoint(i + 1);
+                t.checkpoint(usize::try_from(i).unwrap() + 1);
             }
             if i % 7 == 0 {
                 t.mark();
@@ -1235,7 +1234,10 @@ mod tests {
         #[test]
         fn check_randomized_u64_ops(
            ops in proptest::collection::vec(
-                arb_operation((0..32u64).prop_map(SipHashable), 0usize..100),
+                arb_operation(
+                    (0..32u64).prop_map(SipHashable),
+                    (0u64..100).prop_map(Position::from)
+                ),
                 1..100
             )
         ) {
@@ -1247,7 +1249,10 @@ mod tests {
         #[test]
         fn check_randomized_str_ops(
            ops in proptest::collection::vec(
-                arb_operation((97u8..123).prop_map(|c| char::from(c).to_string()), 0usize..100),
+                arb_operation(
+                    (97u8..123).prop_map(|c| char::from(c).to_string()),
+                    (0u64..100).prop_map(Position::from)
+                ),
                 1..100
             )
         ) {
diff --git a/incrementalmerkletree/CHANGELOG.md b/incrementalmerkletree/CHANGELOG.md
index 08281ff..d44c85f 100644
--- a/incrementalmerkletree/CHANGELOG.md
+++ b/incrementalmerkletree/CHANGELOG.md
@@ -18,6 +18,22 @@ and this project adheres to Rust's notion of
   the `legacy-api` feature flag related to constructing witnesses for leaves
   of a Merkle tree.
 
+### Changed
+- `Position` has been made isomorphic to `u64` via introduction of `From`
+  implementations between these types.
+- `Address::index` now returns `u64` instead of `usize`
+- The `expected_ommers` field of `FrontierError::PositionMismatch` now
+  has type `u8` instead of `usize`.
+- `Address::context` now also uses `u64` instead of `usize` for when it returns
+  a range of index values.
+
+### Removed
+- The `From<usize>` impl for `Position` has been removed, as has
+  `From<Position> for usize`. In addition, `Add` and `Sub` impls
+  that previously allowed numeric operations between `usize` and
+  `Position` values have been removed in favor of similar operations
+  that instead allow computing with `u64`.
+
 ## [0.3.1] - 2023-02-28
 
 ### Fixed
diff --git a/incrementalmerkletree/src/frontier.rs b/incrementalmerkletree/src/frontier.rs
index d19e433..a93e4bf 100644
--- a/incrementalmerkletree/src/frontier.rs
+++ b/incrementalmerkletree/src/frontier.rs
@@ -12,7 +12,7 @@ use {std::collections::VecDeque, std::iter::repeat};
 pub enum FrontierError {
     /// An error representing that the number of ommers provided in frontier construction does not
     /// the expected length of the ommers list given the position.
-    PositionMismatch { expected_ommers: usize },
+    PositionMismatch { expected_ommers: u8 },
     /// An error representing that the position and/or list of ommers provided to frontier
     /// construction would result in a frontier that exceeds the maximum statically allowed depth
     /// of the tree. `depth` is the minimum tree depth that would be required in order for that
@@ -43,7 +43,7 @@ impl NonEmptyFrontier {
     /// Constructs a new frontier from its constituent parts.
     pub fn from_parts(position: Position, leaf: H, ommers: Vec<H>) -> Result<Self, FrontierError> {
         let expected_ommers = position.past_ommer_count();
-        if ommers.len() == expected_ommers {
+        if ommers.len() == expected_ommers.into() {
             Ok(Self {
                 position,
                 leaf,
@@ -89,25 +89,25 @@ impl NonEmptyFrontier {
             let new_root_level = self.position.root_level();
 
             let mut carry = Some((prior_leaf, 0.into()));
-            let mut new_ommers = Vec::with_capacity(self.position.past_ommer_count());
+            let mut new_ommers = Vec::with_capacity(self.position.past_ommer_count().into());
             for (addr, source) in prior_position.witness_addrs(new_root_level) {
                 if let Source::Past(i) = source {
                     if let Some((carry_ommer, carry_lvl)) = carry.as_ref() {
                         if *carry_lvl == addr.level() {
                             carry = Some((
-                                H::combine(addr.level(), &self.ommers[i], carry_ommer),
+                                H::combine(addr.level(), &self.ommers[usize::from(i)], carry_ommer),
                                 addr.level() + 1,
                             ))
                         } else {
                             // insert the carry at the first empty slot; then the rest of the
                             // ommers will remain unchanged
                             new_ommers.push(carry_ommer.clone());
-                            new_ommers.push(self.ommers[i].clone());
+                            new_ommers.push(self.ommers[usize::from(i)].clone());
                             carry = None;
                         }
                     } else {
                         // when there's no carry, just push on the ommer value
-                        new_ommers.push(self.ommers[i].clone());
+                        new_ommers.push(self.ommers[usize::from(i)].clone());
                     }
                 }
             }
@@ -136,7 +136,9 @@ impl NonEmptyFrontier {
                 .fold(digest, |d, l| H::combine(l, &d, &H::empty_root(l)));
 
             let res_digest = match source {
-                Source::Past(i) => H::combine(addr.level(), &self.ommers[i], &digest),
+                Source::Past(i) => {
+                    H::combine(addr.level(), &self.ommers[usize::from(i)], &digest)
+                }
                 Source::Future => {
                     H::combine(addr.level(), &digest, &H::empty_root(addr.level()))
                 }
@@ -162,7 +164,7 @@ impl NonEmptyFrontier {
         self.position()
             .witness_addrs(depth.into())
             .map(|(addr, source)| match source {
-                Source::Past(i) => Ok(self.ommers[i].clone()),
+                Source::Past(i) => Ok(self.ommers[usize::from(i)].clone()),
                 Source::Future => complement_nodes(addr).ok_or(addr),
             })
             .collect::<Result<Vec<_>, _>>()
@@ -370,14 +372,13 @@ impl CommitmentTree {
             (f.leaf().clone(), None)
         };
 
-        let upos: usize = f.position().into();
         Self {
             left: Some(left),
             right,
             parents: (1u8..DEPTH)
                 .into_iter()
                 .map(|i| {
-                    if upos & (1 << i) == 0 {
+                    if u64::from(f.position()) & (1 << i) == 0 {
                         None
                     } else {
                         ommers_iter.next()
@@ -404,7 +405,7 @@ impl CommitmentTree {
 
         // If a frontier cannot be successfully constructed from the
         // parts of a commitment tree, it is a programming error.
-        Frontier::from_parts((self.size() - 1).into(), leaf, ommers)
+        Frontier::from_parts((self.size() - 1).try_into().unwrap(), leaf, ommers)
             .expect("Frontier should be constructable from CommitmentTree.")
     }
 }
diff --git a/incrementalmerkletree/src/lib.rs b/incrementalmerkletree/src/lib.rs
index 2732368..17f4c84 100644
--- a/incrementalmerkletree/src/lib.rs
+++ b/incrementalmerkletree/src/lib.rs
@@ -51,7 +51,7 @@ impl Retention {
 pub enum Source {
     /// The sibling to the address can be derived from the incremental frontier
     /// at the contained ommer index
-    Past(usize),
+    Past(u8),
     /// The sibling to the address must be obtained from values discovered by
     /// the addition of more nodes to the tree
     Future,
 }
@@ -61,7 +61,7 @@ pub enum Source {
 struct WitnessAddrsIter {
     root_level: Level,
     current: Address,
-    ommer_count: usize,
+    ommer_count: u8,
 }
 
 impl Iterator for WitnessAddrsIter {
@@ -91,7 +91,7 @@ impl Iterator for WitnessAddrsIter {
 /// A type representing the position of a leaf in a Merkle tree.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 #[repr(transparent)]
-pub struct Position(usize);
+pub struct Position(u64);
 
 impl Position {
     /// Return whether the position is odd-valued.
@@ -102,15 +102,17 @@ impl Position {
     /// Returns the minimum possible level of the root of a binary tree containing at least
     /// `self + 1` nodes.
     pub fn root_level(&self) -> Level {
-        Level((usize::BITS - self.0.leading_zeros()) as u8)
+        Level((u64::BITS - self.0.leading_zeros()) as u8)
     }
 
     /// Returns the number of cousins and/or ommers required to construct an authentication
     /// path to the root of a merkle tree that has `self + 1` nodes.
-    pub fn past_ommer_count(&self) -> usize {
+    pub fn past_ommer_count(&self) -> u8 {
         (0..self.root_level().0)
             .filter(|i| (self.0 >> i) & 0x1 == 1)
             .count()
+            .try_into()
+            .unwrap() // this is safe because we're counting within a `u8` range
     }
 
     /// Returns whether the binary tree having `self` as the position of the rightmost leaf
@@ -133,34 +135,34 @@ impl Position {
     }
 }
 
-impl From<Position> for usize {
-    fn from(p: Position) -> usize {
+impl From<Position> for u64 {
+    fn from(p: Position) -> Self {
         p.0
     }
 }
 
-impl From<Position> for u64 {
-    fn from(p: Position) -> Self {
-        p.0 as u64
+impl From<u64> for Position {
+    fn from(sz: u64) -> Self {
+        Self(sz)
     }
 }
 
-impl Add<usize> for Position {
+impl Add<u64> for Position {
     type Output = Position;
-    fn add(self, other: usize) -> Self {
+    fn add(self, other: u64) -> Self {
         Position(self.0 + other)
     }
 }
 
-impl AddAssign<usize> for Position {
-    fn add_assign(&mut self, other: usize) {
+impl AddAssign<u64> for Position {
+    fn add_assign(&mut self, other: u64) {
         self.0 += other
     }
 }
 
-impl Sub<usize> for Position {
+impl Sub<u64> for Position {
     type Output = Position;
-    fn sub(self, other: usize) -> Self {
+    fn sub(self, other: u64) -> Self {
         if self.0 < other {
             panic!("position underflow");
         }
@@ -168,16 +170,17 @@ impl Sub for Position {
     }
 }
 
-impl From<usize> for Position {
-    fn from(sz: usize) -> Self {
-        Self(sz)
+impl TryFrom<usize> for Position {
+    type Error = TryFromIntError;
+    fn try_from(sz: usize) -> Result<Self, Self::Error> {
+        <u64>::try_from(sz).map(Self)
     }
 }
 
-impl TryFrom<u64> for Position {
+impl TryFrom<Position> for usize {
     type Error = TryFromIntError;
-    fn try_from(sz: u64) -> Result<Self, Self::Error> {
-        <usize>::try_from(sz).map(Self)
+    fn try_from(p: Position) -> Result<Self, Self::Error> {
+        <usize>::try_from(p.0)
     }
 }
@@ -216,6 +219,7 @@ impl From for u8 {
     }
 }
 
+// Supporting sub-8-bit platforms isn't on our
 impl From<Level> for usize {
     fn from(level: Level) -> usize {
         level.0 as usize
@@ -245,12 +249,12 @@ impl Sub for Level {
 #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct Address {
     level: Level,
-    index: usize,
+    index: u64,
 }
 
 impl Address {
     /// Construct a new address from its constituent parts.
-    pub fn from_parts(level: Level, index: usize) -> Self {
+    pub fn from_parts(level: Level, index: u64) -> Self {
         Address { level, index }
     }
@@ -272,7 +276,7 @@ impl Address {
     /// The index of an address is defined as the number of subtrees with their roots
     /// at the address's level that appear to the left of this address in a binary
     /// tree of arbitrary height > level * 2 + 1.
-    pub fn index(&self) -> usize {
+    pub fn index(&self) -> u64 {
         self.index
     }
 
@@ -354,7 +358,7 @@ impl Address {
     /// than or equal to that of this address) or the range of indices of root addresses of
     /// subtrees with roots at the given level contained within the tree with its root at this
     /// address otherwise.
-    pub fn context(&self, level: Level) -> Either<Address, Range<usize>> {
+    pub fn context(&self, level: Level) -> Either<Address, Range<u64>> {
         if level >= self.level {
             Either::Left(Address {
                 level,
@@ -541,7 +545,7 @@ pub(crate) mod tests {
         assert!(Position(3).is_complete_subtree(Level(2)));
         assert!(!Position(4).is_complete_subtree(Level(2)));
         assert!(Position(7).is_complete_subtree(Level(3)));
-        assert!(Position(u32::MAX as usize).is_complete_subtree(Level(32)));
+        assert!(Position(u32::MAX as u64).is_complete_subtree(Level(32)));
     }
 
     #[test]
diff --git a/incrementalmerkletree/src/testing.rs b/incrementalmerkletree/src/testing.rs
index e34eccb..96e516a 100644
--- a/incrementalmerkletree/src/testing.rs
+++ b/incrementalmerkletree/src/testing.rs
@@ -147,11 +147,11 @@ pub fn append_str(x: &str, retention: Retention) -> Operation {
     Operation::Append(x.to_string(), retention)
 }
 
-pub fn unmark(pos: usize) -> Operation {
+pub fn unmark(pos: u64) -> Operation {
     Operation::Unmark(Position::from(pos))
 }
 
-pub fn witness(pos: usize, depth: usize) -> Operation {
+pub fn witness(pos: u64, depth: usize) -> Operation {
     Operation::Witness(Position::from(pos), depth)
 }
 
@@ -218,7 +218,7 @@ pub fn arb_retention() -> impl Strategy> {
 
 pub fn arb_operation(
     item_gen: G,
-    pos_gen: impl Strategy<Value = usize> + Clone,
+    pos_gen: impl Strategy<Value = Position> + Clone,
 ) -> impl Strategy<Value = Operation<G::Value, ()>>
 where
     G::Value: Clone + 'static,
@@ -230,17 +230,11 @@ where
             Just(Operation::MarkedPositions),
         ],
         Just(Operation::GarbageCollect),
-        pos_gen
-            .clone()
-            .prop_map(|i| Operation::MarkedLeaf(Position::from(i))),
-        pos_gen
-            .clone()
-            .prop_map(|i| Operation::Unmark(Position::from(i))),
+        pos_gen.clone().prop_map(Operation::MarkedLeaf),
+        pos_gen.clone().prop_map(Operation::Unmark),
         Just(Operation::Checkpoint(())),
         Just(Operation::Rewind),
-        pos_gen
-            .prop_flat_map(|i| (0usize..10)
-                .prop_map(move |depth| Operation::Witness(Position::from(i), depth))),
+        pos_gen.prop_flat_map(|i| (0usize..10).prop_map(move |depth| Operation::Witness(i, depth))),
     ]
 }
@@ -289,19 +283,20 @@ pub fn check_operations>(
                 } else {
                     prop_assert_eq!(
                         tree_size,
-                        tree.current_position().map_or(0, |p| usize::from(p) + 1)
+                        tree.current_position()
+                            .map_or(0, |p| usize::try_from(p).unwrap() + 1)
                     );
                 }
             }
            CurrentPosition => {
                 if let Some(pos) = tree.current_position() {
                     prop_assert!(tree_size > 0);
-                    prop_assert_eq!(tree_size - 1, pos.into());
+                    prop_assert_eq!(tree_size - 1, pos.try_into().unwrap());
                 }
             }
            MarkedLeaf(position) => {
                 if tree.get_marked_leaf(*position).is_some() {
-                    prop_assert!(<usize>::from(*position) < tree_size);
+                    prop_assert!(<usize>::try_from(*position).unwrap() < tree_size);
                 }
             }
            Unmark(position) => {
@@ -322,7 +317,7 @@ pub fn check_operations>(
             }
            Witness(position, depth) => {
                 if let Some(path) = tree.witness(*position, *depth) {
-                    let value: H = tree_values[<usize>::from(*position)].clone();
+                    let value: H = tree_values[<usize>::try_from(*position).unwrap()].clone();
                     let tree_root = tree.root(*depth);
 
                     if tree_checkpoints.len() >= *depth {
@@ -361,7 +356,7 @@ pub fn compute_root_from_witness(value: H, position: Position, path
     for (i, v) in path
         .iter()
         .enumerate()
-        .map(|(i, v)| (((<usize>::from(position) >> i) & 1) == 1, v))
+        .map(|(i, v)| (((<u64>::from(position) >> i) & 1) == 1, v))
     {
         if i {
             cur = H::combine(lvl, v, &cur);
@@ -719,7 +714,7 @@ pub fn check_witnesses + std::fmt::Debug, F: Fn(usize) ->
         .map(|c| Append(c.to_string(), Marked))
         .chain(Some(Append('m'.to_string(), Ephemeral)))
         .chain(Some(Append('n'.to_string(), Ephemeral)))
-        .chain(Some(Witness(11usize.into(), 0)))
+        .chain(Some(Witness(11u64.into(), 0)))
         .collect::<Vec<_>>();
 
     let mut tree = new_tree(100);
@@ -770,7 +765,7 @@ pub fn check_witnesses + std::fmt::Debug, F: Fn(usize) ->
                 is_marked: false,
             },
         ),
-        Witness(3usize.into(), 5),
+        Witness(3u64.into(), 5),
     ];
     let mut tree = new_tree(100);
     assert_eq!(
@@ -963,23 +958,23 @@ pub fn check_rewind_remove_mark, F: Fn(usize) -> T>(new_t
     tree.append("e".to_string(), Retention::Marked);
     tree.checkpoint(1);
     assert!(tree.rewind());
-    assert!(tree.remove_mark(0usize.into()));
+    assert!(tree.remove_mark(0u64.into()));
 
     // use a maximum number of checkpoints of 1
     let mut tree = new_tree(1);
     assert!(tree.append("e".to_string(), Retention::Marked));
     assert!(tree.checkpoint(1));
-    assert!(tree.marked_positions().contains(&0usize.into()));
+    assert!(tree.marked_positions().contains(&0u64.into()));
     assert!(tree.append("f".to_string(), Retention::Ephemeral));
 
     // simulate a spend of `e` at `f`
-    assert!(tree.remove_mark(0usize.into()));
+    assert!(tree.remove_mark(0u64.into()));
 
     // even though the mark has been staged for removal, it's not gone yet
-    assert!(tree.marked_positions().contains(&0usize.into()));
+    assert!(tree.marked_positions().contains(&0u64.into()));
     assert!(tree.checkpoint(2));
 
     // the newest checkpoint will have caused the oldest to roll off, and
     // so the forgotten node will be unmarked
-    assert!(!tree.marked_positions().contains(&0usize.into()));
-    assert!(!tree.remove_mark(0usize.into()));
+    assert!(!tree.marked_positions().contains(&0u64.into()));
+    assert!(!tree.remove_mark(0u64.into()));
 
     // The following check_operations tests cover errors where the
     // test framework itself previously did not correctly handle
diff --git a/incrementalmerkletree/src/testing/complete_tree.rs b/incrementalmerkletree/src/testing/complete_tree.rs
index db4cb65..5d6591a 100644
--- a/incrementalmerkletree/src/testing/complete_tree.rs
+++ b/incrementalmerkletree/src/testing/complete_tree.rs
@@ -4,6 +4,8 @@ use std::collections::{BTreeMap, BTreeSet};
 
 use crate::{testing::Tree, Hashable, Level, Position, Retention};
 
+const MAX_COMPLETE_SIZE_ERROR: &str = "Positions of a `CompleteTree` must fit into the platform word size, because larger complete trees are not representable.";
+
 pub(crate) fn root(leaves: &[H], depth: u8) -> H {
     let empty_leaf = H::empty_leaf();
     let mut leaves = leaves
@@ -129,7 +131,9 @@ impl CompleteTr
         if self.leaves.is_empty() {
             None
         } else {
-            Some((self.leaves.len() - 1).into())
+            // this unwrap is safe because nobody is ever going to create a complete
+            // tree with more than 2^64 leaves
+            Some((self.leaves.len() - 1).try_into().unwrap())
         }
     }
 
@@ -158,7 +162,10 @@ impl CompleteTr
     fn checkpoint(&mut self, id: C, pos: Option<Position>) {
         self.checkpoints.insert(
             id,
-            Checkpoint::at_length(pos.map_or_else(|| 0, |p| usize::from(p) + 1)),
+            Checkpoint::at_length(pos.map_or_else(
+                || 0,
+                |p| usize::try_from(p).expect(MAX_COMPLETE_SIZE_ERROR) + 1,
+            )),
         );
         if self.checkpoints.len() > self.max_checkpoints {
             self.drop_oldest_checkpoint();
@@ -226,7 +233,7 @@ impl
     fn get_marked_leaf(&self, position: Position) -> Option<&H> {
         if self.marks.contains(&position) {
             self.leaves
-                .get(usize::from(position))
+                .get(usize::try_from(position).expect(MAX_COMPLETE_SIZE_ERROR))
                 .and_then(|opt: &Option<H>| opt.as_ref())
         } else {
             None
@@ -254,7 +261,7 @@ impl IncrementalWitness {
     /// Returns the position of the witnessed leaf node in the commitment tree.
     pub fn position(&self) -> Position {
-        Position::from(self.tree.size() - 1)
+        Position::try_from(self.tree.size() - 1)
+            .expect("Commitment trees with more than 2^64 leaves are unsupported.")
     }
 
     /// Finds the next "depth" of an unfilled subtree.
diff --git a/shardtree/src/lib.rs b/shardtree/src/lib.rs
index daa8c80..e5cdc8d 100644
--- a/shardtree/src/lib.rs
+++ b/shardtree/src/lib.rs
@@ -1,5 +1,5 @@
 use bitflags::bitflags;
-use core::convert::Infallible;
+use core::convert::{Infallible, TryFrom};
 use core::fmt::Debug;
 use core::marker::PhantomData;
 use core::ops::{Deref, Range};
@@ -1424,7 +1424,7 @@ impl ShardStore for Vec> {
     type Error = Infallible;
 
     fn get_shard(&self, shard_root: Address) -> Option<&LocatedPrunableTree<H>> {
-        self.get(shard_root.index())
+        self.get(usize::try_from(shard_root.index()).expect("SHARD_HEIGHT > 64 is unsupported"))
     }
 
     fn last_shard(&self) -> Option<&LocatedPrunableTree<H>> {
@@ -1440,7 +1440,8 @@ impl ShardStore for Vec> {
                 root: Tree(Node::Nil),
             })
         }
-        self[subtree_addr.index()] = subtree;
+        self[usize::try_from(subtree_addr.index()).expect("SHARD_HEIGHT > 64 is unsupported")] =
+            subtree;
         Ok(())
     }
 
@@ -1449,7 +1450,7 @@ impl ShardStore for Vec> {
     }
 
     fn truncate(&mut self, from: Address) -> Result<(), Self::Error> {
-        self.truncate(from.index());
+        self.truncate(usize::try_from(from.index()).expect("SHARD_HEIGHT > 64 is unsupported"));
         Ok(())
     }
 }
@@ -2764,7 +2765,10 @@ mod tests {
     #[test]
     fn check_randomized_u64_ops(
        ops in proptest::collection::vec(
-            arb_operation((0..32u64).prop_map(SipHashable), 0usize..100),
+            arb_operation(
+                (0..32u64).prop_map(SipHashable),
+                (0u64..100).prop_map(Position::from)
+            ),
             1..100
         )
     ) {
@@ -2776,7 +2780,10 @@ mod tests {
     #[test]
     fn check_randomized_str_ops(
        ops in proptest::collection::vec(
-            arb_operation((97u8..123).prop_map(|c| char::from(c).to_string()), 0usize..100),
+            arb_operation(
+                (97u8..123).prop_map(|c| char::from(c).to_string()),
+                (0u64..100).prop_map(Position::from)
+            ),
             1..100
         )
     ) {
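
Reviewer note (not part of the patch): a minimal sketch of the `u64`-based `Position` and `Address` API that the CHANGELOG entries above describe. It uses only conversions and constructors visible in this diff, except `Level::from(0u8)`, which assumes the crate's pre-existing `From<u8>` impl for `Level`; the variable names are purely illustrative.

use incrementalmerkletree::{Address, Level, Position};

fn main() {
    // `Position` is now isomorphic to `u64`: infallible conversions in both directions.
    let pos = Position::from(3u64);
    let raw: u64 = pos.into();
    assert_eq!(raw, 3);

    // Conversions involving `usize` are now fallible, since `usize` may be only 32 bits.
    let as_usize = usize::try_from(pos).expect("3 fits in a usize on any supported platform");
    assert_eq!(as_usize, 3);
    let from_usize = Position::try_from(7usize).expect("7 fits in a u64");

    // Arithmetic on positions is expressed in terms of `u64` rather than `usize`.
    assert_eq!(u64::from(pos + 1u64), 4);
    assert_eq!(u64::from(from_usize - 2u64), 5);

    // `Address::from_parts` and `Address::index` likewise use `u64` indices now.
    let addr = Address::from_parts(Level::from(0u8), 11u64);
    assert_eq!(addr.index(), 11u64);
}

The motivation mirrored by the expect messages in the diff: positions become platform-independent `u64` values, so only the conversions back into `usize` (for indexing in-memory vectors) can fail, and only on targets whose word size cannot represent the position.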