Merge pull request #68 from nuttycom/u64_positions

Use `u64` for the internal state of `Position` and `Address::index`
This commit is contained in:
Kris Nuttycombe 2023-05-10 20:34:59 -06:00 committed by GitHub
commit 62acf235a7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 123 additions and 87 deletions

View File

@ -1093,15 +1093,14 @@ mod tests {
where
G::Value: Hashable + Clone + Ord + Debug + 'static,
{
proptest::collection::vec(arb_operation(item_gen, 0..max_count), 0..max_count).prop_map(
|ops| {
let mut tree: BridgeTree<G::Value, usize, 8> = BridgeTree::new(10, 0);
for (i, op) in ops.into_iter().enumerate() {
apply_operation(&mut tree, op.map_checkpoint_id(|_| i));
}
tree
},
)
let pos_gen = (0..max_count).prop_map(|p| Position::try_from(p).unwrap());
proptest::collection::vec(arb_operation(item_gen, pos_gen), 0..max_count).prop_map(|ops| {
let mut tree: BridgeTree<G::Value, usize, 8> = BridgeTree::new(10, 0);
for (i, op) in ops.into_iter().enumerate() {
apply_operation(&mut tree, op.map_checkpoint_id(|_| i));
}
tree
})
}
proptest! {
@ -1171,11 +1170,11 @@ mod tests {
let mut t = BridgeTree::<String, usize, 7>::new(10, 0);
let mut to_unmark = vec![];
let mut has_witness = vec![];
for i in 0usize..100 {
for i in 0u64..100 {
let elem: String = format!("{},", i);
assert!(t.append(elem), "Append should succeed.");
if i % 5 == 0 {
t.checkpoint(i + 1);
t.checkpoint(usize::try_from(i).unwrap() + 1);
}
if i % 7 == 0 {
t.mark();
@ -1235,7 +1234,10 @@ mod tests {
#[test]
fn check_randomized_u64_ops(
ops in proptest::collection::vec(
arb_operation((0..32u64).prop_map(SipHashable), 0usize..100),
arb_operation(
(0..32u64).prop_map(SipHashable),
(0u64..100).prop_map(Position::from)
),
1..100
)
) {
@ -1247,7 +1249,10 @@ mod tests {
#[test]
fn check_randomized_str_ops(
ops in proptest::collection::vec(
arb_operation((97u8..123).prop_map(|c| char::from(c).to_string()), 0usize..100),
arb_operation(
(97u8..123).prop_map(|c| char::from(c).to_string()),
(0u64..100).prop_map(Position::from)
),
1..100
)
) {

View File

@ -18,6 +18,22 @@ and this project adheres to Rust's notion of
the `legacy-api` feature flag related to constructing witnesses for leaves
of a Merkle tree.
### Changed
- `Position` has been made isomorphic to `u64` via introduction of `From`
implementations between these types.
- `Address::index` now returns `u64` instead of `usize`.
- The `expected_ommers` field of `FrontierError::PositionMismatch` now
has type `u8` instead of `usize`.
- `Address::context` now uses `u64` instead of `usize` when it returns
a range of index values.
### Removed
- The `From<usize>` impl for `Position` has been removed, as has
`From<Position> for usize`. In addition, `Add` and `Sub` impls
that previously allowed numeric operations between `usize` and
`Position` values have been removed in favor of similar operations
that instead allow computing with `u64`.
## [0.3.1] - 2023-02-28
### Fixed

View File

@ -12,7 +12,7 @@ use {std::collections::VecDeque, std::iter::repeat};
pub enum FrontierError {
/// An error representing that the number of ommers provided in frontier construction does not
/// match the expected length of the ommers list given the position.
PositionMismatch { expected_ommers: usize },
PositionMismatch { expected_ommers: u8 },
/// An error representing that the position and/or list of ommers provided to frontier
/// construction would result in a frontier that exceeds the maximum statically allowed depth
/// of the tree. `depth` is the minimum tree depth that would be required in order for that
@ -43,7 +43,7 @@ impl<H> NonEmptyFrontier<H> {
/// Constructs a new frontier from its constituent parts.
pub fn from_parts(position: Position, leaf: H, ommers: Vec<H>) -> Result<Self, FrontierError> {
let expected_ommers = position.past_ommer_count();
if ommers.len() == expected_ommers {
if ommers.len() == expected_ommers.into() {
Ok(Self {
position,
leaf,
@ -89,25 +89,25 @@ impl<H: Hashable + Clone> NonEmptyFrontier<H> {
let new_root_level = self.position.root_level();
let mut carry = Some((prior_leaf, 0.into()));
let mut new_ommers = Vec::with_capacity(self.position.past_ommer_count());
let mut new_ommers = Vec::with_capacity(self.position.past_ommer_count().into());
for (addr, source) in prior_position.witness_addrs(new_root_level) {
if let Source::Past(i) = source {
if let Some((carry_ommer, carry_lvl)) = carry.as_ref() {
if *carry_lvl == addr.level() {
carry = Some((
H::combine(addr.level(), &self.ommers[i], carry_ommer),
H::combine(addr.level(), &self.ommers[usize::from(i)], carry_ommer),
addr.level() + 1,
))
} else {
// insert the carry at the first empty slot; then the rest of the
// ommers will remain unchanged
new_ommers.push(carry_ommer.clone());
new_ommers.push(self.ommers[i].clone());
new_ommers.push(self.ommers[usize::from(i)].clone());
carry = None;
}
} else {
// when there's no carry, just push on the ommer value
new_ommers.push(self.ommers[i].clone());
new_ommers.push(self.ommers[usize::from(i)].clone());
}
}
}
@ -136,7 +136,9 @@ impl<H: Hashable + Clone> NonEmptyFrontier<H> {
.fold(digest, |d, l| H::combine(l, &d, &H::empty_root(l)));
let res_digest = match source {
Source::Past(i) => H::combine(addr.level(), &self.ommers[i], &digest),
Source::Past(i) => {
H::combine(addr.level(), &self.ommers[usize::from(i)], &digest)
}
Source::Future => {
H::combine(addr.level(), &digest, &H::empty_root(addr.level()))
}
@ -162,7 +164,7 @@ impl<H: Hashable + Clone> NonEmptyFrontier<H> {
self.position()
.witness_addrs(depth.into())
.map(|(addr, source)| match source {
Source::Past(i) => Ok(self.ommers[i].clone()),
Source::Past(i) => Ok(self.ommers[usize::from(i)].clone()),
Source::Future => complement_nodes(addr).ok_or(addr),
})
.collect::<Result<Vec<_>, _>>()
@ -370,14 +372,13 @@ impl<H: Hashable + Clone, const DEPTH: u8> CommitmentTree<H, DEPTH> {
(f.leaf().clone(), None)
};
let upos: usize = f.position().into();
Self {
left: Some(left),
right,
parents: (1u8..DEPTH)
.into_iter()
.map(|i| {
if upos & (1 << i) == 0 {
if u64::from(f.position()) & (1 << i) == 0 {
None
} else {
ommers_iter.next()
@ -404,7 +405,7 @@ impl<H: Hashable + Clone, const DEPTH: u8> CommitmentTree<H, DEPTH> {
// If a frontier cannot be successfully constructed from the
// parts of a commitment tree, it is a programming error.
Frontier::from_parts((self.size() - 1).into(), leaf, ommers)
Frontier::from_parts((self.size() - 1).try_into().unwrap(), leaf, ommers)
.expect("Frontier should be constructable from CommitmentTree.")
}
}

View File

@ -51,7 +51,7 @@ impl<C> Retention<C> {
pub enum Source {
/// The sibling to the address can be derived from the incremental frontier
/// at the contained ommer index
Past(usize),
Past(u8),
/// The sibling to the address must be obtained from values discovered by
/// the addition of more nodes to the tree
Future,
@ -61,7 +61,7 @@ pub enum Source {
struct WitnessAddrsIter {
root_level: Level,
current: Address,
ommer_count: usize,
ommer_count: u8,
}
impl Iterator for WitnessAddrsIter {
@ -91,7 +91,7 @@ impl Iterator for WitnessAddrsIter {
/// A type representing the position of a leaf in a Merkle tree.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct Position(usize);
pub struct Position(u64);
impl Position {
/// Return whether the position is odd-valued.
@ -102,15 +102,17 @@ impl Position {
/// Returns the minimum possible level of the root of a binary tree containing at least
/// `self + 1` nodes.
pub fn root_level(&self) -> Level {
Level((usize::BITS - self.0.leading_zeros()) as u8)
Level((u64::BITS - self.0.leading_zeros()) as u8)
}
/// Returns the number of cousins and/or ommers required to construct an authentication
/// path to the root of a merkle tree that has `self + 1` nodes.
pub fn past_ommer_count(&self) -> usize {
pub fn past_ommer_count(&self) -> u8 {
(0..self.root_level().0)
.filter(|i| (self.0 >> i) & 0x1 == 1)
.count()
.try_into()
.unwrap() // this is safe because we're counting within a `u8` range
}
/// Returns whether the binary tree having `self` as the position of the rightmost leaf
@ -133,34 +135,34 @@ impl Position {
}
}
impl From<Position> for usize {
fn from(p: Position) -> usize {
impl From<Position> for u64 {
fn from(p: Position) -> Self {
p.0
}
}
impl From<Position> for u64 {
fn from(p: Position) -> Self {
p.0 as u64
impl From<u64> for Position {
fn from(sz: u64) -> Self {
Self(sz)
}
}
impl Add<usize> for Position {
impl Add<u64> for Position {
type Output = Position;
fn add(self, other: usize) -> Self {
fn add(self, other: u64) -> Self {
Position(self.0 + other)
}
}
impl AddAssign<usize> for Position {
fn add_assign(&mut self, other: usize) {
impl AddAssign<u64> for Position {
fn add_assign(&mut self, other: u64) {
self.0 += other
}
}
impl Sub<usize> for Position {
impl Sub<u64> for Position {
type Output = Position;
fn sub(self, other: usize) -> Self {
fn sub(self, other: u64) -> Self {
if self.0 < other {
panic!("position underflow");
}
@ -168,16 +170,17 @@ impl Sub<usize> for Position {
}
}
impl From<usize> for Position {
fn from(sz: usize) -> Self {
Self(sz)
impl TryFrom<usize> for Position {
type Error = TryFromIntError;
fn try_from(sz: usize) -> Result<Self, Self::Error> {
<u64>::try_from(sz).map(Self)
}
}
impl TryFrom<u64> for Position {
impl TryFrom<Position> for usize {
type Error = TryFromIntError;
fn try_from(sz: u64) -> Result<Self, Self::Error> {
<usize>::try_from(sz).map(Self)
fn try_from(p: Position) -> Result<Self, Self::Error> {
<usize>::try_from(p.0)
}
}
@ -216,6 +219,7 @@ impl From<Level> for u8 {
}
}
// Supporting sub-8-bit platforms isn't on our roadmap. — NOTE(review): original comment was truncated; confirm intended wording
impl From<Level> for usize {
fn from(level: Level) -> usize {
level.0 as usize
@ -245,12 +249,12 @@ impl Sub<u8> for Level {
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Address {
level: Level,
index: usize,
index: u64,
}
impl Address {
/// Construct a new address from its constituent parts.
pub fn from_parts(level: Level, index: usize) -> Self {
pub fn from_parts(level: Level, index: u64) -> Self {
Address { level, index }
}
@ -272,7 +276,7 @@ impl Address {
/// The index of an address is defined as the number of subtrees with their roots
/// at the address's level that appear to the left of this address in a binary
/// tree of arbitrary height > level * 2 + 1.
pub fn index(&self) -> usize {
pub fn index(&self) -> u64 {
self.index
}
@ -354,7 +358,7 @@ impl Address {
/// than or equal to that of this address) or the range of indices of root addresses of
/// subtrees with roots at the given level contained within the tree with its root at this
/// address otherwise.
pub fn context(&self, level: Level) -> Either<Address, Range<usize>> {
pub fn context(&self, level: Level) -> Either<Address, Range<u64>> {
if level >= self.level {
Either::Left(Address {
level,
@ -541,7 +545,7 @@ pub(crate) mod tests {
assert!(Position(3).is_complete_subtree(Level(2)));
assert!(!Position(4).is_complete_subtree(Level(2)));
assert!(Position(7).is_complete_subtree(Level(3)));
assert!(Position(u32::MAX as usize).is_complete_subtree(Level(32)));
assert!(Position(u32::MAX as u64).is_complete_subtree(Level(32)));
}
#[test]

View File

@ -147,11 +147,11 @@ pub fn append_str<C>(x: &str, retention: Retention<C>) -> Operation<String, C> {
Operation::Append(x.to_string(), retention)
}
pub fn unmark<H, C>(pos: usize) -> Operation<H, C> {
pub fn unmark<H, C>(pos: u64) -> Operation<H, C> {
Operation::Unmark(Position::from(pos))
}
pub fn witness<H, C>(pos: usize, depth: usize) -> Operation<H, C> {
pub fn witness<H, C>(pos: u64, depth: usize) -> Operation<H, C> {
Operation::Witness(Position::from(pos), depth)
}
@ -218,7 +218,7 @@ pub fn arb_retention() -> impl Strategy<Value = Retention<()>> {
pub fn arb_operation<G: Strategy + Clone>(
item_gen: G,
pos_gen: impl Strategy<Value = usize> + Clone,
pos_gen: impl Strategy<Value = Position> + Clone,
) -> impl Strategy<Value = Operation<G::Value, ()>>
where
G::Value: Clone + 'static,
@ -230,17 +230,11 @@ where
Just(Operation::MarkedPositions),
],
Just(Operation::GarbageCollect),
pos_gen
.clone()
.prop_map(|i| Operation::MarkedLeaf(Position::from(i))),
pos_gen
.clone()
.prop_map(|i| Operation::Unmark(Position::from(i))),
pos_gen.clone().prop_map(Operation::MarkedLeaf),
pos_gen.clone().prop_map(Operation::Unmark),
Just(Operation::Checkpoint(())),
Just(Operation::Rewind),
pos_gen
.prop_flat_map(|i| (0usize..10)
.prop_map(move |depth| Operation::Witness(Position::from(i), depth))),
pos_gen.prop_flat_map(|i| (0usize..10).prop_map(move |depth| Operation::Witness(i, depth))),
]
}
@ -289,19 +283,20 @@ pub fn check_operations<H: Hashable + Ord + Clone, C: Clone, T: Tree<H, C>>(
} else {
prop_assert_eq!(
tree_size,
tree.current_position().map_or(0, |p| usize::from(p) + 1)
tree.current_position()
.map_or(0, |p| usize::try_from(p).unwrap() + 1)
);
}
}
CurrentPosition => {
if let Some(pos) = tree.current_position() {
prop_assert!(tree_size > 0);
prop_assert_eq!(tree_size - 1, pos.into());
prop_assert_eq!(tree_size - 1, pos.try_into().unwrap());
}
}
MarkedLeaf(position) => {
if tree.get_marked_leaf(*position).is_some() {
prop_assert!(<usize>::from(*position) < tree_size);
prop_assert!(<usize>::try_from(*position).unwrap() < tree_size);
}
}
Unmark(position) => {
@ -322,7 +317,7 @@ pub fn check_operations<H: Hashable + Ord + Clone, C: Clone, T: Tree<H, C>>(
}
Witness(position, depth) => {
if let Some(path) = tree.witness(*position, *depth) {
let value: H = tree_values[<usize>::from(*position)].clone();
let value: H = tree_values[<usize>::try_from(*position).unwrap()].clone();
let tree_root = tree.root(*depth);
if tree_checkpoints.len() >= *depth {
@ -361,7 +356,7 @@ pub fn compute_root_from_witness<H: Hashable>(value: H, position: Position, path
for (i, v) in path
.iter()
.enumerate()
.map(|(i, v)| (((<usize>::from(position) >> i) & 1) == 1, v))
.map(|(i, v)| (((<u64>::from(position) >> i) & 1) == 1, v))
{
if i {
cur = H::combine(lvl, v, &cur);
@ -719,7 +714,7 @@ pub fn check_witnesses<T: Tree<String, usize> + std::fmt::Debug, F: Fn(usize) ->
.map(|c| Append(c.to_string(), Marked))
.chain(Some(Append('m'.to_string(), Ephemeral)))
.chain(Some(Append('n'.to_string(), Ephemeral)))
.chain(Some(Witness(11usize.into(), 0)))
.chain(Some(Witness(11u64.into(), 0)))
.collect::<Vec<_>>();
let mut tree = new_tree(100);
@ -770,7 +765,7 @@ pub fn check_witnesses<T: Tree<String, usize> + std::fmt::Debug, F: Fn(usize) ->
is_marked: false,
},
),
Witness(3usize.into(), 5),
Witness(3u64.into(), 5),
];
let mut tree = new_tree(100);
assert_eq!(
@ -963,23 +958,23 @@ pub fn check_rewind_remove_mark<T: Tree<String, usize>, F: Fn(usize) -> T>(new_t
tree.append("e".to_string(), Retention::Marked);
tree.checkpoint(1);
assert!(tree.rewind());
assert!(tree.remove_mark(0usize.into()));
assert!(tree.remove_mark(0u64.into()));
// use a maximum number of checkpoints of 1
let mut tree = new_tree(1);
assert!(tree.append("e".to_string(), Retention::Marked));
assert!(tree.checkpoint(1));
assert!(tree.marked_positions().contains(&0usize.into()));
assert!(tree.marked_positions().contains(&0u64.into()));
assert!(tree.append("f".to_string(), Retention::Ephemeral));
// simulate a spend of `e` at `f`
assert!(tree.remove_mark(0usize.into()));
assert!(tree.remove_mark(0u64.into()));
// even though the mark has been staged for removal, it's not gone yet
assert!(tree.marked_positions().contains(&0usize.into()));
assert!(tree.marked_positions().contains(&0u64.into()));
assert!(tree.checkpoint(2));
// the newest checkpoint will have caused the oldest to roll off, and
// so the forgotten node will be unmarked
assert!(!tree.marked_positions().contains(&0usize.into()));
assert!(!tree.remove_mark(0usize.into()));
assert!(!tree.marked_positions().contains(&0u64.into()));
assert!(!tree.remove_mark(0u64.into()));
// The following check_operations tests cover errors where the
// test framework itself previously did not correctly handle

View File

@ -4,6 +4,8 @@ use std::collections::{BTreeMap, BTreeSet};
use crate::{testing::Tree, Hashable, Level, Position, Retention};
const MAX_COMPLETE_SIZE_ERROR: &str = "Positions of a `CompleteTree` must fit into the platform word size, because larger complete trees are not representable.";
pub(crate) fn root<H: Hashable + Clone>(leaves: &[H], depth: u8) -> H {
let empty_leaf = H::empty_leaf();
let mut leaves = leaves
@ -129,7 +131,9 @@ impl<H: Hashable, C: Clone + Ord + core::fmt::Debug, const DEPTH: u8> CompleteTr
if self.leaves.is_empty() {
None
} else {
Some((self.leaves.len() - 1).into())
// this unwrap is safe because nobody is ever going to create a complete
// tree with more than 2^64 leaves
Some((self.leaves.len() - 1).try_into().unwrap())
}
}
@ -158,7 +162,10 @@ impl<H: Hashable, C: Clone + Ord + core::fmt::Debug, const DEPTH: u8> CompleteTr
fn checkpoint(&mut self, id: C, pos: Option<Position>) {
self.checkpoints.insert(
id,
Checkpoint::at_length(pos.map_or_else(|| 0, |p| usize::from(p) + 1)),
Checkpoint::at_length(pos.map_or_else(
|| 0,
|p| usize::try_from(p).expect(MAX_COMPLETE_SIZE_ERROR) + 1,
)),
);
if self.checkpoints.len() > self.max_checkpoints {
self.drop_oldest_checkpoint();
@ -226,7 +233,7 @@ impl<H: Hashable + PartialEq + Clone, C: Ord + Clone + core::fmt::Debug, const D
fn get_marked_leaf(&self, position: Position) -> Option<&H> {
if self.marks.contains(&position) {
self.leaves
.get(usize::from(position))
.get(usize::try_from(position).expect(MAX_COMPLETE_SIZE_ERROR))
.and_then(|opt: &Option<H>| opt.as_ref())
} else {
None
@ -254,7 +261,7 @@ impl<H: Hashable + PartialEq + Clone, C: Ord + Clone + core::fmt::Debug, const D
} else {
let mut path = vec![];
let mut leaf_idx: usize = position.into();
let mut leaf_idx: usize = position.try_into().expect(MAX_COMPLETE_SIZE_ERROR);
for bit in 0..DEPTH {
leaf_idx ^= 1 << bit;
path.push(if leaf_idx < leaves_len {

View File

@ -85,7 +85,8 @@ impl<H, const DEPTH: u8> IncrementalWitness<H, DEPTH> {
/// Returns the position of the witnessed leaf node in the commitment tree.
pub fn position(&self) -> Position {
Position::from(self.tree.size() - 1)
Position::try_from(self.tree.size() - 1)
.expect("Commitment trees with more than 2^64 leaves are unsupported.")
}
/// Finds the next "depth" of an unfilled subtree.

View File

@ -1,5 +1,5 @@
use bitflags::bitflags;
use core::convert::Infallible;
use core::convert::{Infallible, TryFrom};
use core::fmt::Debug;
use core::marker::PhantomData;
use core::ops::{Deref, Range};
@ -1424,7 +1424,7 @@ impl<H> ShardStore<H> for Vec<LocatedPrunableTree<H>> {
type Error = Infallible;
fn get_shard(&self, shard_root: Address) -> Option<&LocatedPrunableTree<H>> {
self.get(shard_root.index())
self.get(usize::try_from(shard_root.index()).expect("SHARD_HEIGHT > 64 is unsupported"))
}
fn last_shard(&self) -> Option<&LocatedPrunableTree<H>> {
@ -1440,7 +1440,8 @@ impl<H> ShardStore<H> for Vec<LocatedPrunableTree<H>> {
root: Tree(Node::Nil),
})
}
self[subtree_addr.index()] = subtree;
self[usize::try_from(subtree_addr.index()).expect("SHARD_HEIGHT > 64 is unsupported")] =
subtree;
Ok(())
}
@ -1449,7 +1450,7 @@ impl<H> ShardStore<H> for Vec<LocatedPrunableTree<H>> {
}
fn truncate(&mut self, from: Address) -> Result<(), Self::Error> {
self.truncate(from.index());
self.truncate(usize::try_from(from.index()).expect("SHARD_HEIGHT > 64 is unsupported"));
Ok(())
}
}
@ -2764,7 +2765,10 @@ mod tests {
#[test]
fn check_randomized_u64_ops(
ops in proptest::collection::vec(
arb_operation((0..32u64).prop_map(SipHashable), 0usize..100),
arb_operation(
(0..32u64).prop_map(SipHashable),
(0u64..100).prop_map(Position::from)
),
1..100
)
) {
@ -2776,7 +2780,10 @@ mod tests {
#[test]
fn check_randomized_str_ops(
ops in proptest::collection::vec(
arb_operation((97u8..123).prop_map(|c| char::from(c).to_string()), 0usize..100),
arb_operation(
(97u8..123).prop_map(|c| char::from(c).to_string()),
(0u64..100).prop_map(Position::from)
),
1..100
)
) {