Clean deprecated crate.
This commit is contained in:
parent
2c99d1edb9
commit
079c55e881
|
@ -1,13 +0,0 @@
|
|||
[package]
|
||||
name = "hashdb"
|
||||
version = "0.3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
description = "trait for hash-keyed databases."
|
||||
repository = "https://github.com/paritytech/parity-common"
|
||||
license = "GPL-3.0"
|
||||
|
||||
[dependencies]
|
||||
|
||||
[features]
|
||||
default = ["std"]
|
||||
std = []
|
|
@ -1,7 +0,0 @@
|
|||
# HashDB
|
||||
`HashDB` defines a common interface for databases of byte-slices keyed to their hash. It is generic over hash type through the `Hasher` trait.
|
||||
|
||||
The `Hasher` trait can be used in a `no_std` context.
|
||||
|
||||
|
||||
**This crate is deprecated in favor of ['hash-db'](https://crates.io/crates/hash-db)**
|
|
@ -1,85 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Database of byte-slices keyed to their hash.
|
||||
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
extern crate core;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(feature = "std")]
|
||||
use std::collections::HashMap;
|
||||
use core::{fmt::Debug, hash::Hash};
|
||||
|
||||
/// Trait describing an object that can hash a slice of bytes. Used to abstract
|
||||
/// other types over the hashing algorithm. Defines a single `hash` method and an
|
||||
/// `Out` associated type with the necessary bounds.
|
||||
pub trait Hasher: Sync + Send {
|
||||
/// The output type of the `Hasher`
|
||||
type Out: AsRef<[u8]> + AsMut<[u8]> + Default + Debug + PartialEq + Eq + Hash + Send + Sync + Clone + Copy;
|
||||
/// What to use to build `HashMap`s with this `Hasher`
|
||||
type StdHasher: Sync + Send + Default + core::hash::Hasher;
|
||||
/// The length in bytes of the `Hasher` output
|
||||
const LENGTH: usize;
|
||||
|
||||
/// Compute the hash of the provided slice of bytes returning the `Out` type of the `Hasher`
|
||||
fn hash(x: &[u8]) -> Self::Out;
|
||||
}
|
||||
|
||||
/// Trait modelling datastore keyed by a hash defined by the `Hasher`.
|
||||
#[cfg(feature = "std")]
|
||||
pub trait HashDB<H: Hasher, T>: Send + Sync + AsHashDB<H, T> {
|
||||
/// Get the keys in the database together with number of underlying references.
|
||||
fn keys(&self) -> HashMap<H::Out, i32>;
|
||||
|
||||
/// Look up a given hash into the bytes that hash to it, returning None if the
|
||||
/// hash is not known.
|
||||
fn get(&self, key: &H::Out) -> Option<T>;
|
||||
|
||||
/// Check for the existance of a hash-key.
|
||||
fn contains(&self, key: &H::Out) -> bool;
|
||||
|
||||
/// Insert a datum item into the DB and return the datum's hash for a later lookup. Insertions
|
||||
/// are counted and the equivalent number of `remove()`s must be performed before the data
|
||||
/// is considered dead.
|
||||
fn insert(&mut self, value: &[u8]) -> H::Out;
|
||||
|
||||
/// Like `insert()`, except you provide the key and the data is all moved.
|
||||
fn emplace(&mut self, key: H::Out, value: T);
|
||||
|
||||
/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
|
||||
/// happen without the data being eventually being inserted into the DB. It can be "owed" more than once.
|
||||
fn remove(&mut self, key: &H::Out);
|
||||
}
|
||||
|
||||
/// Upcast trait.
|
||||
#[cfg(feature = "std")]
|
||||
pub trait AsHashDB<H: Hasher, T> {
|
||||
/// Perform upcast to HashDB for anything that derives from HashDB.
|
||||
fn as_hashdb(&self) -> &HashDB<H, T>;
|
||||
/// Perform mutable upcast to HashDB for anything that derives from HashDB.
|
||||
fn as_hashdb_mut(&mut self) -> &mut HashDB<H, T>;
|
||||
}
|
||||
|
||||
// NOTE: There used to be a `impl<T> AsHashDB for T` but that does not work with generics. See https://stackoverflow.com/questions/48432842/implementing-a-trait-for-reference-and-non-reference-types-causes-conflicting-im
|
||||
// This means we need concrete impls of AsHashDB in several places, which somewhat defeats the point of the trait.
|
||||
#[cfg(feature = "std")]
|
||||
impl<'a, H: Hasher, T> AsHashDB<H, T> for &'a mut HashDB<H, T> {
|
||||
fn as_hashdb(&self) -> &HashDB<H, T> { &**self }
|
||||
fn as_hashdb_mut(&mut self) -> &mut HashDB<H, T> { &mut **self }
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
[package]
|
||||
name = "memorydb"
|
||||
version = "0.3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
description = "in-memory implementation of hashdb"
|
||||
repository = "https://github.com/paritytech/parity-common"
|
||||
license = "GPL-3.0"
|
||||
|
||||
[dependencies]
|
||||
heapsize = "0.4"
|
||||
hashdb = { version = "0.3.0", path = "../hashdb" }
|
||||
plain_hasher = { version = "0.2", path = "../plain_hasher", default-features = false }
|
||||
rlp = { version = "0.3.0", path = "../rlp", default-features = false }
|
||||
|
||||
[dev-dependencies]
|
||||
tiny-keccak = "1.4.2"
|
||||
ethereum-types = "0.4"
|
||||
keccak-hasher = { version = "0.1", path = "../test-support/keccak-hasher" }
|
||||
keccak-hash = { version = "0.1", path = "../keccak-hash" }
|
|
@ -1,3 +0,0 @@
|
|||
MemoryDB is a reference counted memory-based [`HashDB`](https://github.com/paritytech/parity-common/tree/master/hashdb) implementation backed by a `HashMap`.
|
||||
|
||||
**This crate is deprecated in favor of ['memory-db'](https://crates.io/crates/hash-db)**
|
|
@ -1,79 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#![feature(test)]
|
||||
|
||||
extern crate hashdb;
|
||||
extern crate memorydb;
|
||||
extern crate keccak_hasher;
|
||||
extern crate keccak_hash;
|
||||
extern crate rlp;
|
||||
extern crate test;
|
||||
|
||||
use memorydb::MemoryDB;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use keccak_hash::KECCAK_NULL_RLP;
|
||||
use rlp::NULL_RLP;
|
||||
use test::{Bencher, black_box};
|
||||
|
||||
|
||||
#[bench]
|
||||
fn instantiation(b: &mut Bencher) {
|
||||
b.iter(|| {
|
||||
MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn compare_to_null_embedded_in_struct(b: &mut Bencher) {
|
||||
struct X {a_hash: <KeccakHasher as Hasher>::Out};
|
||||
let x = X {a_hash: KeccakHasher::hash(&NULL_RLP)};
|
||||
let key = KeccakHasher::hash(b"abc");
|
||||
|
||||
b.iter(|| {
|
||||
black_box(key == x.a_hash);
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn compare_to_null_in_const(b: &mut Bencher) {
|
||||
let key = KeccakHasher::hash(b"abc");
|
||||
|
||||
b.iter(|| {
|
||||
black_box(key == KECCAK_NULL_RLP);
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn contains_with_non_null_key(b: &mut Bencher) {
|
||||
let mut m = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
let key = KeccakHasher::hash(b"abc");
|
||||
m.insert(b"abcefghijklmnopqrstuvxyz");
|
||||
b.iter(|| {
|
||||
m.contains(&key);
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn contains_with_null_key(b: &mut Bencher) {
|
||||
let mut m = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
let null_key = KeccakHasher::hash(&NULL_RLP);
|
||||
m.insert(b"abcefghijklmnopqrstuvxyz");
|
||||
b.iter(|| {
|
||||
m.contains(&null_key);
|
||||
})
|
||||
}
|
|
@ -1,397 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Reference-counted memory-based `HashDB` implementation.
|
||||
extern crate hashdb;
|
||||
extern crate heapsize;
|
||||
extern crate rlp;
|
||||
#[cfg(test)] extern crate keccak_hasher;
|
||||
#[cfg(test)] extern crate tiny_keccak;
|
||||
#[cfg(test)] extern crate ethereum_types;
|
||||
|
||||
use hashdb::{HashDB, Hasher as KeyHasher, AsHashDB};
|
||||
use heapsize::HeapSizeOf;
|
||||
use rlp::NULL_RLP;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::hash;
|
||||
use std::mem;
|
||||
|
||||
// Backing `HashMap` parametrized with a `Hasher` for the keys `Hasher::Out` and the `Hasher::StdHasher`
|
||||
// as hash map builder.
|
||||
type FastMap<H, T> = HashMap<<H as KeyHasher>::Out, T, hash::BuildHasherDefault<<H as KeyHasher>::StdHasher>>;
|
||||
|
||||
/// Reference-counted memory-based `HashDB` implementation.
|
||||
///
|
||||
/// Use `new()` to create a new database. Insert items with `insert()`, remove items
|
||||
/// with `remove()`, check for existence with `contains()` and lookup a hash to derive
|
||||
/// the data with `get()`. Clear with `clear()` and purge the portions of the data
|
||||
/// that have no references with `purge()`.
|
||||
///
|
||||
/// # Example
|
||||
/// ```rust
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
///
|
||||
/// use hashdb::*;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
/// fn main() {
|
||||
/// let mut m = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
/// let d = "Hello world!".as_bytes();
|
||||
///
|
||||
/// let k = m.insert(d);
|
||||
/// assert!(m.contains(&k));
|
||||
/// assert_eq!(m.get(&k).unwrap(), d);
|
||||
///
|
||||
/// m.insert(d);
|
||||
/// assert!(m.contains(&k));
|
||||
///
|
||||
/// m.remove(&k);
|
||||
/// assert!(m.contains(&k));
|
||||
///
|
||||
/// m.remove(&k);
|
||||
/// assert!(!m.contains(&k));
|
||||
///
|
||||
/// m.remove(&k);
|
||||
/// assert!(!m.contains(&k));
|
||||
///
|
||||
/// m.insert(d);
|
||||
/// assert!(!m.contains(&k));
|
||||
|
||||
/// m.insert(d);
|
||||
/// assert!(m.contains(&k));
|
||||
/// assert_eq!(m.get(&k).unwrap(), d);
|
||||
///
|
||||
/// m.remove(&k);
|
||||
/// assert!(!m.contains(&k));
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct MemoryDB<H: KeyHasher, T> {
|
||||
data: FastMap<H, (T, i32)>,
|
||||
hashed_null_node: H::Out,
|
||||
null_node_data: T,
|
||||
}
|
||||
|
||||
impl<'a, H, T> Default for MemoryDB<H, T>
|
||||
where
|
||||
H: KeyHasher,
|
||||
H::Out: HeapSizeOf,
|
||||
T: From<&'a [u8]> + Clone
|
||||
{
|
||||
fn default() -> Self { Self::new() }
|
||||
}
|
||||
|
||||
impl<'a, H, T> MemoryDB<H, T>
|
||||
where
|
||||
H: KeyHasher,
|
||||
H::Out: HeapSizeOf,
|
||||
T: From<&'a [u8]> + Clone,
|
||||
{
|
||||
/// Create a new instance of the memory DB.
|
||||
pub fn new() -> Self {
|
||||
MemoryDB::from_null_node(&NULL_RLP, NULL_RLP.as_ref().into())
|
||||
}
|
||||
}
|
||||
|
||||
impl<H, T> MemoryDB<H, T>
|
||||
where
|
||||
H: KeyHasher,
|
||||
H::Out: HeapSizeOf,
|
||||
T: Default,
|
||||
{
|
||||
/// Remove an element and delete it from storage if reference count reaches zero.
|
||||
/// If the value was purged, return the old value.
|
||||
pub fn remove_and_purge(&mut self, key: &<H as KeyHasher>::Out) -> Option<T> {
|
||||
if key == &self.hashed_null_node {
|
||||
return None;
|
||||
}
|
||||
match self.data.entry(key.clone()) {
|
||||
Entry::Occupied(mut entry) =>
|
||||
if entry.get().1 == 1 {
|
||||
Some(entry.remove().0)
|
||||
} else {
|
||||
entry.get_mut().1 -= 1;
|
||||
None
|
||||
},
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert((T::default(), -1)); // FIXME: shouldn't it be purged?
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: KeyHasher, T: Clone> MemoryDB<H, T> {
|
||||
|
||||
/// Create a new `MemoryDB` from a given null key/data
|
||||
pub fn from_null_node(null_key: &[u8], null_node_data: T) -> Self {
|
||||
MemoryDB {
|
||||
data: FastMap::<H,_>::default(),
|
||||
hashed_null_node: H::hash(null_key),
|
||||
null_node_data,
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear all data from the database.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```rust
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
///
|
||||
/// use hashdb::*;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let mut m = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
/// let hello_bytes = "Hello world!".as_bytes();
|
||||
/// let hash = m.insert(hello_bytes);
|
||||
/// assert!(m.contains(&hash));
|
||||
/// m.clear();
|
||||
/// assert!(!m.contains(&hash));
|
||||
/// }
|
||||
/// ```
|
||||
pub fn clear(&mut self) {
|
||||
self.data.clear();
|
||||
}
|
||||
|
||||
/// Purge all zero-referenced data from the database.
|
||||
pub fn purge(&mut self) {
|
||||
self.data.retain(|_, &mut (_, rc)| rc != 0);
|
||||
}
|
||||
|
||||
/// Return the internal map of hashes to data, clearing the current state.
|
||||
pub fn drain(&mut self) -> FastMap<H, (T, i32)> {
|
||||
mem::replace(&mut self.data, FastMap::<H,_>::default())
|
||||
}
|
||||
|
||||
/// Grab the raw information associated with a key. Returns None if the key
|
||||
/// doesn't exist.
|
||||
///
|
||||
/// Even when Some is returned, the data is only guaranteed to be useful
|
||||
/// when the refs > 0.
|
||||
pub fn raw(&self, key: &<H as KeyHasher>::Out) -> Option<(T, i32)> {
|
||||
if key == &self.hashed_null_node {
|
||||
return Some((self.null_node_data.clone(), 1));
|
||||
}
|
||||
self.data.get(key).map(|(value, count)| (value.clone(), *count))
|
||||
}
|
||||
|
||||
/// Consolidate all the entries of `other` into `self`.
|
||||
pub fn consolidate(&mut self, mut other: Self) {
|
||||
for (key, (value, rc)) in other.drain() {
|
||||
match self.data.entry(key) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
if entry.get().1 < 0 {
|
||||
entry.get_mut().0 = value;
|
||||
}
|
||||
|
||||
entry.get_mut().1 += rc;
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert((value, rc));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<H, T> MemoryDB<H, T>
|
||||
where
|
||||
H: KeyHasher,
|
||||
H::Out: HeapSizeOf,
|
||||
T: HeapSizeOf,
|
||||
{
|
||||
/// Returns the size of allocated heap memory
|
||||
pub fn mem_used(&self) -> usize {
|
||||
self.data.heap_size_of_children()
|
||||
}
|
||||
}
|
||||
|
||||
impl<H, T> HashDB<H, T> for MemoryDB<H, T>
|
||||
where
|
||||
H: KeyHasher,
|
||||
T: Default + PartialEq<T> + for<'a> From<&'a [u8]> + Send + Sync + Clone,
|
||||
{
|
||||
fn keys(&self) -> HashMap<H::Out, i32> {
|
||||
self.data.iter()
|
||||
.filter_map(|(k, v)| if v.1 != 0 {
|
||||
Some((*k, v.1))
|
||||
} else {
|
||||
None
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn get(&self, key: &H::Out) -> Option<T> {
|
||||
if key == &self.hashed_null_node {
|
||||
return Some(self.null_node_data.clone());
|
||||
}
|
||||
|
||||
match self.data.get(key) {
|
||||
Some(&(ref d, rc)) if rc > 0 => Some(d.clone()),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn contains(&self, key: &H::Out) -> bool {
|
||||
if key == &self.hashed_null_node {
|
||||
return true;
|
||||
}
|
||||
|
||||
match self.data.get(key) {
|
||||
Some(&(_, x)) if x > 0 => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
fn emplace(&mut self, key:H::Out, value: T) {
|
||||
if value == self.null_node_data {
|
||||
return;
|
||||
}
|
||||
|
||||
match self.data.entry(key) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
|
||||
if *rc <= 0 {
|
||||
*old_value = value;
|
||||
}
|
||||
*rc += 1;
|
||||
},
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert((value, 1));
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn insert(&mut self, value: &[u8]) -> H::Out {
|
||||
if value == &NULL_RLP {
|
||||
return self.hashed_null_node.clone();
|
||||
}
|
||||
let key = H::hash(value);
|
||||
match self.data.entry(key) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
|
||||
if *rc <= 0 {
|
||||
*old_value = value.into();
|
||||
}
|
||||
*rc += 1;
|
||||
},
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert((value.into(), 1));
|
||||
},
|
||||
}
|
||||
key
|
||||
}
|
||||
|
||||
fn remove(&mut self, key: &H::Out) {
|
||||
if key == &self.hashed_null_node {
|
||||
return;
|
||||
}
|
||||
|
||||
match self.data.entry(*key) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let &mut (_, ref mut rc) = entry.get_mut();
|
||||
*rc -= 1;
|
||||
},
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert((T::default(), -1));
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<H, T> AsHashDB<H, T> for MemoryDB<H, T>
|
||||
where
|
||||
H: KeyHasher,
|
||||
T: Default + PartialEq<T> + for<'a> From<&'a[u8]> + Send + Sync + Clone,
|
||||
{
|
||||
fn as_hashdb(&self) -> &HashDB<H, T> { self }
|
||||
fn as_hashdb_mut(&mut self) -> &mut HashDB<H, T> { self }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tiny_keccak::Keccak;
|
||||
use ethereum_types::H256;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
|
||||
#[test]
|
||||
fn memorydb_remove_and_purge() {
|
||||
let hello_bytes = b"Hello world!";
|
||||
let mut hello_key = [0;32];
|
||||
Keccak::keccak256(hello_bytes, &mut hello_key);
|
||||
let hello_key = H256(hello_key);
|
||||
|
||||
let mut m = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
m.remove(&hello_key);
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, -1);
|
||||
m.purge();
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, -1);
|
||||
m.insert(hello_bytes);
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, 0);
|
||||
m.purge();
|
||||
assert_eq!(m.raw(&hello_key), None);
|
||||
|
||||
let mut m = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
assert!(m.remove_and_purge(&hello_key).is_none());
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, -1);
|
||||
m.insert(hello_bytes);
|
||||
m.insert(hello_bytes);
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, 1);
|
||||
assert_eq!(&*m.remove_and_purge(&hello_key).unwrap(), hello_bytes);
|
||||
assert_eq!(m.raw(&hello_key), None);
|
||||
assert!(m.remove_and_purge(&hello_key).is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn consolidate() {
|
||||
let mut main = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
let mut other = MemoryDB::<KeccakHasher, Vec<u8>>::new();
|
||||
let remove_key = other.insert(b"doggo");
|
||||
main.remove(&remove_key);
|
||||
|
||||
let insert_key = other.insert(b"arf");
|
||||
main.emplace(insert_key, "arf".as_bytes().to_vec());
|
||||
|
||||
let negative_remove_key = other.insert(b"negative");
|
||||
other.remove(&negative_remove_key); // ref cnt: 0
|
||||
other.remove(&negative_remove_key); // ref cnt: -1
|
||||
main.remove(&negative_remove_key); // ref cnt: -1
|
||||
|
||||
main.consolidate(other);
|
||||
|
||||
let overlay = main.drain();
|
||||
|
||||
assert_eq!(overlay.get(&remove_key).unwrap(), &("doggo".as_bytes().to_vec(), 0));
|
||||
assert_eq!(overlay.get(&insert_key).unwrap(), &("arf".as_bytes().to_vec(), 2));
|
||||
assert_eq!(overlay.get(&negative_remove_key).unwrap(), &("negative".as_bytes().to_vec(), -2));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn default_works() {
|
||||
let mut db = MemoryDB::<KeccakHasher, Vec<u8>>::default();
|
||||
let hashed_null_node = KeccakHasher::hash(&NULL_RLP);
|
||||
assert_eq!(db.insert(&NULL_RLP), hashed_null_node);
|
||||
}
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
[package]
|
||||
name = "patricia-trie"
|
||||
version = "0.3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
description = "Merkle-Patricia Trie generic over key hasher and node encoding, deprecated please use trie-db crate"
|
||||
repository = "https://github.com/paritytech/parity-common"
|
||||
license = "GPL-3.0"
|
||||
|
||||
[dependencies]
|
||||
elastic-array = "0.10"
|
||||
log = "0.3"
|
||||
rand = "0.4"
|
||||
hashdb = { version = "0.3.0", path = "../hashdb" }
|
||||
parity-bytes = { version = "0.1", path = "../parity-bytes" }
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger = "0.5"
|
||||
ethereum-types = "0.4"
|
||||
keccak-hash = { version = "0.1", path = "../keccak-hash" }
|
||||
memorydb = { version = "0.3.0", path = "../memorydb", default-features = false }
|
||||
rlp = { version = "0.3.0", path = "../rlp", default-features = false }
|
||||
trie-standardmap = { version = "0.1", path = "../trie-standardmap", default-features = false }
|
||||
triehash = { version = "0.3.0", path = "../triehash", default-features = false }
|
||||
parity-bytes = { version = "0.1.0", path = "../parity-bytes" }
|
||||
patricia-trie-ethereum = { version = "0.1", path = "../test-support/patricia-trie-ethereum" }
|
||||
keccak-hasher = { version = "0.1", path = "../test-support/keccak-hasher" }
|
|
@ -1,214 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#![feature(test)]
|
||||
|
||||
extern crate test;
|
||||
extern crate parity_bytes;
|
||||
extern crate ethereum_types;
|
||||
extern crate memorydb;
|
||||
extern crate patricia_trie as trie;
|
||||
extern crate patricia_trie_ethereum as ethtrie;
|
||||
extern crate keccak_hasher;
|
||||
extern crate keccak_hash;
|
||||
extern crate trie_standardmap;
|
||||
extern crate hashdb;
|
||||
|
||||
use parity_bytes::Bytes;
|
||||
use ethereum_types::H256;
|
||||
use keccak_hash::keccak;
|
||||
use memorydb::MemoryDB;
|
||||
use test::{Bencher, black_box};
|
||||
use trie::{DBValue, TrieMut, Trie};
|
||||
use trie_standardmap::{Alphabet, ValueMode, StandardMap};
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethtrie::{TrieDB, TrieDBMut};
|
||||
|
||||
fn random_word(alphabet: &[u8], min_count: usize, diff_count: usize, seed: &mut H256) -> Vec<u8> {
|
||||
assert!(min_count + diff_count <= 32);
|
||||
*seed = keccak(&seed);
|
||||
let r = min_count + (seed[31] as usize % (diff_count + 1));
|
||||
let mut ret: Vec<u8> = Vec::with_capacity(r);
|
||||
for i in 0..r {
|
||||
ret.push(alphabet[seed[i] as usize % alphabet.len()]);
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
fn random_bytes(min_count: usize, diff_count: usize, seed: &mut H256) -> Vec<u8> {
|
||||
assert!(min_count + diff_count <= 32);
|
||||
*seed = keccak(&seed);
|
||||
let r = min_count + (seed[31] as usize % (diff_count + 1));
|
||||
seed[0..r].to_vec()
|
||||
}
|
||||
|
||||
fn random_value(seed: &mut H256) -> Bytes {
|
||||
*seed = keccak(&seed);
|
||||
match seed[0] % 2 {
|
||||
1 => vec![seed[31];1],
|
||||
_ => seed.to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn trie_insertions_32_mir_1k(b: &mut Bencher) {
|
||||
let st = StandardMap {
|
||||
alphabet: Alphabet::All,
|
||||
min_key: 32,
|
||||
journal_key: 0,
|
||||
value_mode: ValueMode::Mirror,
|
||||
count: 1000,
|
||||
};
|
||||
let d = st.make();
|
||||
b.iter(&mut ||{
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
#[bench]
|
||||
fn trie_iter(b: &mut Bencher) {
|
||||
let st = StandardMap {
|
||||
alphabet: Alphabet::All,
|
||||
min_key: 32,
|
||||
journal_key: 0,
|
||||
value_mode: ValueMode::Mirror,
|
||||
count: 1000,
|
||||
};
|
||||
let d = st.make();
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
b.iter(&mut ||{
|
||||
let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
for n in t.iter().unwrap() {
|
||||
black_box(n).unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn trie_insertions_32_ran_1k(b: &mut Bencher) {
|
||||
let st = StandardMap {
|
||||
alphabet: Alphabet::All,
|
||||
min_key: 32,
|
||||
journal_key: 0,
|
||||
value_mode: ValueMode::Random,
|
||||
count: 1000,
|
||||
};
|
||||
let d = st.make();
|
||||
let mut r = H256::new();
|
||||
b.iter(&mut ||{
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
r = t.root().clone();
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn trie_insertions_six_high(b: &mut Bencher) {
|
||||
let mut d: Vec<(Bytes, Bytes)> = Vec::new();
|
||||
let mut seed = H256::new();
|
||||
for _ in 0..1000 {
|
||||
let k = random_bytes(6, 0, &mut seed);
|
||||
let v = random_value(&mut seed);
|
||||
d.push((k, v))
|
||||
}
|
||||
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn trie_insertions_six_mid(b: &mut Bencher) {
|
||||
let alphabet = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_";
|
||||
let mut d: Vec<(Bytes, Bytes)> = Vec::new();
|
||||
let mut seed = H256::new();
|
||||
for _ in 0..1000 {
|
||||
let k = random_word(alphabet, 6, 0, &mut seed);
|
||||
let v = random_value(&mut seed);
|
||||
d.push((k, v))
|
||||
}
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn trie_insertions_random_mid(b: &mut Bencher) {
|
||||
let alphabet = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_";
|
||||
let mut d: Vec<(Bytes, Bytes)> = Vec::new();
|
||||
let mut seed = H256::new();
|
||||
for _ in 0..1000 {
|
||||
let k = random_word(alphabet, 1, 5, &mut seed);
|
||||
let v = random_value(&mut seed);
|
||||
d.push((k, v))
|
||||
}
|
||||
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn trie_insertions_six_low(b: &mut Bencher) {
|
||||
let alphabet = b"abcdef";
|
||||
let mut d: Vec<(Bytes, Bytes)> = Vec::new();
|
||||
let mut seed = H256::new();
|
||||
for _ in 0..1000 {
|
||||
let k = random_word(alphabet, 6, 0, &mut seed);
|
||||
let v = random_value(&mut seed);
|
||||
d.push((k, v))
|
||||
}
|
||||
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
t.insert(&i.0, &i.1).unwrap();
|
||||
}
|
||||
})
|
||||
}
|
|
@ -1,147 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use super::{Result, DBValue, TrieDB, Trie, TrieDBIterator, TrieItem, TrieIterator, Query};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
|
||||
/// Additionaly it stores inserted hash-key mappings for later retrieval.
|
||||
///
|
||||
/// Use it as a `Trie` or `TrieMut` trait object.
|
||||
pub struct FatDB<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
raw: TrieDB<'db, H, C>,
|
||||
}
|
||||
|
||||
impl<'db, H, C> FatDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
pub fn new(db: &'db HashDB<H, DBValue>, root: &'db H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(FatDB { raw: TrieDB::new(db, root)? })
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&self) -> &HashDB<H, DBValue> { self.raw.db() }
|
||||
}
|
||||
|
||||
impl<'db, H, C> Trie<H, C> for FatDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&self) -> &H::Out { self.raw.root() }
|
||||
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.raw.contains(H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
self.raw.get_with(H::hash(key).as_ref(), query)
|
||||
}
|
||||
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error>> + 'a>, <H as Hasher>::Out, C::Error> {
|
||||
FatDBIterator::<H, C>::new(&self.raw).map(|iter| Box::new(iter) as Box<_>)
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over inserted pairs of key values.
pub struct FatDBIterator<'db, H, C>
where
	H: Hasher + 'db,
	C: NodeCodec<H> + 'db
{
	// Iterator over the underlying trie; yields hashed keys.
	trie_iterator: TrieDBIterator<'db, H, C>,
	// Handle on the trie itself, used to map hashed keys back to the
	// original keys via the stored aux entries.
	trie: &'db TrieDB<'db, H, C>,
}
|
||||
|
||||
impl<'db, H, C> FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Creates new iterator.
|
||||
pub fn new(trie: &'db TrieDB<H, C>) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(FatDBIterator {
|
||||
trie_iterator: TrieDBIterator::new(trie)?,
|
||||
trie: trie,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db, H, C> TrieIterator<H, C> for FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, C::Error> {
|
||||
let hashed_key = H::hash(key);
|
||||
self.trie_iterator.seek(hashed_key.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db, H, C> Iterator for FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
type Item = TrieItem<'db, H::Out, C::Error>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.trie_iterator.next()
|
||||
.map(|res| {
|
||||
res.map(|(hash, value)| {
|
||||
let aux_hash = H::hash(&hash);
|
||||
(self.trie.db().get(&aux_hash).expect("Missing fatdb hash").into_vec(), value)
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use memorydb::MemoryDB;
	use DBValue;
	use keccak_hasher::KeccakHasher;
	use ethtrie::trie::{Trie, TrieMut};
	use ethtrie::{FatDB, FatDBMut};
	use ethereum_types::H256;

	// Inserting through a `FatDBMut` and reading back through a `FatDB`
	// must round-trip both point lookups and full iteration (which yields
	// the original, pre-hash keys).
	#[test]
	fn fatdb_to_trie() {
		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			let mut t = FatDBMut::new(&mut memdb, &mut root);
			t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
		}
		let t = FatDB::new(&memdb, &root).unwrap();
		assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
		assert_eq!(
			t.iter().unwrap().map(Result::unwrap).collect::<Vec<_>>(),
			vec![(vec![0x01u8, 0x23], DBValue::from_slice(&[0x01u8, 0x23] as &[u8]))]);
	}
}
|
|
@ -1,146 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use super::{Result, DBValue, TrieDBMut, TrieMut};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
/// Additionally it stores inserted hash-key mappings for later retrieval.
///
/// Use it as a `Trie` or `TrieMut` trait object.
pub struct FatDBMut<'db, H, C>
where
	H: Hasher + 'db,
	C: NodeCodec<H>
{
	// Underlying mutable trie; caller keys are hashed before reaching it.
	raw: TrieDBMut<'db, H, C>,
}
|
||||
|
||||
impl<'db, H, C> FatDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
pub fn new(db: &'db mut HashDB<H, DBValue>, root: &'db mut H::Out) -> Self {
|
||||
FatDBMut { raw: TrieDBMut::new(db, root) }
|
||||
}
|
||||
|
||||
/// Create a new trie with the backing database `db` and `root`.
|
||||
///
|
||||
/// Returns an error if root does not exist.
|
||||
pub fn from_existing(db: &'db mut HashDB<H, DBValue>, root: &'db mut H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(FatDBMut { raw: TrieDBMut::from_existing(db, root)? })
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&self) -> &HashDB<H, DBValue> {
|
||||
self.raw.db()
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db_mut(&mut self) -> &mut HashDB<H, DBValue> {
|
||||
self.raw.db_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db, H, C> TrieMut<H, C> for FatDBMut<'db, H, C>
where
	H: Hasher,
	C: NodeCodec<H>
{
	fn root(&mut self) -> &H::Out { self.raw.root() }

	fn is_empty(&self) -> bool { self.raw.is_empty() }

	fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
		// All operations translate the caller's key to its hash first; the
		// hash is the actual trie key.
		self.raw.contains(H::hash(key).as_ref())
	}

	fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error>
		where 'a: 'key
	{
		self.raw.get(H::hash(key).as_ref())
	}

	fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
		let hash = H::hash(key);
		let out = self.raw.insert(hash.as_ref(), value)?;
		let db = self.raw.db_mut();

		// insert if it doesn't exist.
		// Store the aux mapping hash(trie_key) -> original key so iterators
		// can recover pre-hash keys; only on first insert (`out.is_none()`),
		// since on a re-insert the mapping is already present.
		if out.is_none() {
			let aux_hash = H::hash(hash.as_ref());
			db.emplace(aux_hash, DBValue::from_slice(key));
		}
		Ok(out)
	}

	fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
		let hash = H::hash(key);
		let out = self.raw.remove(hash.as_ref())?;

		// remove if it already exists.
		// Drop the aux key mapping alongside the value.
		if out.is_some() {
			let aux_hash = H::hash(hash.as_ref());
			self.raw.db_mut().remove(&aux_hash);
		}

		Ok(out)
	}
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use DBValue;
	use memorydb::MemoryDB;
	use ethtrie::trie::{Trie, TrieMut};
	use ethtrie::{TrieDB, FatDBMut};
	use keccak_hasher::KeccakHasher;
	use keccak;
	use ethereum_types::H256;

	// A value inserted through `FatDBMut` must be retrievable from a plain
	// `TrieDB` under the keccak of the original key.
	#[test]
	fn fatdbmut_to_trie() {
		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			let mut t = FatDBMut::new(&mut memdb, &mut root);
			t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
		}
		let t = TrieDB::new(&memdb, &root).unwrap();
		assert_eq!(t.get(&keccak::keccak(&[0x01u8, 0x23])), Ok(Some(DBValue::from_slice(&[0x01u8, 0x23]))));
	}

	// The aux mapping keccak(keccak(key)) -> key must appear on insert and
	// disappear again on remove.
	#[test]
	fn fatdbmut_insert_remove_key_mapping() {
		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		let key = [0x01u8, 0x23];
		let val = [0x01u8, 0x24];
		let key_hash = keccak::keccak(&key);
		let aux_hash = keccak::keccak(&key_hash);
		let mut t = FatDBMut::new(&mut memdb, &mut root);
		t.insert(&key, &val).unwrap();
		assert_eq!(t.get(&key), Ok(Some(DBValue::from_slice(&val))));
		assert_eq!(t.db().get(&aux_hash), Some(DBValue::from_slice(&key)));
		t.remove(&key).unwrap();
		assert_eq!(t.db().get(&aux_hash), None);
	}
}
|
|
@ -1,334 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Trie interface and implementation.
|
||||
extern crate elastic_array;
|
||||
extern crate parity_bytes as bytes; // TODO: name changed; update upstream when `parity-common` is available
|
||||
extern crate hashdb;
|
||||
extern crate rand;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate env_logger;
|
||||
#[cfg(test)]
|
||||
extern crate ethereum_types;
|
||||
#[cfg(test)]
|
||||
extern crate trie_standardmap as standardmap;
|
||||
#[cfg(test)]
|
||||
extern crate patricia_trie_ethereum as ethtrie;
|
||||
#[cfg(test)]
|
||||
extern crate memorydb;
|
||||
#[cfg(test)]
|
||||
extern crate rlp;
|
||||
#[cfg(test)]
|
||||
extern crate keccak_hash as keccak;
|
||||
#[cfg(test)]
|
||||
extern crate keccak_hasher;
|
||||
#[cfg(test)]
|
||||
extern crate triehash;
|
||||
|
||||
use std::{fmt, error};
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
pub mod node;
|
||||
pub mod triedb;
|
||||
pub mod triedbmut;
|
||||
pub mod sectriedb;
|
||||
pub mod sectriedbmut;
|
||||
pub mod recorder;
|
||||
|
||||
mod fatdb;
|
||||
mod fatdbmut;
|
||||
mod lookup;
|
||||
mod nibblevec;
|
||||
mod nibbleslice;
|
||||
mod node_codec;
|
||||
|
||||
pub use self::triedb::{TrieDB, TrieDBIterator};
|
||||
pub use self::triedbmut::{TrieDBMut, ChildReference};
|
||||
pub use self::sectriedbmut::SecTrieDBMut;
|
||||
pub use self::sectriedb::SecTrieDB;
|
||||
pub use self::fatdb::{FatDB, FatDBIterator};
|
||||
pub use self::fatdbmut::FatDBMut;
|
||||
pub use self::recorder::Recorder;
|
||||
pub use self::lookup::Lookup;
|
||||
pub use self::nibbleslice::NibbleSlice;
|
||||
pub use node_codec::NodeCodec;
|
||||
|
||||
pub type DBValue = elastic_array::ElasticArray128<u8>;
|
||||
|
||||
/// Trie Errors.
///
/// These borrow the data within them to avoid excessive copying on every
/// trie operation.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TrieError<T, E> {
	/// Attempted to create a trie with a state root not in the DB.
	InvalidStateRoot(T),
	/// Trie item not found in the database.
	IncompleteDatabase(T),
	/// Corrupt trie item: `T` is the hash of the node, `E` the codec's
	/// decode error.
	DecoderError(T, E),
}
|
||||
|
||||
impl<T, E> fmt::Display for TrieError<T, E> where T: std::fmt::Debug, E: std::fmt::Debug {
	// Human-readable rendering; both payload types are shown via `Debug`.
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		match *self {
			TrieError::InvalidStateRoot(ref root) => write!(f, "Invalid state root: {:?}", root),
			TrieError::IncompleteDatabase(ref missing) => write!(f, "Database missing expected key: {:?}", missing),
			TrieError::DecoderError(ref hash, ref decoder_err) => {
				write!(f, "Decoding failed for hash {:?}; err: {:?}", hash, decoder_err)
			}
		}
	}
}
|
||||
|
||||
impl<T, E> error::Error for TrieError<T, E> where T: std::fmt::Debug, E: std::error::Error {
	// NOTE(review): `description()` is deprecated in modern Rust in favour of
	// the `Display` impl above; kept as-is for the toolchain this crate targets.
	fn description(&self) -> &str {
		match *self {
			TrieError::InvalidStateRoot(_) => "Invalid state root",
			TrieError::IncompleteDatabase(_) => "Incomplete database",
			// Delegate to the inner decode error's description.
			TrieError::DecoderError(_, ref err) => err.description(),
		}
	}
}
|
||||
|
||||
/// Trie result type. Boxed to avoid copying around extra space for the `Hasher`s `Out` on successful queries.
pub type Result<T, H, E> = ::std::result::Result<T, Box<TrieError<H, E>>>;


/// Trie-Item type used for iterators over trie data.
/// Yields `(original_key_bytes, value)` pairs; `U` is the hasher output used
/// in errors, `E` the codec error type.
pub type TrieItem<'a, U, E> = Result<(Vec<u8>, DBValue), U, E>;
|
||||
|
||||
/// Description of what kind of query will be made to the trie.
///
/// This is implemented for any &mut recorder (where the query will return
/// a DBValue), any function taking raw bytes (where no recording will be made),
/// or any tuple of (&mut Recorder, FnOnce(&[u8]))
pub trait Query<H: Hasher> {
	/// Output item.
	type Item;

	/// Decode a byte-slice into the desired item.
	fn decode(self, data: &[u8]) -> Self::Item;

	/// Record that a node has been passed through.
	// Default is a no-op; implementors that record traversal override it.
	fn record(&mut self, _hash: &H::Out, _data: &[u8], _depth: u32) {}
}
|
||||
|
||||
// Recording query: copies the value out verbatim and records every node
// passed through.
impl<'a, H: Hasher> Query<H> for &'a mut Recorder<H::Out> {
	type Item = DBValue;
	fn decode(self, value: &[u8]) -> DBValue { DBValue::from_slice(value) }
	fn record(&mut self, hash: &H::Out, data: &[u8], depth: u32) {
		(&mut **self).record(hash, data, depth);
	}
}

// Plain decoding function: no recording is performed (default `record`).
impl<F, T, H: Hasher> Query<H> for F where F: for<'a> FnOnce(&'a [u8]) -> T {
	type Item = T;
	fn decode(self, value: &[u8]) -> T { (self)(value) }
}

// Recorder plus decoding function: records traversed nodes and decodes the
// final value with the supplied closure.
impl<'a, F, T, H: Hasher> Query<H> for (&'a mut Recorder<H::Out>, F) where F: FnOnce(&[u8]) -> T {
	type Item = T;
	fn decode(self, value: &[u8]) -> T { (self.1)(value) }
	fn record(&mut self, hash: &H::Out, data: &[u8], depth: u32) {
		self.0.record(hash, data, depth)
	}
}
|
||||
|
||||
/// A key-value datastore implemented as a database-backed modified Merkle tree.
pub trait Trie<H: Hasher, C: NodeCodec<H>> {
	/// Return the root of the trie.
	fn root(&self) -> &H::Out;

	/// Is the trie empty?
	// Default: a trie is empty iff its root equals the codec's hashed null node.
	fn is_empty(&self) -> bool { *self.root() == C::HASHED_NULL_NODE }

	/// Does the trie contain a given key?
	fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
		self.get(key).map(|x|x.is_some() )
	}

	/// What is the value of the given key in this trie?
	// Default: query with a plain copy-out, no recording.
	fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error> where 'a: 'key {
		self.get_with(key, DBValue::from_slice)
	}

	/// Search for the key with the given query parameter. See the docs of the `Query`
	/// trait for more details.
	fn get_with<'a, 'key, Q: Query<H>>(
		&'a self,
		key: &'key [u8],
		query: Q
	) -> Result<Option<Q::Item>, H::Out, C::Error> where 'a: 'key;

	/// Returns a depth-first iterator over the elements of trie.
	fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error >> + 'a>, H::Out, C::Error>;
}
|
||||
|
||||
/// A key-value datastore implemented as a database-backed modified Merkle tree.
// Mutable counterpart of `Trie`; note `root()` takes `&mut self` so the
// implementation may commit pending changes before reporting the root.
pub trait TrieMut<H: Hasher, C: NodeCodec<H>> {
	/// Return the root of the trie.
	fn root(&mut self) -> &H::Out;

	/// Is the trie empty?
	fn is_empty(&self) -> bool;

	/// Does the trie contain a given key?
	fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
		self.get(key).map(|x| x.is_some())
	}

	/// What is the value of the given key in this trie?
	fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error> where 'a: 'key;

	/// Insert a `key`/`value` pair into the trie. An empty value is equivalent to removing
	/// `key` from the trie. Returns the old value associated with this key, if it existed.
	fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error>;

	/// Remove a `key` from the trie. Equivalent to making it equal to the empty
	/// value. Returns the old value associated with this key, if it existed.
	fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error>;
}
|
||||
|
||||
/// A trie iterator that also supports random access (`seek()`).
pub trait TrieIterator<H: Hasher, C: NodeCodec<H>>: Iterator {
	/// Position the iterator on the first element with key >= `key`
	fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, <C as NodeCodec<H>>::Error>;
}
|
||||
|
||||
/// Trie types
// Selects which trie flavour `TrieFactory` constructs.
#[derive(Debug, PartialEq, Clone)]
pub enum TrieSpec {
	/// Generic trie.
	Generic,
	/// Secure trie.
	Secure,
	/// Secure trie with fat database.
	Fat,
}
|
||||
|
||||
impl Default for TrieSpec {
|
||||
fn default() -> TrieSpec {
|
||||
TrieSpec::Secure
|
||||
}
|
||||
}
|
||||
|
||||
/// Trie factory.
#[derive(Default, Clone)]
pub struct TrieFactory<H: Hasher, C: NodeCodec<H>> {
	// Which trie flavour (`Generic`/`Secure`/`Fat`) this factory builds.
	spec: TrieSpec,
	// Zero-sized markers tying the factory to its hasher and codec types.
	mark_hash: PhantomData<H>,
	mark_codec: PhantomData<C>,
}
|
||||
|
||||
/// All different kinds of tries.
/// This is used to prevent a heap allocation for every created trie.
pub enum TrieKinds<'db, H: Hasher + 'db, C: NodeCodec<H>> {
	/// A generic trie db.
	Generic(TrieDB<'db, H, C>),
	/// A secure trie db.
	Secure(SecTrieDB<'db, H, C>),
	/// A fat trie db.
	Fat(FatDB<'db, H, C>),
}
|
||||
|
||||
// wrapper macro for making the match easier to deal with.
// Expands to a `match` over `$me: &TrieKinds` that forwards the call
// `$f_name($($param),*)` to whichever concrete trie variant is held.
macro_rules! wrapper {
	($me: ident, $f_name: ident, $($param: ident),*) => {
		match *$me {
			TrieKinds::Generic(ref t) => t.$f_name($($param),*),
			TrieKinds::Secure(ref t) => t.$f_name($($param),*),
			TrieKinds::Fat(ref t) => t.$f_name($($param),*),
		}
	}
}
|
||||
|
||||
impl<'db, H: Hasher, C: NodeCodec<H>> Trie<H, C> for TrieKinds<'db, H, C> {
	// Every method forwards unchanged to the concrete trie variant via the
	// `wrapper!` macro above.
	fn root(&self) -> &H::Out {
		wrapper!(self, root,)
	}

	fn is_empty(&self) -> bool {
		wrapper!(self, is_empty,)
	}

	fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
		wrapper!(self, contains, key)
	}

	fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
		where 'a: 'key
	{
		wrapper!(self, get_with, key, query)
	}

	fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error>> + 'a>, H::Out, C::Error> {
		wrapper!(self, iter,)
	}
}
|
||||
|
||||
impl<'db, H, C> TrieFactory<H, C>
where
	H: Hasher,
	C: NodeCodec<H> + 'db
{
	/// Creates new factory.
	pub fn new(spec: TrieSpec) -> Self {
		TrieFactory { spec, mark_hash: PhantomData, mark_codec: PhantomData }
	}

	/// Create new immutable instance of Trie.
	// Returns a stack-allocated `TrieKinds` rather than a boxed trait object.
	pub fn readonly(
		&self,
		db: &'db HashDB<H, DBValue>,
		root: &'db H::Out
	) -> Result<TrieKinds<'db, H, C>, H::Out, <C as NodeCodec<H>>::Error> {
		match self.spec {
			TrieSpec::Generic => Ok(TrieKinds::Generic(TrieDB::new(db, root)?)),
			TrieSpec::Secure => Ok(TrieKinds::Secure(SecTrieDB::new(db, root)?)),
			TrieSpec::Fat => Ok(TrieKinds::Fat(FatDB::new(db, root)?)),
		}
	}

	/// Create new mutable instance of Trie.
	// Mutable tries are returned boxed since there is no mutable analogue of
	// `TrieKinds` here.
	pub fn create(&self, db: &'db mut HashDB<H, DBValue>, root: &'db mut H::Out) -> Box<TrieMut<H, C> + 'db> {
		match self.spec {
			TrieSpec::Generic => Box::new(TrieDBMut::<_, C>::new(db, root)),
			TrieSpec::Secure => Box::new(SecTrieDBMut::<_, C>::new(db, root)),
			TrieSpec::Fat => Box::new(FatDBMut::<_, C>::new(db, root)),
		}
	}

	/// Create new mutable instance of trie and check for errors.
	pub fn from_existing(
		&self,
		db: &'db mut HashDB<H, DBValue>,
		root: &'db mut H::Out
	) -> Result<Box<TrieMut<H,C> + 'db>, H::Out, <C as NodeCodec<H>>::Error> {
		match self.spec {
			TrieSpec::Generic => Ok(Box::new(TrieDBMut::<_, C>::from_existing(db, root)?)),
			TrieSpec::Secure => Ok(Box::new(SecTrieDBMut::<_, C>::from_existing(db, root)?)),
			TrieSpec::Fat => Ok(Box::new(FatDBMut::<_, C>::from_existing(db, root)?)),
		}
	}

	/// Returns true iff the trie DB is a fat DB (allows enumeration of keys).
	pub fn is_fat(&self) -> bool { self.spec == TrieSpec::Fat }
}
|
|
@ -1,104 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Trie lookup via HashDB.
|
||||
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use nibbleslice::NibbleSlice;
|
||||
use node::Node;
|
||||
use node_codec::NodeCodec;
|
||||
use super::{DBValue, Result, TrieError, Query};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
/// Trie lookup helper object.
pub struct Lookup<'a, H: Hasher + 'a, C: NodeCodec<H>, Q: Query<H>> {
	/// database to query from.
	pub db: &'a HashDB<H, DBValue>,
	/// Query object to record nodes and transform data.
	pub query: Q,
	/// Hash to start at
	pub hash: H::Out,
	// Ties the otherwise-unused codec type parameter to the struct.
	pub marker: PhantomData<C>, // TODO: probably not needed when all is said and done? When Query is made generic?
}
|
||||
|
||||
impl<'a, H, C, Q> Lookup<'a, H, C, Q>
where
	H: Hasher + 'a,
	C: NodeCodec<H> + 'a,
	Q: Query<H>,
{
	/// Look up the given key. If the value is found, it will be passed to the given
	/// function to decode or copy.
	///
	/// Walks from `self.hash` down the trie, consuming `key` nibble by nibble.
	/// Returns `Ok(None)` when the key is absent; errors if a node is missing
	/// from the database or fails to decode.
	pub fn look_up(mut self, mut key: NibbleSlice) -> Result<Option<Q::Item>, H::Out, C::Error> {
		let mut hash = self.hash;

		// this loop iterates through non-inline nodes.
		for depth in 0.. {
			let node_data = match self.db.get(&hash) {
				Some(value) => value,
				// A missing node at depth 0 is the root itself, which gets a
				// distinct error variant.
				None => return Err(Box::new(match depth {
					0 => TrieError::InvalidStateRoot(hash),
					_ => TrieError::IncompleteDatabase(hash),
				})),
			};

			// Let the query record this node (e.g. for proofs).
			self.query.record(&hash, &node_data, depth);

			// this loop iterates through all inline children (usually max 1)
			// without incrementing the depth.
			let mut node_data = &node_data[..];
			loop {
				let decoded = match C::decode(node_data) {
					Ok(node) => node,
					Err(e) => {
						return Err(Box::new(TrieError::DecoderError(hash, e)))
					}
				};
				match decoded {
					// Leaf: the remaining key must match the leaf's slice exactly.
					Node::Leaf(slice, value) => {
						return Ok(match slice == key {
							true => Some(self.query.decode(value)),
							false => None,
						})
					}
					// Extension: consume the shared prefix and descend.
					Node::Extension(slice, item) => {
						if key.starts_with(&slice) {
							node_data = item;
							key = key.mid(slice.len());
						} else {
							return Ok(None)
						}
					}
					// Branch: empty key means the value stored at this branch;
					// otherwise descend into the child for the next nibble.
					Node::Branch(children, value) => match key.is_empty() {
						true => return Ok(value.map(move |val| self.query.decode(val))),
						false => {
							node_data = children[key.at(0) as usize];
							key = key.mid(1);
						}
					},
					_ => return Ok(None),
				}

				// check if new node data is inline or hash.
				// A hash means another database fetch; break to the outer loop.
				if let Some(h) = C::try_decode_hash(&node_data) {
					hash = h;
					break
				}
			}
		}
		Ok(None)
	}
}
|
|
@ -1,328 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Nibble-orientated view onto byte-slice, allowing nibble-precision offsets.
|
||||
|
||||
use std::cmp::*;
|
||||
use std::fmt;
|
||||
use elastic_array::ElasticArray36;
|
||||
|
||||
/// Nibble-orientated view onto byte-slice, allowing nibble-precision offsets.
///
/// This is an immutable struct. No operations actually change it.
///
/// # Example
/// ```snippet
/// use patricia_trie::nibbleslice::NibbleSlice;
/// fn main() {
///   let d1 = &[0x01u8, 0x23, 0x45];
///   let d2 = &[0x34u8, 0x50, 0x12];
///   let d3 = &[0x00u8, 0x12];
///   let n1 = NibbleSlice::new(d1); // 0,1,2,3,4,5
///   let n2 = NibbleSlice::new(d2); // 3,4,5,0,1,2
///   let n3 = NibbleSlice::new_offset(d3, 1); // 0,1,2
///   assert!(n1 > n3); // 0,1,2,... > 0,1,2
///   assert!(n1 < n2); // 0,... < 3,...
///   assert!(n2.mid(3) == n3); // 0,1,2 == 0,1,2
///   assert!(n1.starts_with(&n3));
///   assert_eq!(n1.common_prefix(&n3), 3);
///   assert_eq!(n2.mid(3).common_prefix(&n1), 3);
/// }
/// ```
// NOTE(review): `Ord` is derived (field-wise lexicographic over
// `data`/`offset`/suffix) while `PartialOrd` below is hand-written to compare
// nibble content — the two orderings can disagree for equal-content slices at
// different offsets. Confirm no caller relies on total `cmp()`.
#[derive(Copy, Clone, Eq, Ord)]
pub struct NibbleSlice<'a> {
	// Primary backing bytes; nibble 0 is the high half of `data[offset/2]`.
	data: &'a [u8],
	// Offset into `data`, counted in nibbles.
	offset: usize,
	// Optional second slice logically appended after `data` (see `new_composed`).
	data_encode_suffix: &'a [u8],
	// Nibble offset into `data_encode_suffix`.
	offset_encode_suffix: usize,
}
|
||||
|
||||
/// Iterator type for a nibble slice.
pub struct NibbleSliceIterator<'a> {
	// Slice being iterated.
	p: &'a NibbleSlice<'a>,
	// Number of nibbles already yielded.
	i: usize,
}
|
||||
|
||||
impl<'a> Iterator for NibbleSliceIterator<'a> {
|
||||
type Item = u8;
|
||||
fn next(&mut self) -> Option<u8> {
|
||||
self.i += 1;
|
||||
match self.i <= self.p.len() {
|
||||
true => Some(self.p.at(self.i - 1)),
|
||||
false => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> NibbleSlice<'a> {
|
||||
/// Create a new nibble slice with the given byte-slice.
|
||||
pub fn new(data: &'a [u8]) -> Self { NibbleSlice::new_offset(data, 0) }
|
||||
|
||||
/// Create a new nibble slice with the given byte-slice with a nibble offset.
|
||||
pub fn new_offset(data: &'a [u8], offset: usize) -> Self {
|
||||
NibbleSlice {
|
||||
data,
|
||||
offset,
|
||||
data_encode_suffix: &b""[..],
|
||||
offset_encode_suffix: 0
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a composed nibble slice; one followed by the other.
|
||||
pub fn new_composed(a: &NibbleSlice<'a>, b: &NibbleSlice<'a>) -> Self {
|
||||
NibbleSlice {
|
||||
data: a.data,
|
||||
offset: a.offset,
|
||||
data_encode_suffix: b.data,
|
||||
offset_encode_suffix: b.offset
|
||||
}
|
||||
}
|
||||
|
||||
/// Get an iterator for the series of nibbles.
|
||||
pub fn iter(&'a self) -> NibbleSliceIterator<'a> {
|
||||
NibbleSliceIterator { p: self, i: 0 }
|
||||
}
|
||||
|
||||
/// Create a new nibble slice from the given HPE encoded data (e.g. output of `encoded()`).
|
||||
pub fn from_encoded(data: &'a [u8]) -> (NibbleSlice, bool) {
|
||||
(Self::new_offset(data, if data[0] & 16 == 16 {1} else {2}), data[0] & 32 == 32)
|
||||
}
|
||||
|
||||
/// Is this an empty slice?
|
||||
pub fn is_empty(&self) -> bool { self.len() == 0 }
|
||||
|
||||
/// Get the length (in nibbles, naturally) of this slice.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize { (self.data.len() + self.data_encode_suffix.len()) * 2 - self.offset - self.offset_encode_suffix }
|
||||
|
||||
/// Get the nibble at position `i`.
|
||||
#[inline(always)]
|
||||
pub fn at(&self, i: usize) -> u8 {
|
||||
let l = self.data.len() * 2 - self.offset;
|
||||
if i < l {
|
||||
if (self.offset + i) & 1 == 1 {
|
||||
self.data[(self.offset + i) / 2] & 15u8
|
||||
}
|
||||
else {
|
||||
self.data[(self.offset + i) / 2] >> 4
|
||||
}
|
||||
}
|
||||
else {
|
||||
let i = i - l;
|
||||
if (self.offset_encode_suffix + i) & 1 == 1 {
|
||||
self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] & 15u8
|
||||
}
|
||||
else {
|
||||
self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] >> 4
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return object which represents a view on to this slice (further) offset by `i` nibbles.
|
||||
pub fn mid(&self, i: usize) -> NibbleSlice<'a> {
|
||||
NibbleSlice {
|
||||
data: self.data,
|
||||
offset: self.offset + i,
|
||||
data_encode_suffix: &b""[..],
|
||||
offset_encode_suffix: 0
|
||||
}
|
||||
}
|
||||
|
||||
/// Do we start with the same nibbles as the whole of `them`?
|
||||
pub fn starts_with(&self, them: &Self) -> bool { self.common_prefix(them) == them.len() }
|
||||
|
||||
/// How many of the same nibbles at the beginning do we match with `them`?
|
||||
pub fn common_prefix(&self, them: &Self) -> usize {
|
||||
let s = min(self.len(), them.len());
|
||||
let mut i = 0usize;
|
||||
while i < s {
|
||||
if self.at(i) != them.at(i) { break; }
|
||||
i += 1;
|
||||
}
|
||||
i
|
||||
}
|
||||
|
||||
/// Encode while nibble slice in prefixed hex notation, noting whether it `is_leaf`.
|
||||
#[inline]
|
||||
pub fn encoded(&self, is_leaf: bool) -> ElasticArray36<u8> {
|
||||
let l = self.len();
|
||||
let mut r = ElasticArray36::new();
|
||||
let mut i = l % 2;
|
||||
r.push(if i == 1 {0x10 + self.at(0)} else {0} + if is_leaf {0x20} else {0});
|
||||
while i < l {
|
||||
r.push(self.at(i) * 16 + self.at(i + 1));
|
||||
i += 2;
|
||||
}
|
||||
r
|
||||
}
|
||||
|
||||
/// Encode only the leftmost `n` bytes of the nibble slice in prefixed hex notation,
|
||||
/// noting whether it `is_leaf`.
|
||||
pub fn encoded_leftmost(&self, n: usize, is_leaf: bool) -> ElasticArray36<u8> {
|
||||
let l = min(self.len(), n);
|
||||
let mut r = ElasticArray36::new();
|
||||
let mut i = l % 2;
|
||||
r.push(if i == 1 {0x10 + self.at(0)} else {0} + if is_leaf {0x20} else {0});
|
||||
while i < l {
|
||||
r.push(self.at(i) * 16 + self.at(i + 1));
|
||||
i += 2;
|
||||
}
|
||||
r
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq for NibbleSlice<'a> {
|
||||
fn eq(&self, them: &Self) -> bool {
|
||||
self.len() == them.len() && self.starts_with(them)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialOrd for NibbleSlice<'a> {
|
||||
fn partial_cmp(&self, them: &Self) -> Option<Ordering> {
|
||||
let s = min(self.len(), them.len());
|
||||
let mut i = 0usize;
|
||||
while i < s {
|
||||
match self.at(i).partial_cmp(&them.at(i)).unwrap() {
|
||||
Ordering::Less => return Some(Ordering::Less),
|
||||
Ordering::Greater => return Some(Ordering::Greater),
|
||||
_ => i += 1,
|
||||
}
|
||||
}
|
||||
self.len().partial_cmp(&them.len())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for NibbleSlice<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
for i in 0..self.len() {
|
||||
match i {
|
||||
0 => write!(f, "{:01x}", self.at(i))?,
|
||||
_ => write!(f, "'{:01x}", self.at(i))?,
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
	use super::NibbleSlice;
	use elastic_array::ElasticArray36;
	// Three bytes = six nibbles: 0, 1, 2, 3, 4, 5.
	static D: &'static [u8;3] = &[0x01u8, 0x23, 0x45];

	#[test]
	fn basics() {
		let n = NibbleSlice::new(D);
		assert_eq!(n.len(), 6);
		assert!(!n.is_empty());

		// Offset equal to the nibble count yields an empty slice.
		let n = NibbleSlice::new_offset(D, 6);
		assert!(n.is_empty());

		// Offset 3 skips the first three nibbles; the rest are 3, 4, 5.
		let n = NibbleSlice::new_offset(D, 3);
		assert_eq!(n.len(), 3);
		for i in 0..3 {
			assert_eq!(n.at(i), i as u8 + 3);
		}
	}

	#[test]
	fn iterator() {
		let n = NibbleSlice::new(D);
		let mut nibbles: Vec<u8> = vec![];
		nibbles.extend(n.iter());
		assert_eq!(nibbles, (0u8..6).collect::<Vec<_>>())
	}

	#[test]
	fn mid() {
		// `mid(i)` behaves like `new_offset`: it drops the first `i` nibbles.
		let n = NibbleSlice::new(D);
		let m = n.mid(2);
		for i in 0..4 {
			assert_eq!(m.at(i), i as u8 + 2);
		}
		let m = n.mid(3);
		for i in 0..3 {
			assert_eq!(m.at(i), i as u8 + 3);
		}
	}

	#[test]
	fn encoded() {
		// Hex-prefix encoding: first byte is 0x20 for leaf, +0x10 (and the
		// first nibble) when the nibble count is odd.
		let n = NibbleSlice::new(D);
		assert_eq!(n.encoded(false), ElasticArray36::from_slice(&[0x00, 0x01, 0x23, 0x45]));
		assert_eq!(n.encoded(true), ElasticArray36::from_slice(&[0x20, 0x01, 0x23, 0x45]));
		assert_eq!(n.mid(1).encoded(false), ElasticArray36::from_slice(&[0x11, 0x23, 0x45]));
		assert_eq!(n.mid(1).encoded(true), ElasticArray36::from_slice(&[0x31, 0x23, 0x45]));
	}

	#[test]
	fn from_encoded() {
		// Round-trip of `encoded`: decoding yields the slice and the leaf flag.
		let n = NibbleSlice::new(D);
		assert_eq!((n, false), NibbleSlice::from_encoded(&[0x00, 0x01, 0x23, 0x45]));
		assert_eq!((n, true), NibbleSlice::from_encoded(&[0x20, 0x01, 0x23, 0x45]));
		assert_eq!((n.mid(1), false), NibbleSlice::from_encoded(&[0x11, 0x23, 0x45]));
		assert_eq!((n.mid(1), true), NibbleSlice::from_encoded(&[0x31, 0x23, 0x45]));
	}

	#[test]
	fn find_length_of_common_prefix() {
		let n = NibbleSlice::new(D);

		let other = &[0x01u8, 0x23, 0x01, 0x23, 0x45, 0x67];
		let m = NibbleSlice::new(other);

		// `common_prefix` is symmetric.
		assert_eq!(n.common_prefix(&m), 4);
		assert_eq!(m.common_prefix(&n), 4);
		assert_eq!(n.mid(1).common_prefix(&m.mid(1)), 3);
		assert_eq!(n.mid(1).common_prefix(&m.mid(2)), 0);
		// `m.mid(4)` is longer than `n`, so `n` cannot start with it, but the
		// longer slice does start with the shorter.
		assert_eq!(n.common_prefix(&m.mid(4)), 6);
		assert!(!n.starts_with(&m.mid(4)));
		assert!(m.mid(4).starts_with(&n));
	}

	#[test]
	fn compare_sizes() {
		// `other` shares a tail with D at offset 4 but is shorter overall.
		let other = &[0x01u8, 0x23, 0x01, 0x23, 0x45];
		let n = NibbleSlice::new(D);
		let m = NibbleSlice::new(other);

		assert!(n != m);
		assert!(n > m);
		assert!(m < n);

		assert!(n == m.mid(4));
		assert!(n >= m.mid(4));
		assert!(n <= m.mid(4));
	}

	#[test]
	fn common_prefix_for_concatenated_slices() {
		let first = NibbleSlice::new(&[0x01u8, 0x23, 0x45]); // 0'1'2'3'4'5'
		let first_ext = NibbleSlice::new(&[0x22u8, 0x44, 0x55]); // 2'2'4'4'5'5
		let concat = NibbleSlice::new_composed(&first, &first_ext);
		assert!(concat.len() == first.len() + first_ext.len());

		// 0'1'2'3'4'5'2'2'9'9'5'5'
		let second = NibbleSlice::new(&[0x01u8, 0x23, 0x45, 0x22, 0x99, 0x55]);

		let common_prefix_length = first.common_prefix(&second);
		assert_eq!(common_prefix_length, 6);

		// The composed slice extends the match two nibbles further (2'2').
		let common_prefix_length = concat.common_prefix(&second);
		assert_eq!(common_prefix_length, 8);
	}
}
|
|
@ -1,146 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! An owning, nibble-oriented byte vector.
|
||||
use elastic_array::ElasticArray36;
|
||||
use nibbleslice::NibbleSlice;
|
||||
|
||||
/// Owning, nibble-oriented byte vector. Counterpart to `NibbleSlice`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct NibbleVec {
	// Packed storage: two nibbles per byte, high 4 bits first (see `at`/`push`).
	inner: ElasticArray36<u8>,
	// Number of nibbles held (not bytes); when odd, the last byte's low
	// 4 bits are unused.
	len: usize,
}
|
||||
|
||||
impl Default for NibbleVec {
|
||||
fn default() -> Self {
|
||||
NibbleVec::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl NibbleVec {
	/// Make a new, empty `NibbleVec`.
	pub fn new() -> Self {
		NibbleVec {
			inner: ElasticArray36::new(),
			len: 0
		}
	}

	/// Length of the `NibbleVec`, in nibbles.
	#[inline(always)]
	pub fn len(&self) -> usize { self.len }

	/// Returns true if `NibbleVec` has zero length.
	pub fn is_empty(&self) -> bool { self.len == 0 }

	/// Get the nibble at the given offset.
	///
	/// Note: despite the original "try" phrasing, this does not return an
	/// `Option` — an out-of-range `idx` panics via the slice index below.
	#[inline]
	pub fn at(&self, idx: usize) -> u8 {
		// Even offsets live in the high 4 bits of the byte, odd in the low 4.
		if idx % 2 == 0 {
			self.inner[idx / 2] >> 4
		} else {
			self.inner[idx / 2] & 0x0F
		}
	}

	/// Push a nibble onto the `NibbleVec`. Ignores the high 4 bits.
	pub fn push(&mut self, nibble: u8) {
		let nibble = nibble & 0x0F;

		// Even length: start a fresh byte with the nibble in the high bits.
		// Odd length: fill the vacant low bits of the last byte.
		if self.len % 2 == 0 {
			self.inner.push(nibble << 4);
		} else {
			*self.inner.last_mut().expect("len != 0 since len % 2 != 0; inner has a last element; qed") |= nibble;
		}

		self.len += 1;
	}

	/// Try to pop a nibble off the `NibbleVec`. Fails (returns `None`) if len == 0.
	pub fn pop(&mut self) -> Option<u8> {
		if self.is_empty() {
			return None;
		}

		let byte = self.inner.pop().expect("len != 0; inner has last elem; qed");
		// Even length means the last byte was full: keep its high nibble
		// (pushed back with low bits cleared) and return the low nibble.
		// Odd length means the last byte was half-used: return its high nibble
		// and drop the byte entirely.
		let nibble = if self.len % 2 == 0 {
			self.inner.push(byte & 0xF0);
			byte & 0x0F
		} else {
			byte >> 4
		};

		self.len -= 1;
		Some(nibble)
	}

	/// Try to treat this `NibbleVec` as a `NibbleSlice`. Works only if len is even
	/// (a `NibbleSlice` over the raw bytes would otherwise expose the unused
	/// trailing half-byte).
	pub fn as_nibbleslice(&self) -> Option<NibbleSlice> {
		if self.len % 2 == 0 {
			Some(NibbleSlice::new(self.inner()))
		} else {
			None
		}
	}

	/// Get the underlying byte slice (packed nibbles).
	pub fn inner(&self) -> &[u8] {
		&self.inner[..]
	}
}
|
||||
|
||||
impl<'a> From<NibbleSlice<'a>> for NibbleVec {
|
||||
fn from(s: NibbleSlice<'a>) -> Self {
|
||||
let mut v = NibbleVec::new();
|
||||
for i in 0..s.len() {
|
||||
v.push(s.at(i));
|
||||
}
|
||||
v
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
	use super::NibbleVec;

	#[test]
	fn push_pop() {
		let mut v = NibbleVec::new();

		// Push nibbles 0..16 and check length and readback after each push.
		for i in 0..16 {
			v.push(i);
			assert_eq!(v.len() - 1, i as usize);
			assert_eq!(v.at(i as usize), i);
		}

		// Pop them back in LIFO order.
		for i in (0..16).rev() {
			assert_eq!(v.pop(), Some(i));
			assert_eq!(v.len(), i as usize);
		}
	}

	#[test]
	fn nibbleslice_conv() {
		// 10 nibbles (even) so `as_nibbleslice` succeeds; converting the
		// slice back must round-trip to an equal vector.
		let mut v = NibbleVec::new();
		for i in 0..10 {
			v.push(i);
		}

		let v2: NibbleVec = v.as_nibbleslice().unwrap().into();
		assert_eq!(v, v2);
	}
}
|
|
@ -1,106 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use elastic_array::ElasticArray36;
|
||||
use nibbleslice::NibbleSlice;
|
||||
use nibblevec::NibbleVec;
|
||||
use super::DBValue;
|
||||
|
||||
/// Partial node key type.
|
||||
pub type NodeKey = ElasticArray36<u8>;
|
||||
|
||||
/// Type of node in the trie and essential information thereof.
///
/// All payloads borrow from the encoded node data (lifetime `'a`); see
/// `OwnedNode` for the owning counterpart.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum Node<'a> {
	/// Null trie node; could be an empty root or an empty branch entry.
	Empty,
	/// Leaf node; has key slice and value. Value may not be empty.
	Leaf(NibbleSlice<'a>, &'a [u8]),
	/// Extension node; has key slice and node data. Data may not be null.
	Extension(NibbleSlice<'a>, &'a [u8]),
	/// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data.
	Branch([&'a [u8]; 16], Option<&'a [u8]>),
}
|
||||
|
||||
/// A Sparse (non mutable) owned vector struct to hold branch keys and value
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct Branch {
	// All 16 child payloads concatenated, with the optional value appended last.
	data: Vec<u8>,
	// Offsets into `data`: child `i` spans `ubounds[i]..ubounds[i + 1]` for
	// i < 16; the value (when present) spans `ubounds[16]..ubounds[17]`.
	ubounds: [usize; 18],
	// Whether a value payload is present at the end of `data`.
	has_value: bool,
}
|
||||
|
||||
impl Branch {
	// Build the concatenated storage from 16 borrowed children plus an
	// optional value. `ubounds[0]` stays 0; the zip fills `ubounds[1..=16]`
	// with each child's end offset.
	fn new(a: [&[u8]; 16], value: Option<&[u8]>) -> Self {
		// Capacity covers the children only; the value (if any) may trigger
		// one extra reallocation below.
		let mut data = Vec::with_capacity(a.iter().map(|inner| inner.len()).sum());
		let mut ubounds = [0; 18];
		for (inner, ub) in a.iter().zip(ubounds.iter_mut().skip(1)) {
			data.extend_from_slice(inner);
			*ub = data.len();
		}
		if let Some(value) = value {
			data.extend(value);
			// End offset of the value; ubounds[16] (end of the last child)
			// doubles as its start.
			ubounds[17] = data.len();
		}
		Branch { data, ubounds, has_value: value.is_some() }
	}

	/// Get the node value, if any
	pub fn get_value(&self) -> Option<&[u8]> {
		// When no value was stored, ubounds[17] is 0, so the guard on
		// `has_value` is essential.
		if self.has_value {
			Some(&self.data[self.ubounds[16]..self.ubounds[17]])
		} else {
			None
		}
	}

	/// Test if the node has a value
	pub fn has_value(&self) -> bool {
		self.has_value
	}
}
|
||||
|
||||
impl ::std::ops::Index<usize> for Branch {
|
||||
type Output = [u8];
|
||||
fn index(&self, index: usize) -> &[u8] {
|
||||
assert!(index < 16);
|
||||
&self.data[self.ubounds[index]..self.ubounds[index + 1]]
|
||||
}
|
||||
}
|
||||
|
||||
/// An owning node type. Useful for trie iterators.
///
/// Variants mirror [`Node`] but own their data (built via `From<Node>`).
#[derive(Debug, PartialEq, Eq)]
pub enum OwnedNode {
	/// Empty trie node.
	Empty,
	/// Leaf node: partial key and value.
	Leaf(NibbleVec, DBValue),
	/// Extension node: partial key and child node.
	Extension(NibbleVec, DBValue),
	/// Branch node: 16 children and an optional value.
	Branch(Branch),
}
|
||||
|
||||
impl<'a> From<Node<'a>> for OwnedNode {
	// Deep-copy the borrowed node payloads into owned `NibbleVec`/`DBValue`
	// storage so the result can outlive the encoded node data.
	fn from(node: Node<'a>) -> Self {
		match node {
			Node::Empty => OwnedNode::Empty,
			Node::Leaf(k, v) => OwnedNode::Leaf(k.into(), DBValue::from_slice(v)),
			Node::Extension(k, child) => OwnedNode::Extension(k.into(), DBValue::from_slice(child)),
			Node::Branch(c, val) => OwnedNode::Branch(Branch::new(c, val)),
		}
	}
}
|
|
@ -1,55 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Generic trait for trie node encoding/decoding. Takes a `hashdb::Hasher`
|
||||
//! to parametrize the hashes used in the codec.
|
||||
|
||||
use hashdb::Hasher;
|
||||
use node::Node;
|
||||
use ChildReference;
|
||||
|
||||
use elastic_array::{ElasticArray128};
|
||||
|
||||
/// Trait for trie node encoding/decoding
pub trait NodeCodec<H: Hasher>: Sized {
	/// Encoding error type
	type Error: ::std::error::Error;

	/// Hash of this codec's encoding of the null (empty) node
	const HASHED_NULL_NODE: H::Out;

	/// Decode bytes to a `Node`. Returns `Self::Error` on failure.
	fn decode(data: &[u8]) -> Result<Node, Self::Error>;

	/// Decode bytes to the `Hasher`'s output type. Returns `None` on failure.
	fn try_decode_hash(data: &[u8]) -> Option<H::Out>;

	/// Check if the provided bytes correspond to the codec's "empty" node.
	fn is_empty_node(data: &[u8]) -> bool;

	/// Returns an encoded empty node
	fn empty_node() -> Vec<u8>;

	/// Returns an encoded leaf node
	fn leaf_node(partial: &[u8], value: &[u8]) -> Vec<u8>;

	/// Returns an encoded extension node
	fn ext_node(partial: &[u8], child_ref: ChildReference<H::Out>) -> Vec<u8>;

	/// Returns an encoded branch node. Takes an iterator yielding `ChildReference<H::Out>` and an optional value
	fn branch_node<I>(children: I, value: Option<ElasticArray128<u8>>) -> Vec<u8>
		where I: IntoIterator<Item=Option<ChildReference<H::Out>>>;
}
|
|
@ -1,208 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Trie query recorder.
|
||||
|
||||
use bytes::Bytes;
|
||||
|
||||
/// A record of a visited node.
///
/// `HO` is the hash-output type in use (e.g. `H256` in this crate's tests).
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct Record<HO> {
	/// The depth of this node.
	pub depth: u32,

	/// The raw data of the node.
	pub data: Bytes,

	/// The hash of the data.
	pub hash: HO,
}
|
||||
|
||||
/// Records trie nodes as they pass it.
#[derive(Debug)]
pub struct Recorder<HO> {
	// Visited nodes, in the order they were recorded.
	nodes: Vec<Record<HO>>,
	// Nodes shallower than this depth are not recorded (see `record`).
	min_depth: u32,
}
|
||||
|
||||
impl<HO: Copy> Default for Recorder<HO> {
|
||||
fn default() -> Self {
|
||||
Recorder::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<HO: Copy> Recorder<HO> {
|
||||
/// Create a new `Recorder` which records all given nodes.
|
||||
#[inline]
|
||||
pub fn new() -> Self {
|
||||
Recorder::with_depth(0)
|
||||
}
|
||||
|
||||
/// Create a `Recorder` which only records nodes beyond a given depth.
|
||||
pub fn with_depth(depth: u32) -> Self {
|
||||
Recorder {
|
||||
nodes: Vec::new(),
|
||||
min_depth: depth,
|
||||
}
|
||||
}
|
||||
|
||||
/// Record a visited node, given its hash, data, and depth.
|
||||
pub fn record(&mut self, hash: &HO, data: &[u8], depth: u32) {
|
||||
if depth >= self.min_depth {
|
||||
self.nodes.push(Record {
|
||||
depth: depth,
|
||||
data: data.into(),
|
||||
hash: *hash,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Drain all visited records.
|
||||
pub fn drain(&mut self) -> Vec<Record<HO>> {
|
||||
::std::mem::replace(&mut self.nodes, Vec::new())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use keccak::keccak;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethereum_types::H256;
|
||||
|
||||
#[test]
|
||||
fn basic_recorder() {
|
||||
let mut basic = Recorder::<H256>::new();
|
||||
|
||||
let node1 = vec![1, 2, 3, 4];
|
||||
let node2 = vec![4, 5, 6, 7, 8, 9, 10];
|
||||
|
||||
let (hash1, hash2) = (keccak(&node1), keccak(&node2));
|
||||
basic.record(&hash1, &node1, 0);
|
||||
basic.record(&hash2, &node2, 456);
|
||||
|
||||
let record1 = Record {
|
||||
data: node1,
|
||||
hash: hash1,
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
let record2 = Record {
|
||||
data: node2,
|
||||
hash: hash2,
|
||||
depth: 456,
|
||||
};
|
||||
|
||||
|
||||
assert_eq!(basic.drain(), vec![record1, record2]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_recorder_min_depth() {
|
||||
let mut basic = Recorder::<H256>::with_depth(400);
|
||||
|
||||
let node1 = vec![1, 2, 3, 4];
|
||||
let node2 = vec![4, 5, 6, 7, 8, 9, 10];
|
||||
|
||||
let hash1 = keccak(&node1);
|
||||
let hash2 = keccak(&node2);
|
||||
basic.record(&hash1, &node1, 0);
|
||||
basic.record(&hash2, &node2, 456);
|
||||
|
||||
let records = basic.drain();
|
||||
|
||||
assert_eq!(records.len(), 1);
|
||||
|
||||
assert_eq!(records[0].clone(), Record {
|
||||
data: node2,
|
||||
hash: hash2,
|
||||
depth: 456,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trie_record() {
|
||||
use ethtrie::trie::{Trie, TrieMut, Recorder};
|
||||
use memorydb::MemoryDB;
|
||||
use ethtrie::{TrieDB, TrieDBMut};
|
||||
use DBValue;
|
||||
|
||||
let mut db = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
|
||||
let mut root = H256::default();
|
||||
|
||||
{
|
||||
let mut x = TrieDBMut::new(&mut db, &mut root);
|
||||
|
||||
x.insert(b"dog", b"cat").unwrap();
|
||||
x.insert(b"lunch", b"time").unwrap();
|
||||
x.insert(b"notdog", b"notcat").unwrap();
|
||||
x.insert(b"hotdog", b"hotcat").unwrap();
|
||||
x.insert(b"letter", b"confusion").unwrap();
|
||||
x.insert(b"insert", b"remove").unwrap();
|
||||
x.insert(b"pirate", b"aargh!").unwrap();
|
||||
x.insert(b"yo ho ho", b"and a bottle of rum").unwrap();
|
||||
}
|
||||
|
||||
let trie = TrieDB::new(&db, &root).unwrap();
|
||||
let mut recorder = Recorder::<H256>::new();
|
||||
|
||||
trie.get_with(b"pirate", &mut recorder).unwrap().unwrap();
|
||||
|
||||
let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect();
|
||||
assert_eq!(nodes, vec![
|
||||
vec![
|
||||
248, 81, 128, 128, 128, 128, 128, 128, 160, 50, 19, 71, 57, 213, 63, 125, 149,
|
||||
92, 119, 88, 96, 80, 126, 59, 11, 160, 142, 98, 229, 237, 200, 231, 224, 79, 118,
|
||||
215, 93, 144, 246, 179, 176, 160, 118, 211, 171, 199, 172, 136, 136, 240, 221, 59,
|
||||
110, 82, 86, 54, 23, 95, 48, 108, 71, 125, 59, 51, 253, 210, 18, 116, 79, 0, 236,
|
||||
102, 142, 48, 128, 128, 128, 128, 128, 128, 128, 128, 128
|
||||
],
|
||||
vec![
|
||||
248, 60, 206, 134, 32, 105, 114, 97, 116, 101, 134, 97, 97, 114, 103, 104, 33,
|
||||
128, 128, 128, 128, 128, 128, 128, 128, 221, 136, 32, 111, 32, 104, 111, 32, 104,
|
||||
111, 147, 97, 110, 100, 32, 97, 32, 98, 111, 116, 116, 108, 101, 32, 111, 102,
|
||||
32, 114, 117, 109, 128, 128, 128, 128, 128, 128, 128
|
||||
]
|
||||
]);
|
||||
|
||||
trie.get_with(b"letter", &mut recorder).unwrap().unwrap();
|
||||
|
||||
let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect();
|
||||
assert_eq!(nodes, vec![
|
||||
vec![
|
||||
248, 81, 128, 128, 128, 128, 128, 128, 160, 50, 19, 71, 57, 213, 63, 125, 149,
|
||||
92, 119, 88, 96, 80, 126, 59, 11, 160, 142, 98, 229, 237, 200, 231, 224, 79, 118,
|
||||
215, 93, 144, 246, 179, 176, 160, 118, 211, 171, 199, 172, 136, 136, 240, 221,
|
||||
59, 110, 82, 86, 54, 23, 95, 48, 108, 71, 125, 59, 51, 253, 210, 18, 116, 79,
|
||||
0, 236, 102, 142, 48, 128, 128, 128, 128, 128, 128, 128, 128, 128
|
||||
],
|
||||
vec![
|
||||
248, 99, 128, 128, 128, 128, 200, 131, 32, 111, 103, 131, 99, 97, 116, 128, 128,
|
||||
128, 206, 134, 32, 111, 116, 100, 111, 103, 134, 104, 111, 116, 99, 97, 116, 206,
|
||||
134, 32, 110, 115, 101, 114, 116, 134, 114, 101, 109, 111, 118, 101, 128, 128,
|
||||
160, 202, 250, 252, 153, 229, 63, 255, 13, 100, 197, 80, 120, 190, 186, 92, 5,
|
||||
255, 135, 245, 205, 180, 213, 161, 8, 47, 107, 13, 105, 218, 1, 9, 5, 128,
|
||||
206, 134, 32, 111, 116, 100, 111, 103, 134, 110, 111, 116, 99, 97, 116, 128, 128
|
||||
],
|
||||
vec![
|
||||
235, 128, 128, 128, 128, 128, 128, 208, 133, 53, 116, 116, 101, 114, 137, 99,
|
||||
111, 110, 102, 117, 115, 105, 111, 110, 202, 132, 53, 110, 99, 104, 132, 116,
|
||||
105, 109, 101, 128, 128, 128, 128, 128, 128, 128, 128, 128
|
||||
]
|
||||
]);
|
||||
}
|
||||
}
|
|
@ -1,100 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use super::triedb::TrieDB;
|
||||
use super::{Result, DBValue, Trie, TrieItem, TrieIterator, Query};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
///
/// Use it as a `Trie` trait object. You can use `raw()` to get the backing `TrieDB` object.
pub struct SecTrieDB<'db, H, C>
where
	H: Hasher + 'db,
	C: NodeCodec<H>
{
	// Underlying trie; keys passed to it are always `H::hash(key)`
	// (see the `Trie` impl below).
	raw: TrieDB<'db, H, C>
}
|
||||
|
||||
impl<'db, H, C> SecTrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
///
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
/// Returns an error if root does not exist.
|
||||
pub fn new(db: &'db HashDB<H, DBValue>, root: &'db H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(SecTrieDB { raw: TrieDB::new(db, root)? })
|
||||
}
|
||||
|
||||
/// Get a reference to the underlying raw `TrieDB` struct.
|
||||
pub fn raw(&self) -> &TrieDB<H, C> {
|
||||
&self.raw
|
||||
}
|
||||
|
||||
/// Get a mutable reference to the underlying raw `TrieDB` struct.
|
||||
pub fn raw_mut(&mut self) -> &mut TrieDB<'db, H, C> {
|
||||
&mut self.raw
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db, H, C> Trie<H, C> for SecTrieDB<'db, H, C>
where
	H: Hasher,
	C: NodeCodec<H>
{
	fn root(&self) -> &H::Out { self.raw.root() }

	fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
		// Keys are hashed before lookup: the underlying trie is keyed by `H::hash(key)`.
		self.raw.contains(H::hash(key).as_ref())
	}

	fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
		where 'a: 'key
	{
		self.raw.get_with(H::hash(key).as_ref(), query)
	}

	fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error>> + 'a>, H::Out, C::Error> {
		// Iterates the raw trie directly, so item keys are the hashed keys.
		TrieDB::iter(&self.raw)
	}
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use memorydb::MemoryDB;
	use keccak;
	use keccak_hasher::KeccakHasher;
	use ethtrie::{TrieDBMut, SecTrieDB, trie::{Trie, TrieMut}};
	use ethereum_types::H256;
	use DBValue;

	#[test]
	fn trie_to_sectrie() {
		let mut db = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			// Insert under the pre-hashed key via the plain trie...
			let mut t = TrieDBMut::new(&mut db, &mut root);
			t.insert(&keccak::keccak(&[0x01u8, 0x23]), &[0x01u8, 0x23]).unwrap();
		}
		// ...then read it back via SecTrieDB with the unhashed key, which
		// hashes internally.
		let t = SecTrieDB::new(&db, &root).unwrap();
		assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
	}
}
|
|
@ -1,110 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use super::{Result, DBValue, TrieMut, TrieDBMut};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
///
/// Use it as a `Trie` or `TrieMut` trait object. You can use `raw()` to get the backing `TrieDBMut` object.
pub struct SecTrieDBMut<'db, H, C>
where
	H: Hasher + 'db,
	C: NodeCodec<H>
{
	// Underlying mutable trie; keys passed to it are always `H::hash(key)`
	// (see the `TrieMut` impl below).
	raw: TrieDBMut<'db, H, C>
}
|
||||
|
||||
impl<'db, H, C> SecTrieDBMut<'db, H, C>
where
	H: Hasher,
	C: NodeCodec<H>
{
	/// Create a new trie with the backing database `db` and empty `root`
	/// Initialise to the state entailed by the genesis block.
	/// This guarantees the trie is built correctly.
	pub fn new(db: &'db mut HashDB<H, DBValue>, root: &'db mut H::Out) -> Self {
		SecTrieDBMut { raw: TrieDBMut::new(db, root) }
	}

	/// Create a new trie with the backing database `db` and `root`.
	///
	/// Returns an error if root does not exist.
	pub fn from_existing(db: &'db mut HashDB<H, DBValue>, root: &'db mut H::Out) -> Result<Self, H::Out, C::Error> {
		Ok(SecTrieDBMut { raw: TrieDBMut::from_existing(db, root)? })
	}

	/// Get the backing database.
	pub fn db(&self) -> &HashDB<H, DBValue> { self.raw.db() }

	/// Get the backing database, mutably.
	pub fn db_mut(&mut self) -> &mut HashDB<H, DBValue> { self.raw.db_mut() }
}
|
||||
|
||||
impl<'db, H, C> TrieMut<H, C> for SecTrieDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&mut self) -> &H::Out {
|
||||
self.raw.root()
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.raw.is_empty()
|
||||
}
|
||||
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.raw.contains(&H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
self.raw.get(&H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
|
||||
self.raw.insert(&H::hash(key).as_ref(), value)
|
||||
}
|
||||
|
||||
fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
|
||||
self.raw.remove(&H::hash(key).as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use memorydb::MemoryDB;
	use keccak;
	use keccak_hasher::KeccakHasher;
	use ethtrie::{TrieDB, SecTrieDBMut, trie::{Trie, TrieMut}};
	use ethereum_types::H256;
	use DBValue;

	#[test]
	fn sectrie_to_trie() {
		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			// Insert with the plain key via SecTrieDBMut, which hashes internally...
			let mut t = SecTrieDBMut::new(&mut memdb, &mut root);
			t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
		}
		// ...then read it back via the plain trie under the hashed key.
		let t = TrieDB::new(&memdb, &root).unwrap();
		assert_eq!(t.get(&keccak::keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
	}
}
|
|
@ -1,637 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::fmt;
|
||||
use hashdb::*;
|
||||
use nibbleslice::NibbleSlice;
|
||||
use super::node::{Node, OwnedNode};
|
||||
use node_codec::NodeCodec;
|
||||
use super::lookup::Lookup;
|
||||
use super::{Result, DBValue, Trie, TrieItem, TrieError, TrieIterator, Query};
|
||||
use bytes::Bytes;
|
||||
use std::marker::PhantomData;
|
||||
use std::borrow::Cow;
|
||||
|
||||
/// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher`
|
||||
/// implementation to generate keys and a `NodeCodec` implementation to encode/decode
|
||||
/// the nodes.
|
||||
///
|
||||
/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object.
|
||||
/// Use `get` and `contains` to query values associated with keys in the trie.
|
||||
///
|
||||
/// # Example
|
||||
/// ```
|
||||
/// extern crate patricia_trie as trie;
|
||||
/// extern crate patricia_trie_ethereum as ethtrie;
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
/// extern crate ethereum_types;
|
||||
///
|
||||
/// use trie::*;
|
||||
/// use hashdb::*;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
/// use ethereum_types::H256;
|
||||
/// use ethtrie::{TrieDB, TrieDBMut};
|
||||
///
|
||||
///
|
||||
/// fn main() {
|
||||
/// let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
|
||||
/// let mut root = H256::new();
|
||||
/// TrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap();
|
||||
/// let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
/// assert!(t.contains(b"foo").unwrap());
|
||||
/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar"));
|
||||
/// }
|
||||
/// ```
|
||||
pub struct TrieDB<'db, H, C>
where
	H: Hasher + 'db,
	C: NodeCodec<H>
{
	// Read-only backing key-value store holding the encoded trie nodes.
	db: &'db HashDB<H, DBValue>,
	// Hash of the root node; `new` verifies it exists in `db`.
	root: &'db H::Out,
	/// The number of hashes performed so far in operations on this trie.
	hash_count: usize,
	// Ties the codec type parameter `C` to the struct without storing a value.
	codec_marker: PhantomData<C>,
}
|
||||
|
||||
impl<'db, H, C> TrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and `root`
|
||||
/// Returns an error if `root` does not exist
|
||||
pub fn new(db: &'db HashDB<H, DBValue>, root: &'db H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
if !db.contains(root) {
|
||||
Err(Box::new(TrieError::InvalidStateRoot(*root)))
|
||||
} else {
|
||||
Ok(TrieDB {db, root, hash_count: 0, codec_marker: PhantomData})
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&'db self) -> &'db HashDB<H, DBValue> { self.db }
|
||||
|
||||
/// Get the data of the root node.
|
||||
fn root_data(&self) -> Result<DBValue, H::Out, C::Error> {
|
||||
self.db
|
||||
.get(self.root)
|
||||
.ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
|
||||
}
|
||||
|
||||
/// Given some node-describing data `node`, return the actual node RLP.
|
||||
/// This could be a simple identity operation in the case that the node is sufficiently small, but
|
||||
/// may require a database lookup.
|
||||
fn get_raw_or_lookup(&'db self, node: &[u8]) -> Result<Cow<'db, DBValue>, H::Out, C::Error> {
|
||||
match C::try_decode_hash(node) {
|
||||
Some(key) => {
|
||||
self.db
|
||||
.get(&key)
|
||||
.map(|v| Cow::Owned(v))
|
||||
.ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
|
||||
}
|
||||
None => Ok(Cow::Owned(DBValue::from_slice(node)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db, H, C> Trie<H, C> for TrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&self) -> &H::Out { self.root }
|
||||
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
Lookup {
|
||||
db: self.db,
|
||||
query: query,
|
||||
hash: self.root.clone(),
|
||||
marker: PhantomData::<C>,
|
||||
}.look_up(NibbleSlice::new(key))
|
||||
}
|
||||
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item=TrieItem<H::Out, C::Error>> + 'a>, H::Out, C::Error> {
|
||||
TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
|
||||
}
|
||||
}
|
||||
|
||||
// This is for pretty debug output only
|
||||
struct TrieAwareDebugNode<'db, 'a, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H> + 'db
|
||||
{
|
||||
trie: &'db TrieDB<'db, H, C>,
|
||||
key: &'a[u8]
|
||||
}
|
||||
|
||||
impl<'db, 'a, H, C> fmt::Debug for TrieAwareDebugNode<'db, 'a, H, C>
where
	H: Hasher,
	C: NodeCodec<H>
{
	// Resolves `self.key` to node data and prints the decoded node,
	// recursing through children so the whole subtree is rendered.
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		if let Ok(node) = self.trie.get_raw_or_lookup(self.key) {
			match C::decode(&node) {
				Ok(Node::Leaf(slice, value)) => f.debug_struct("Node::Leaf")
					.field("slice", &slice)
					.field("value", &value)
					.finish(),
				// Wrap the referenced child so it is resolved and printed inline.
				Ok(Node::Extension(ref slice, ref item)) => f.debug_struct("Node::Extension")
					.field("slice", &slice)
					.field("item", &TrieAwareDebugNode{trie: self.trie, key: item})
					.finish(),
				Ok(Node::Branch(ref nodes, ref value)) => {
					// Wrap each of the children so they recurse as well.
					let nodes: Vec<TrieAwareDebugNode<H, C>> = nodes.into_iter().map(|n| TrieAwareDebugNode{trie: self.trie, key: n} ).collect();
					f.debug_struct("Node::Branch")
						.field("nodes", &nodes)
						.field("value", &value)
						.finish()
				},
				Ok(Node::Empty) => f.debug_struct("Node::Empty").finish(),

				// Undecodable node data: report it rather than failing the formatter.
				Err(e) => f.debug_struct("BROKEN_NODE")
					.field("key", &self.key)
					.field("error", &format!("ERROR decoding node branch Rlp: {}", e))
					.finish()
			}
		} else {
			// The hash reference was not present in the backing database.
			f.debug_struct("BROKEN_NODE")
				.field("key", &self.key)
				.field("error", &"Not found")
				.finish()
		}
	}
}
||||
|
||||
impl<'db, H, C> fmt::Debug for TrieDB<'db, H, C>
where
	H: Hasher,
	C: NodeCodec<H>
{
	// Pretty-prints the whole trie by recursively rendering from the root.
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		// Panics if the root vanished from the database after construction.
		let root_rlp = self.db.get(self.root).expect("Trie root not found!");
		f.debug_struct("TrieDB")
			.field("hash_count", &self.hash_count)
			.field("root", &TrieAwareDebugNode {
				trie: self,
				key: &root_rlp
			})
			.finish()
	}
}
||||
|
||||
/// Progress of the iterator through a single node on the trail.
#[derive(Clone, Eq, PartialEq)]
enum Status {
	// About to visit the node for the first time.
	Entering,
	// Positioned at the node itself (where a leaf/branch value is yielded).
	At,
	// Positioned at the branch child with the given index.
	AtChild(usize),
	// Finished with this node; it will be popped from the trail.
	Exiting,
}
||||
|
||||
/// One step on the path from the root to the iterator's current position:
/// a node plus how far the iterator has progressed through it.
#[derive(Eq, PartialEq)]
struct Crumb {
	// Owned copy of the trie node at this point of the path.
	node: OwnedNode,
	// Visitation state; advanced by `increment`.
	status: Status,
}
||||
|
||||
impl Crumb {
	/// Move on to next status in the node's sequence.
	fn increment(&mut self) {
		self.status = match (&self.status, &self.node) {
			// An empty node has nothing to visit: leave immediately.
			(_, &OwnedNode::Empty) => Status::Exiting,
			// First visit: move to the node itself.
			(&Status::Entering, _) => Status::At,
			// After a branch's own value, walk its children starting at 0.
			(&Status::At, &OwnedNode::Branch(_)) => Status::AtChild(0),
			// Advance through the branch children one index at a time.
			(&Status::AtChild(x), &OwnedNode::Branch(_)) if x < 15 => Status::AtChild(x + 1),
			// Leaves/extensions after `At`, and branches past child 15, are done.
			_ => Status::Exiting,
		}
	}
}
||||
|
||||
/// Iterator for going through all values in the trie.
pub struct TrieDBIterator<'a, H: Hasher + 'a, C: NodeCodec<H> + 'a> {
	// The trie being iterated.
	db: &'a TrieDB<'a, H, C>,
	// Stack of nodes from the root down to the current position.
	trail: Vec<Crumb>,
	// Nibbles of the key at the current position (two nibbles per key byte).
	key_nibbles: Bytes,
}
||||
|
||||
impl<'a, H: Hasher, C: NodeCodec<H>> TrieDBIterator<'a, H, C> {
	/// Create a new iterator.
	/// Fails if the root node is missing from the backing database.
	pub fn new(db: &'a TrieDB<H, C>) -> Result<TrieDBIterator<'a, H, C>, H::Out, C::Error> {
		let mut r = TrieDBIterator { db, trail: Vec::with_capacity(8), key_nibbles: Vec::with_capacity(64) };
		// Prime the trail with the root node so `next` has something to walk.
		db.root_data().and_then(|root| r.descend(&root))?;
		Ok(r)
	}

	// Descend from the node encoded in `node_data` towards the first entry with
	// key >= `key`, pushing a crumb for each node passed on the way.
	fn seek<'key>(&mut self, node_data: &DBValue, mut key: NibbleSlice<'key>) -> Result<(), H::Out, C::Error> {
		let mut node_data = Cow::Borrowed(node_data);
		loop {
			// `(data, mid)`: the child node to continue from, and how many
			// nibbles of `key` were consumed to reach it.
			let (data, mid) = {
				let node = C::decode(&node_data).expect("encoded data read from db; qed");
				match node {
					Node::Leaf(slice, _) => {
						// A leaf ends the search: mark it Entering (to be yielded)
						// if its partial key is >= the sought key, Exiting (skipped)
						// otherwise.
						if slice >= key {
							self.trail.push(Crumb {
								status: Status::Entering,
								node: node.clone().into(),
							});
						} else {
							self.trail.push(Crumb {
								status: Status::Exiting,
								node: node.clone().into(),
							});
						}

						self.key_nibbles.extend(slice.iter());
						return Ok(())
					},
					Node::Extension(ref slice, ref item) => {
						if key.starts_with(slice) {
							// Sought key continues below this extension: follow it.
							self.trail.push(Crumb {
								status: Status::At,
								node: node.clone().into(),
							});
							self.key_nibbles.extend(slice.iter());
							let data = self.db.get_raw_or_lookup(&*item)?;
							(data, slice.len())
						} else {
							// Paths diverge here: iterate this whole subtree instead.
							self.descend(&node_data)?;
							return Ok(())
						}
					},
					Node::Branch(ref nodes, _) => match key.is_empty() {
						true => {
							// Sought key exhausted: this branch is the position.
							self.trail.push(Crumb {
								status: Status::Entering,
								node: node.clone().into(),
							});
							return Ok(())
						},
						false => {
							// Follow the child indexed by the next key nibble.
							let i = key.at(0);
							self.trail.push(Crumb {
								status: Status::AtChild(i as usize),
								node: node.clone().into(),
							});
							self.key_nibbles.push(i);
							let child = self.db.get_raw_or_lookup(&*nodes[i as usize])?;
							(child, 1)
						}
					},
					// Empty node: nothing to position on.
					_ => return Ok(()),
				}
			};

			node_data = data;
			key = key.mid(mid);
		}
	}

	/// Descend into a payload.
	fn descend(&mut self, d: &[u8]) -> Result<(), H::Out, C::Error> {
		let node_data = &self.db.get_raw_or_lookup(d)?;
		let node = C::decode(&node_data).expect("encoded node read from db; qed");
		Ok(self.descend_into_node(node.into()))
	}

	/// Descend into a payload.
	// Pushes the decoded node onto the trail and appends its partial-key
	// nibbles (leaf/extension) to `key_nibbles`.
	fn descend_into_node(&mut self, node: OwnedNode) {
		self.trail.push(Crumb { status: Status::Entering, node });
		match &self.trail.last().expect("just pushed item; qed").node {
			&OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => {
				self.key_nibbles.extend((0..n.len()).map(|i| n.at(i)));
			},
			_ => {}
		}
	}

	/// The present key.
	fn key(&self) -> Bytes {
		// collapse the key_nibbles down to bytes.
		let nibbles = &self.key_nibbles;
		let mut i = 1;
		let mut result = Bytes::with_capacity(nibbles.len() / 2);
		let len = nibbles.len();
		while i < len {
			// Pair up consecutive nibbles; a trailing odd nibble is dropped.
			result.push(nibbles[i - 1] * 16 + nibbles[i]);
			i += 2;
		}
		result
	}
}
||||
|
||||
impl<'a, H: Hasher, C: NodeCodec<H>> TrieIterator<H, C> for TrieDBIterator<'a, H, C> {
|
||||
/// Position the iterator on the first element with key >= `key`
|
||||
fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, C::Error> {
|
||||
self.trail.clear();
|
||||
self.key_nibbles.clear();
|
||||
let root_rlp = self.db.root_data()?;
|
||||
self.seek(&root_rlp, NibbleSlice::new(key.as_ref()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, H: Hasher, C: NodeCodec<H>> Iterator for TrieDBIterator<'a, H, C> {
	type Item = TrieItem<'a, H::Out, C::Error>;

	// Depth-first walk over the trail: each call advances the top crumb and
	// either yields a (key, value) pair, descends into a child, or pops.
	fn next(&mut self) -> Option<Self::Item> {
		// What to do after inspecting the top-of-trail crumb.
		enum IterStep<'b, O, E> {
			Continue,
			PopTrail,
			Descend(Result<Cow<'b, DBValue>, O, E>),
		}
		loop {
			let iter_step = {
				// `None` from `last_mut` means the trail is empty: iteration done.
				self.trail.last_mut()?.increment();
				let b = self.trail.last().expect("trail.last_mut().is_some(); qed");

				match (b.status.clone(), &b.node) {
					(Status::Exiting, n) => {
						// Done with this node: remove its nibbles from the key...
						match *n {
							OwnedNode::Leaf(ref n, _) | OwnedNode::Extension(ref n, _) => {
								let l = self.key_nibbles.len();
								self.key_nibbles.truncate(l - n.len());
							},
							OwnedNode::Branch(_) => { self.key_nibbles.pop(); },
							_ => {}
						}
						// ...and pop it off the trail below.
						IterStep::PopTrail
					},
					// Branch carrying a value at the branch itself: yield it.
					(Status::At, &OwnedNode::Branch(ref branch)) if branch.has_value() => {
						let value = branch.get_value().expect("already checked `has_value`");
						return Some(Ok((self.key(), DBValue::from_slice(value))));
					},
					// Leaf: yield its value.
					(Status::At, &OwnedNode::Leaf(_, ref v)) => {
						return Some(Ok((self.key(), v.clone())));
					},
					// Extension: follow it down to the referenced node.
					(Status::At, &OwnedNode::Extension(_, ref d)) => {
						IterStep::Descend::<H::Out, C::Error>(self.db.get_raw_or_lookup(&*d))
					},
					// Value-less branch: nothing to yield at the node itself.
					(Status::At, &OwnedNode::Branch(_)) => IterStep::Continue,
					// Non-empty branch child: descend into it.
					(Status::AtChild(i), &OwnedNode::Branch(ref branch)) if !branch[i].is_empty() => {
						match i {
							// First child: open a fresh nibble slot on the key.
							0 => self.key_nibbles.push(0),
							// Later children: overwrite that slot in place.
							i => *self.key_nibbles.last_mut()
								.expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
						}
						IterStep::Descend::<H::Out, C::Error>(self.db.get_raw_or_lookup(&branch[i]))
					},
					// Empty branch child: skip it, keeping the nibble slot in sync.
					(Status::AtChild(i), &OwnedNode::Branch(_)) => {
						if i == 0 {
							self.key_nibbles.push(0);
						}
						IterStep::Continue
					},
					_ => panic!() // Should never see Entering or AtChild without a Branch here.
				}
			};

			match iter_step {
				IterStep::PopTrail => {
					self.trail.pop();
				},
				IterStep::Descend::<H::Out, C::Error>(Ok(d)) => {
					let node = C::decode(&d).expect("encoded data read from db; qed");
					self.descend_into_node(node.into())
				},
				IterStep::Descend::<H::Out, C::Error>(Err(e)) => {
					// Missing database entry: surface the error to the caller.
					return Some(Err(e))
				}
				IterStep::Continue => {},
			}
		}
	}
}
||||
|
||||
#[cfg(test)]
mod tests {
	use DBValue;
	use keccak_hasher::KeccakHasher;
	use memorydb::MemoryDB;
	use ethtrie::{TrieDB, TrieDBMut, RlpCodec, trie::{Trie, TrieMut, Lookup}};
	use ethereum_types::H256;

	// Iterating the whole trie yields every inserted (key, value) pair in key order.
	#[test]
	fn iterator() {
		let d = vec![DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B")];

		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			let mut t = TrieDBMut::new(&mut memdb, &mut root);
			for x in &d {
				t.insert(x, x).unwrap();
			}
		}

		let t = TrieDB::new(&memdb, &root).unwrap();
		assert_eq!(d.iter().map(|i| i.clone().into_vec()).collect::<Vec<_>>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::<Vec<_>>());
		assert_eq!(d, t.iter().unwrap().map(|x| x.unwrap().1).collect::<Vec<_>>());
	}

	// `TrieIterator::seek` positions the iterator on the first element with
	// key >= the sought key — exercised with keys before, at, between and
	// after the stored entries.
	#[test]
	fn iterator_seek() {
		let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ];

		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			let mut t = TrieDBMut::new(&mut memdb, &mut root);
			for x in &d {
				t.insert(x, x).unwrap();
			}
		}

		let t = TrieDB::new(&memdb, &root).unwrap();
		let mut iter = t.iter().unwrap();
		assert_eq!(iter.next().unwrap().unwrap(), (b"A".to_vec(), DBValue::from_slice(b"A")));
		iter.seek(b"!").unwrap();
		assert_eq!(d, iter.map(|x| x.unwrap().1).collect::<Vec<_>>());
		let mut iter = t.iter().unwrap();
		iter.seek(b"A").unwrap();
		assert_eq!(d, &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
		let mut iter = t.iter().unwrap();
		iter.seek(b"AA").unwrap();
		assert_eq!(&d[1..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
		let mut iter = t.iter().unwrap();
		iter.seek(b"A!").unwrap();
		assert_eq!(&d[1..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
		let mut iter = t.iter().unwrap();
		iter.seek(b"AB").unwrap();
		assert_eq!(&d[2..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
		let mut iter = t.iter().unwrap();
		iter.seek(b"AB!").unwrap();
		assert_eq!(&d[3..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
		let mut iter = t.iter().unwrap();
		iter.seek(b"B").unwrap();
		assert_eq!(&d[3..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
		let mut iter = t.iter().unwrap();
		iter.seek(b"C").unwrap();
		assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
	}

	// `get_with` feeds the stored value through the supplied query closure.
	#[test]
	fn get_len() {
		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			let mut t = TrieDBMut::new(&mut memdb, &mut root);
			t.insert(b"A", b"ABC").unwrap();
			t.insert(b"B", b"ABCBA").unwrap();
		}

		let t = TrieDB::new(&memdb, &root).unwrap();
		assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()).unwrap(), Some(3));
		assert_eq!(t.get_with(b"B", |x: &[u8]| x.len()).unwrap(), Some(5));
		assert_eq!(t.get_with(b"C", |x: &[u8]| x.len()).unwrap(), None);
	}

	// Both `{:?}` and `{:#?}` on a TrieDB recursively render the full node
	// structure (via TrieAwareDebugNode).
	#[test]
	fn debug_output_supports_pretty_print() {
		let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ];

		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		let root = {
			let mut t = TrieDBMut::new(&mut memdb, &mut root);
			for x in &d {
				t.insert(x, x).unwrap();
			}
			t.root().clone()
		};
		let t = TrieDB::new(&memdb, &root).unwrap();

		assert_eq!(format!("{:?}", t), "TrieDB { hash_count: 0, root: Node::Extension { slice: 4, item: Node::Branch { nodes: [Node::Empty, Node::Branch { nodes: [Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Branch { nodes: [Node::Empty, Node::Leaf { slice: , value: [65, 65] }, Node::Leaf { slice: , value: [65, 66] }, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty], value: None }, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty], value: Some([65]) }, Node::Leaf { slice: , value: [66] }, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty], value: None } } }");
		assert_eq!(format!("{:#?}", t),
"TrieDB {
    hash_count: 0,
    root: Node::Extension {
        slice: 4,
        item: Node::Branch {
            nodes: [
                Node::Empty,
                Node::Branch {
                    nodes: [
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Branch {
                            nodes: [
                                Node::Empty,
                                Node::Leaf {
                                    slice: ,
                                    value: [
                                        65,
                                        65
                                    ]
                                },
                                Node::Leaf {
                                    slice: ,
                                    value: [
                                        65,
                                        66
                                    ]
                                },
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty,
                                Node::Empty
                            ],
                            value: None
                        },
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty,
                        Node::Empty
                    ],
                    value: Some(
                        [
                            65
                        ]
                    )
                },
                Node::Leaf {
                    slice: ,
                    value: [
                        66
                    ]
                },
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty,
                Node::Empty
            ],
            value: None
        }
    }
}");
	}

	// A value that fails to decode inside `Lookup` surfaces as the query's
	// error value rather than panicking or aborting the lookup.
	#[test]
	fn test_lookup_with_corrupt_data_returns_decoder_error() {
		use rlp;
		use ethereum_types::H512;
		use std::marker::PhantomData;
		use ethtrie::trie::NibbleSlice;

		let mut memdb = MemoryDB::<KeccakHasher, DBValue>::new();
		let mut root = H256::new();
		{
			let mut t = TrieDBMut::new(&mut memdb, &mut root);
			t.insert(b"A", b"ABC").unwrap();
			t.insert(b"B", b"ABCBA").unwrap();
		}

		let t = TrieDB::new(&memdb, &root).unwrap();

		// query for an invalid data type to trigger an error
		let q = rlp::decode::<H512>;
		let lookup = Lookup::<_, RlpCodec, _>{ db: t.db(), query: q, hash: root, marker: PhantomData };
		let query_result = lookup.look_up(NibbleSlice::new(b"A"));
		assert_eq!(query_result.unwrap().unwrap().unwrap_err(), rlp::DecoderError::RlpIsTooShort);
	}
}
File diff suppressed because it is too large
Load Diff
|
@ -1,13 +0,0 @@
|
|||
[package]
|
||||
name = "keccak-hasher"
|
||||
version = "0.1.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
description = "Keccak-256 implementation of the Hasher trait"
|
||||
repository = "https://github.com/paritytech/parity/"
|
||||
license = "GPL-3.0"
|
||||
|
||||
[dependencies]
|
||||
ethereum-types = "0.4"
|
||||
tiny-keccak = "1.4.2"
|
||||
hashdb = { version = "0.3.0", path = "../../hashdb" }
|
||||
plain_hasher = { path = "../../plain_hasher" }
|
|
@ -1,41 +0,0 @@
|
|||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Hasher implementation for the Keccak-256 hash
|
||||
//!
|
||||
//! This is deprecated in favor of trie crate.
|
||||
extern crate hashdb;
|
||||
extern crate ethereum_types;
|
||||
extern crate tiny_keccak;
|
||||
extern crate plain_hasher;
|
||||
|
||||
use hashdb::Hasher;
|
||||
use ethereum_types::H256;
|
||||
use tiny_keccak::Keccak;
|
||||
use plain_hasher::PlainHasher;
|
||||
/// Concrete `Hasher` impl for the Keccak-256 hash
#[derive(Default, Debug, Clone, PartialEq)]
pub struct KeccakHasher;
impl Hasher for KeccakHasher {
	// 256-bit (32-byte) digest type.
	type Out = H256;
	// Hasher used when these digests key a HashMap/HashSet; presumably
	// PlainHasher just folds the already-uniform digest bytes — confirm
	// in the plain_hasher crate.
	type StdHasher = PlainHasher;
	const LENGTH: usize = 32;
	// Hash arbitrary bytes with Keccak-256.
	fn hash(x: &[u8]) -> Self::Out {
		let mut out = [0;32];
		Keccak::keccak256(x, &mut out);
		out.into()
	}
}
Loading…
Reference in New Issue