Checkpoint commit; tons of disorganized changes for rustc

BTW after all this is done I'm gonna indent the entire codebase...
so `git blame` is gonna be totally broken anyway, hence my
capricious cadence of commits.
Andrew Poelstra 2015-04-05 12:58:49 -05:00
parent 160f2f9ea6
commit 7738722ab5
19 changed files with 627 additions and 586 deletions


@@ -23,7 +23,7 @@
 //!
 use std::num::Zero;
-use std::marker;
+use std::{marker, ptr};
 use blockdata::block::{Block, BlockHeader};
 use blockdata::transaction::Transaction;
@@ -102,8 +102,8 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for BlockchainNode {
 required_difficulty: try!(ConsensusDecodable::consensus_decode(d)),
 height: try!(ConsensusDecodable::consensus_decode(d)),
 has_txdata: try!(ConsensusDecodable::consensus_decode(d)),
-prev: RawPtr::null(),
-next: RawPtr::null()
+prev: ptr::null(),
+next: ptr::null()
 })
 }
 }
@@ -159,7 +159,7 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for Blockchain {
 let prevptr =
 match unsafe { (*raw_tree).lookup(&hash, 256) } {
 Some(node) => &**node as NodePtr,
-None => RawPtr::null()
+None => ptr::null()
 };
 node.prev = prevptr;
 }
@@ -191,7 +191,7 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for Blockchain {
 }
 // TODO: this should maybe be public, in which case it needs to be tagged
-// with a ContravariantLifetime marker tying it to the tree's lifetime.
+// with a PhantomData marker tying it to the tree's lifetime.
 struct LocatorHashIter {
 index: NodePtr,
 count: usize,
@@ -215,7 +215,7 @@ impl Iterator<Sha256dHash> for LocatorHashIter {
 self.index = unsafe { (*self.index).prev };
 // If we are not at the genesis, rewind `self.skip` times, or until we are.
 if self.index.is_not_null() {
-for _ in range(1, self.skip) {
+for _ in 1..self.skip {
 unsafe {
 if (*self.index).prev.is_null() {
 break;
@@ -241,7 +241,7 @@ pub struct BlockIter<'tree> {
 // mutable blockchain methods call .mut_borrow() on the block
 // links, which would blow up if the iterator did a regular
 // borrow at the same time.
-marker: marker::ContravariantLifetime<'tree>
+marker: marker::PhantomData<&'tree Blockchain>
 }
 /// An iterator over blocks in reverse blockheight order. Note that this
@@ -253,7 +253,7 @@ pub struct BlockIter<'tree> {
 pub struct RevBlockIter<'tree> {
 index: NodePtr,
 // See comment in BlockIter for why we need this
-marker: marker::ContravariantLifetime<'tree>
+marker: marker::PhantomData<&'tree Blockchain>
 }
 /// An iterator over blocks in reverse blockheight order, which yielding only
@@ -313,7 +313,7 @@ impl<'tree> Iterator<&'tree Block> for RevStaleBlockIter<'tree> {
 if next_index.is_not_null() &&
 (*next_index).next != self.index &&
 (&*next_index).is_on_main_chain(self.chain) {
-self.index = RawPtr::null();
+self.index = ptr::null();
 } else {
 self.index = next_index;
 }
@@ -348,8 +348,8 @@ impl Blockchain {
 block: genesis,
 height: 0,
 has_txdata: true,
-prev: RawPtr::null(),
-next: RawPtr::null()
+prev: ptr::null(),
+next: ptr::null()
 });
 let raw_ptr = &*new_node as NodePtr;
 Blockchain {
@@ -449,7 +449,7 @@ impl Blockchain {
 let timespan = unsafe {
 // Scan back DIFFCHANGE_INTERVAL blocks
 let mut scan = prev;
-for _ in range(0, DIFFCHANGE_INTERVAL - 1) {
+for _ in 0..(DIFFCHANGE_INTERVAL - 1) {
 scan = (*scan).prev;
 }
 // Get clamped timespan between first and last blocks
@@ -498,7 +498,7 @@ impl Blockchain {
 height: unsafe { (*prev).height + 1 },
 has_txdata: has_txdata,
 prev: prev,
-next: RawPtr::null()
+next: ptr::null()
 });
 unsafe {
 let prev = prev as *mut BlockchainNode;
@@ -568,11 +568,11 @@ impl Blockchain {
 pub fn iter<'a>(&'a self, start_hash: Sha256dHash) -> BlockIter<'a> {
 let start = match self.tree.lookup(&start_hash.into_le(), 256) {
 Some(boxptr) => &**boxptr as NodePtr,
-None => RawPtr::null()
+None => ptr::null()
 };
 BlockIter {
 index: start,
-marker: marker::ContravariantLifetime::<'a>
+marker: marker::PhantomData
 }
 }
@@ -580,11 +580,11 @@ impl Blockchain {
 pub fn rev_iter<'a>(&'a self, start_hash: Sha256dHash) -> RevBlockIter<'a> {
 let start = match self.tree.lookup(&start_hash.into_le(), 256) {
 Some(boxptr) => &**boxptr as NodePtr,
-None => RawPtr::null()
+None => ptr::null()
 };
 RevBlockIter {
 index: start,
-marker: marker::ContravariantLifetime::<'a>
+marker: marker::PhantomData
 }
 }
@@ -594,12 +594,12 @@ impl Blockchain {
 Some(boxptr) => {
 // If we are already on the main chain, we have a dead iterator
 if boxptr.is_on_main_chain(self) {
-RawPtr::null()
+ptr::null()
 } else {
 &**boxptr as NodePtr
 }
 }
-None => RawPtr::null()
+None => ptr::null()
 };
 RevStaleBlockIter {
 index: start,
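
The `marker::ContravariantLifetime` type was removed in the run-up to Rust 1.0; the replacement used throughout this commit is `marker::PhantomData`, which lets an iterator that only holds raw pointers still borrow the structure it walks. A minimal sketch of the pattern, with made-up names rather than the crate's actual `Blockchain`/`NodePtr` types:

```rust
use std::marker::PhantomData;
use std::ptr;

struct Node { value: u32, next: *const Node }

struct Owner { head: Node }

// PhantomData<&'tree Owner> makes the borrow checker treat the iterator as
// if it held a &'tree Owner, so the owner cannot be dropped or mutably
// borrowed while the iterator is alive, even though only raw pointers are stored.
struct NodeIter<'tree> {
    cursor: *const Node,
    marker: PhantomData<&'tree Owner>,
}

impl Owner {
    fn iter<'a>(&'a self) -> NodeIter<'a> {
        NodeIter { cursor: &self.head, marker: PhantomData }
    }
}

impl<'tree> Iterator for NodeIter<'tree> {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        if self.cursor.is_null() { return None; }
        // Sound because the PhantomData borrow keeps `Owner` alive.
        let node = unsafe { &*self.cursor };
        self.cursor = node.next;
        Some(node.value)
    }
}

fn main() {
    let owner = Owner { head: Node { value: 42, next: ptr::null() } };
    assert_eq!(owner.iter().collect::<Vec<_>>(), vec![42]);
}
```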


@@ -564,7 +564,7 @@ impl All {
 /// Classifies an Opcode into a broad class
 #[inline]
-pub fn classify(&self) -> OpcodeClass {
+pub fn classify(&self) -> Class {
 // 17 opcodes
 if *self == All::OP_VERIF || *self == All::OP_VERNOTIF ||
 *self == All::OP_CAT || *self == All::OP_SUBSTR ||
@@ -574,30 +574,30 @@ impl All {
 *self == All::OP_2MUL || *self == All::OP_2DIV ||
 *self == All::OP_MUL || *self == All::OP_DIV || *self == All::OP_MOD ||
 *self == All::OP_LSHIFT || *self == All::OP_RSHIFT {
-OpcodeClass::IllegalOp
+Class::IllegalOp
 // 11 opcodes
 } else if *self == All::OP_NOP ||
 (All::OP_NOP1 as u8 <= *self as u8 &&
 *self as u8 <= All::OP_NOP10 as u8) {
-OpcodeClass::NoOp
+Class::NoOp
 // 75 opcodes
 } else if *self == All::OP_RESERVED || *self == All::OP_VER || *self == All::OP_RETURN ||
 *self == All::OP_RESERVED1 || *self == All::OP_RESERVED2 ||
 *self as u8 >= All::OP_RETURN_186 as u8 {
-OpcodeClass::ReturnOp
+Class::ReturnOp
 // 1 opcode
 } else if *self == All::OP_PUSHNUM_NEG1 {
-OpcodeClass::PushNum(-1)
+Class::PushNum(-1)
 // 16 opcodes
 } else if All::OP_PUSHNUM_1 as u8 <= *self as u8 &&
 *self as u8 <= All::OP_PUSHNUM_16 as u8 {
-OpcodeClass::PushNum(1 + *self as isize - OP_PUSHNUM_1 as isize)
+Class::PushNum(1 + *self as isize - All::OP_PUSHNUM_1 as isize)
 // 76 opcodes
 } else if *self as u8 <= All::OP_PUSHBYTES_75 as u8 {
-OpcodeClass::PushBytes(*self as usize)
+Class::PushBytes(*self as usize)
 // 60 opcodes
 } else {
-OpcodeClass::Ordinary(unsafe { transmute(*self) })
+Class::Ordinary(unsafe { transmute(*self) })
 }
 }
 }
@@ -629,7 +629,7 @@ pub static OP_TRUE: All = OP_PUSHNUM_1;
 /// Broad categories of opcodes with similar behavior
 #[derive(Clone, PartialEq, Eq, Debug)]
-pub enum OpcodeClass {
+pub enum Class {
 /// Pushes the given number onto the stack
 PushNum(isize),
 /// Pushes the given number of bytes onto the stack
@@ -644,7 +644,7 @@ pub enum OpcodeClass {
 Ordinary(Ordinary)
 }
-impl json::ToJson for OpcodeClass {
+impl json::ToJson for Class {
 fn to_json(&self) -> json::Json {
 json::String(self.to_string())
 }
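
Rust 1.0 made enum variants namespaced, so everything here has to be spelled `Class::NoOp`, `All::OP_NOP`, and so on; renaming `OpcodeClass` to `Class` keeps call sites short once the type is normally written `opcodes::Class`. A rough illustration of the same classify-by-byte-range idea, using a toy enum and classifier rather than the crate's real opcode table:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Class {
    PushNum(isize),
    PushBytes(usize),
    NoOp,
    Other,
}

// Toy classifier over a raw opcode byte, mirroring the range checks above:
// 0x00..=0x4b push that many bytes, 0x51..=0x60 push the numbers 1..=16,
// 0x61 is OP_NOP, and everything else is lumped together here.
fn classify(op: u8) -> Class {
    if op <= 0x4b {
        Class::PushBytes(op as usize)
    } else if (0x51..=0x60).contains(&op) {
        Class::PushNum(1 + op as isize - 0x51)
    } else if op == 0x61 {
        Class::NoOp
    } else {
        Class::Other
    }
}

fn main() {
    assert_eq!(classify(0x51), Class::PushNum(1));   // OP_PUSHNUM_1
    assert_eq!(classify(0x60), Class::PushNum(16));  // OP_PUSHNUM_16
    assert_eq!(classify(0x20), Class::PushBytes(32));
    assert_eq!(classify(0x61), Class::NoOp);         // OP_NOP
}
```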

File diff suppressed because it is too large.


@@ -27,7 +27,7 @@ use std::default::Default;
 use serialize::json;
 use util::hash::Sha256dHash;
-use blockdata::script::{self, Script, ScriptError, ScriptTrace, read_scriptbool};
+use blockdata::script::{self, Script, ScriptTrace, read_scriptbool};
 use blockdata::utxoset::UtxoSet;
 use network::encodable::ConsensusEncodable;
 use network::serialize::BitcoinHash;
@@ -81,9 +81,9 @@ impl TxOut {
 if self.script_pubkey.len() == 25 &&
 self.script_pubkey.slice_to(3) == &[0x76, 0xa9, 0x14] &&
 self.script_pubkey.slice_from(23) == &[0x88, 0xac] {
-PayToPubkeyHash(self.script_pubkey.slice(3, 23).to_address(network))
+ScriptPubkeyTemplate::PayToPubkeyHash(self.script_pubkey.slice(3, 23).to_address(network))
 } else {
-Unknown
+ScriptPubkeyTemplate::Unknown
 }
 }
 }
@@ -104,13 +104,13 @@ pub struct Transaction {
 /// A transaction error
 #[derive(PartialEq, Eq, Clone, Debug)]
-pub enum TransactionError {
+pub enum Error {
 /// Concatenated script failed in the input half (script error)
-InputScriptFailure(ScriptError),
+InputScriptFailure(script::Error),
 /// Concatenated script failed in the output half (script error)
-OutputScriptFailure(ScriptError),
+OutputScriptFailure(script::Error),
 /// P2SH serialized script failed (script error)
-P2shScriptFailure(ScriptError),
+P2shScriptFailure(script::Error),
 /// P2SH serialized script ended with false at the top of the stack
 P2shScriptReturnedFalse,
 /// P2SH serialized script ended with nothing in the stack
@@ -123,7 +123,7 @@ pub enum TransactionError {
 InputNotFound(Sha256dHash, u32),
 }
-impl json::ToJson for TransactionError {
+impl json::ToJson for Error {
 fn to_json(&self) -> json::Json {
 json::String(self.to_string())
 }
@@ -137,7 +137,7 @@ pub struct InputTrace {
 sig_trace: ScriptTrace,
 pubkey_trace: Option<ScriptTrace>,
 p2sh_trace: Option<ScriptTrace>,
-error: Option<TransactionError>
+error: Option<Error>
 }
 impl_json!(ScriptTrace, script, initial_stack, iterations, error);
@@ -158,7 +158,7 @@ impl TxIn {
 pub fn validate(&self,
 utxoset: &UtxoSet,
 txn: &Transaction,
-index: usize) -> Result<(), TransactionError> {
+index: usize) -> Result<(), Error> {
 let txo = utxoset.get_utxo(self.prev_hash, self.prev_index);
 match txo {
 Some((_, txo)) => {
@@ -168,7 +168,7 @@ impl TxIn {
 let mut stack = Vec::with_capacity(6);
 match self.script_sig.evaluate(&mut stack, Some((txn, index)), None) {
 Ok(_) => {}
-Err(e) => { return Err(InputScriptFailure(e)); }
+Err(e) => { return Err(Error::InputScriptFailure(e)); }
 }
 if txo.script_pubkey.is_p2sh() && stack.len() > 0 {
 p2sh_stack = stack.clone();
@@ -180,32 +180,32 @@ impl TxIn {
 }
 match txo.script_pubkey.evaluate(&mut stack, Some((txn, index)), None) {
 Ok(_) => {}
-Err(e) => { return Err(OutputScriptFailure(e)); }
+Err(e) => { return Err(Error::OutputScriptFailure(e)); }
 }
 match stack.pop() {
 Some(v) => {
 if !read_scriptbool(v.as_slice()) {
-return Err(ScriptReturnedFalse);
+return Err(Error::ScriptReturnedFalse);
 }
 }
-None => { return Err(ScriptReturnedEmptyStack); }
+None => { return Err(Error::ScriptReturnedEmptyStack); }
 }
 if txo.script_pubkey.is_p2sh() {
 match p2sh_script.evaluate(&mut p2sh_stack, Some((txn, index)), None) {
 Ok(_) => {}
-Err(e) => { return Err(P2shScriptFailure(e)); }
+Err(e) => { return Err(Error::P2shScriptFailure(e)); }
 }
 match p2sh_stack.pop() {
 Some(v) => {
 if !read_scriptbool(v.as_slice()) {
-return Err(P2shScriptReturnedFalse);
+return Err(Error::P2shScriptReturnedFalse);
 }
 }
-None => { return Err(P2shScriptReturnedEmptyStack); }
+None => { return Err(Error::P2shScriptReturnedEmptyStack); }
 }
 }
 }
-None => { return Err(InputNotFound(self.prev_hash, self.prev_index)); }
+None => { return Err(Error::InputNotFound(self.prev_hash, self.prev_index)); }
 }
 Ok(())
 }
@@ -213,7 +213,7 @@ impl TxIn {
 impl Transaction {
 /// Check a transaction for validity
-pub fn validate(&self, utxoset: &UtxoSet) -> Result<(), TransactionError> {
+pub fn validate(&self, utxoset: &UtxoSet) -> Result<(), Error> {
 for (n, input) in self.input.iter().enumerate() {
 try!(input.validate(utxoset, self, n));
 }
@@ -249,7 +249,7 @@ impl Transaction {
 let mut stack = Vec::with_capacity(6);
 trace.sig_trace = input.script_sig.trace(&mut stack, Some((self, n)));
 let err = trace.sig_trace.error.as_ref().map(|e| e.clone());
-err.map(|e| trace.error = Some(InputScriptFailure(e)));
+err.map(|e| trace.error = Some(Error::InputScriptFailure(e)));
 if txo.script_pubkey.is_p2sh() && stack.len() > 0 {
 p2sh_stack = stack.clone();
@@ -262,32 +262,32 @@ impl Transaction {
 if trace.error.is_none() {
 trace.pubkey_trace = Some(txo.script_pubkey.trace(&mut stack, Some((self, n))));
 let err = trace.pubkey_trace.as_ref().unwrap().error.as_ref().map(|e| e.clone());
-err.map(|e| trace.error = Some(OutputScriptFailure(e)));
+err.map(|e| trace.error = Some(Error::OutputScriptFailure(e)));
 match stack.pop() {
 Some(v) => {
 if !read_scriptbool(v.as_slice()) {
-trace.error = Some(ScriptReturnedFalse);
+trace.error = Some(Error::ScriptReturnedFalse);
 }
 }
-None => { trace.error = Some(ScriptReturnedEmptyStack); }
+None => { trace.error = Some(Error::ScriptReturnedEmptyStack); }
 }
 if trace.error.is_none() && txo.script_pubkey.is_p2sh() {
 trace.p2sh_trace = Some(p2sh_script.trace(&mut p2sh_stack, Some((self, n))));
 let err = trace.p2sh_trace.as_ref().unwrap().error.as_ref().map(|e| e.clone());
-err.map(|e| trace.error = Some(P2shScriptFailure(e)));
+err.map(|e| trace.error = Some(Error::P2shScriptFailure(e)));
 match p2sh_stack.pop() {
 Some(v) => {
 if !read_scriptbool(v.as_slice()) {
-trace.error = Some(P2shScriptReturnedFalse);
+trace.error = Some(Error::P2shScriptReturnedFalse);
 }
 }
-None => { trace.error = Some(P2shScriptReturnedEmptyStack); }
+None => { trace.error = Some(Error::P2shScriptReturnedEmptyStack); }
 }
 }
 }
 }
 None => {
-trace.error = Some(InputNotFound(input.prev_hash, input.prev_index));
+trace.error = Some(Error::InputNotFound(input.prev_hash, input.prev_index));
 }
 }
 ret.inputs.push(trace);
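
The `TransactionError` → `Error` rename follows the convention of one `Error` type per module, referred to from outside as `transaction::Error`, `script::Error`, and so on, which is also why `ScriptError` drops out of the import list. In miniature, under assumed module names (not the crate's real signatures):

```rust
mod script {
    #[derive(Debug, PartialEq)]
    pub enum Error { EarlyEndOfScript }
}

mod transaction {
    use super::script;

    #[derive(Debug, PartialEq)]
    pub enum Error {
        // Wrap the inner module's error rather than redefining it.
        InputScriptFailure(script::Error),
        InputNotFound,
    }

    pub fn validate(script_ok: bool) -> Result<(), Error> {
        if script_ok {
            Ok(())
        } else {
            Err(Error::InputScriptFailure(script::Error::EarlyEndOfScript))
        }
    }
}

fn main() {
    // Callers name both types through their modules, so the short names never clash.
    let err: transaction::Error = transaction::validate(false).unwrap_err();
    assert_eq!(err, transaction::Error::InputScriptFailure(script::Error::EarlyEndOfScript));
}
```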


@@ -26,8 +26,7 @@ use std::mem;
 use num_cpus;
 use std::sync::Future;
-use blockdata::transaction::{Transaction, TxOut};
-use blockdata::transaction::TransactionError::{self, InputNotFound};
+use blockdata::transaction::{self, Transaction, TxOut};
 use blockdata::constants::genesis_block;
 use blockdata::block::Block;
 use network::constants::Network;
@@ -38,24 +37,24 @@ use util::hash::{DumbHasher, Sha256dHash};
 #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)]
 pub enum ValidationLevel {
 /// Blindly update the UTXO set (NOT recommended)
-NoValidation,
+Nothing,
 /// Check that the blocks are at least in the right order
-ChainValidation,
+Chain,
 /// Check that any inputs are actually txouts in the set
-TxoValidation,
+Inputs,
 /// Execute the scripts and ensure they pass
-ScriptValidation
+Script
 }
 /// An error returned from a UTXO set operation
 #[derive(PartialEq, Eq, Clone, Debug)]
-pub enum UtxoSetError {
+pub enum Error {
 /// prevhash of the new block is not the hash of the old block (expected, actual)
 BadPrevHash(Sha256dHash, Sha256dHash),
 /// A TXID was duplicated
 DuplicatedTxid(Sha256dHash),
 /// A tx was invalid (txid, error)
-InvalidTx(Sha256dHash, TransactionError),
+InvalidTx(Sha256dHash, transaction::Error),
 }
 struct UtxoNode {
@@ -203,11 +202,11 @@ impl UtxoSet {
 /// Apply the transactions contained in a block
 pub fn update(&mut self, block: &Block, blockheight: usize, validation: ValidationLevel)
--> Result<(), UtxoSetError> {
+-> Result<(), Error> {
 // Make sure we are extending the UTXO set in order
-if validation >= ChainValidation &&
+if validation >= ValidationLevel::Chain &&
 self.last_hash != block.header.prev_blockhash {
-return Err(BadPrevHash(self.last_hash, block.header.prev_blockhash));
+return Err(Error::BadPrevHash(self.last_hash, block.header.prev_blockhash));
 }
 // Set the next hash immediately so that if anything goes wrong,
@@ -247,7 +246,7 @@ impl UtxoSet {
 }
 // Otherwise fail the block
 self.rewind(block);
-return Err(DuplicatedTxid(txid));
+return Err(Error::DuplicatedTxid(txid));
 }
 }
 // Didn't replace anything? Good.
@@ -256,12 +255,12 @@ impl UtxoSet {
 }
 // If we are validating scripts, do all that now in parallel
-if validation >= ScriptValidation {
+if validation >= ValidationLevel::Script {
 let mut future_vec = Vec::with_capacity(block.txdata.len() - 1);
 // skip the genesis since we don't validate this script. (TODO this might
 // be a consensus bug since we don't even check that the opcodes make sense.)
 let n_threads = cmp::min(block.txdata.len() - 1, num_cpus::get());
-for j in range(0, n_threads) {
+for j in 0..n_threads {
 let n_elems = block.txdata.len() - 1;
 let start = 1 + j * n_elems / n_threads;
 let end = cmp::min(n_elems, 1 + (j + 1) * n_elems / n_threads);
@@ -273,7 +272,7 @@ impl UtxoSet {
 for tx in txes.slice(start, end).iter() {
 match tx.validate(unsafe {&*s}) {
 Ok(_) => {},
-Err(e) => { return Err(InvalidTx(tx.bitcoin_hash(), e)); }
+Err(e) => { return Err(Error::InvalidTx(tx.bitcoin_hash(), e)); }
 }
 }
 Ok(())
@@ -301,10 +300,10 @@ impl UtxoSet {
 match taken {
 Some(txo) => { self.spent_txos.get_mut(spent_idx).push(((txid, n as u32), txo)); }
 None => {
-if validation >= TxoValidation {
+if validation >= ValidationLevel::Inputs {
 self.rewind(block);
-return Err(InvalidTx(txid,
-InputNotFound(input.prev_hash, input.prev_index)));
+return Err(Error::InvalidTx(txid,
+transaction::Error::InputNotFound(input.prev_hash, input.prev_index)));
 }
 }
 }
@@ -333,7 +332,7 @@ impl UtxoSet {
 let mut skipped_genesis = false;
 for tx in block.txdata.iter() {
 let txhash = tx.bitcoin_hash();
-for n in range(0, tx.output.len()) {
+for n in 0..tx.output.len() {
 // Just bomb out the whole transaction
 // TODO: this does not conform to BIP30: if a duplicate txid occurs,
 // the block will be (rightly) rejected, causing it to be
@@ -424,7 +423,7 @@ mod tests {
 use std::io::IoResult;
 use serialize::hex::FromHex;
-use super::{UtxoSet, TxoValidation};
+use super::{UtxoSet, ValidationLevel};
 use blockdata::block::Block;
 use network::constants::Network::Bitcoin;
@@ -437,7 +436,7 @@ mod tests {
 let new_block: Block = deserialize("010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac00000000".from_hex().unwrap()).unwrap();
 // Make sure we can't add the block directly, since we are missing the inputs
-assert!(empty_set.update(&new_block, 1, TxoValidation).is_err());
+assert!(empty_set.update(&new_block, 1, ValidationLevel::Inputs).is_err());
 assert_eq!(empty_set.n_utxos(), 0);
 // Add the block manually so that we'll have some UTXOs for the rest of the test
 for tx in new_block.txdata.iter() {
@@ -457,7 +456,7 @@ mod tests {
 // Check again that we can't add the block, and that this doesn't mess up the
 // existing UTXOs
-assert!(empty_set.update(&new_block, 2, TxoValidation).is_err());
+assert!(empty_set.update(&new_block, 2, ValidationLevel::Inputs).is_err());
 assert_eq!(empty_set.n_utxos(), 2);
 for tx in new_block.txdata.iter() {
 let hash = tx.bitcoin_hash();
@@ -497,7 +496,7 @@ mod tests {
 for tx in new_block.txdata.iter() {
 let hash = tx.bitcoin_hash();
-for n in range(0, tx.output.len()) {
+for n in 0..tx.output.len() {
 let n = n as u32;
 let ret = read_again.take_utxo(hash, n);
 assert_eq!(ret, None);
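
Two mechanical migrations run all through this file: `range(a, b)` is gone in favour of the `a..b` syntax, and the validation levels are now written `ValidationLevel::Inputs` and friends, with the derived `PartialOrd`/`Ord` still supporting the `validation >= ValidationLevel::Chain` comparisons. A small sketch of both, using stand-in values rather than the crate's code:

```rust
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)]
enum ValidationLevel {
    // Declaration order defines the Ord ordering: Nothing < Chain < Inputs < Script.
    Nothing,
    Chain,
    Inputs,
    Script,
}

fn main() {
    let validation = ValidationLevel::Inputs;
    // The old `if validation >= ChainValidation` check, spelled with full paths.
    assert!(validation >= ValidationLevel::Chain);
    assert!(validation < ValidationLevel::Script);
    assert!(validation > ValidationLevel::Nothing);

    // `for i in range(0, n)` becomes `for i in 0..n`.
    let n = 4;
    let mut total = 0;
    for i in 0..n {
        total += i;
    }
    assert_eq!(total, 6);
}
```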


@@ -46,7 +46,6 @@
 extern crate alloc;
 extern crate byteorder;
 extern crate collections;
-extern crate core;
 extern crate num_cpus;
 extern crate rand;
 extern crate rustc_serialize as serialize;


@@ -107,7 +107,7 @@ macro_rules! user_enum {
 impl <D: ::serialize::Decoder<E>, E> ::serialize::Decodable<D, E> for $name {
 fn decode(d: &mut D) -> Result<$name, E> {
 let s = try!(d.read_str());
-$( if s.as_slice() == $txt { Ok($elem) } )else*
+$( if s.as_slice() == $txt { Ok($name::$elem) } )else*
 else { Err(d.error(format!("unknown `{}`", s).as_slice())) }
 }
 }


@@ -28,7 +28,7 @@ pub struct Address {
 /// Services provided by the peer whose address this is
 pub services: u64,
 /// Network byte-order ipv6 address, or ipv4-mapped ipv6 address
-pub address: [u8; 16],
+pub address: [u16; 8],
 /// Network port
 pub port: u16
 }
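
The peer address moves from 16 network-order bytes to the 8 sixteen-bit segments that `Ipv6Addr::segments()` returns; converting back to the wire format is just writing each segment big-endian. A sketch of that conversion (the helper name is made up, not part of the crate):

```rust
// Convert 8 big-endian segments into the 16-byte form used on the wire.
fn segments_to_bytes(segments: [u16; 8]) -> [u8; 16] {
    let mut out = [0u8; 16];
    for (i, &seg) in segments.iter().enumerate() {
        out[2 * i] = (seg >> 8) as u8;       // high byte first (network order)
        out[2 * i + 1] = (seg & 0xff) as u8; // then the low byte
    }
    out
}

fn main() {
    // ::ffff:127.0.0.1, the IPv4-mapped form of localhost.
    let segs = [0, 0, 0, 0, 0, 0xffff, 0x7f00, 0x0001];
    assert_eq!(
        segments_to_bytes(segs),
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 127, 0, 0, 1]
    );
}
```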


@@ -40,8 +40,8 @@ pub const USER_AGENT: &'static str = "bitcoin-rust v0.1";
 /// at the start of every message
 pub fn magic(network: Network) -> u32 {
 match network {
-Bitcoin => 0xD9B4BEF9,
-BitcoinTestnet => 0x0709110B
+Network::Bitcoin => 0xD9B4BEF9,
+Network::BitcoinTestnet => 0x0709110B
 // Note: any new entries here must be added to `deserialize` below
 }
 }
@@ -58,8 +58,8 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for Network {
 fn consensus_decode(d: &mut D) -> Result<Network, E> {
 let magic: u32 = try!(ConsensusDecodable::consensus_decode(d));
 match magic {
-0xD9B4BEF9 => Ok(Bitcoin),
-0x0709110B => Ok(BitcoinTestnet),
+0xD9B4BEF9 => Ok(Network::Bitcoin),
+0x0709110B => Ok(Network::BitcoinTestnet),
 x => Err(d.error(format!("Unknown network (magic {:x})", x).as_slice()))
 }
 }
@@ -67,17 +67,17 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for Network {
 #[cfg(test)]
 mod tests {
-use super::Network::{self, Bitcoin, BitcoinTestnet};
+use super::Network;
 use network::serialize::{deserialize, serialize};
 #[test]
 fn serialize_test() {
-assert_eq!(serialize(&Bitcoin).unwrap(), vec![0xf9, 0xbe, 0xb4, 0xd9]);
-assert_eq!(serialize(&BitcoinTestnet).unwrap(), vec![0x0b, 0x11, 0x09, 0x07]);
-assert_eq!(deserialize(vec![0xf9, 0xbe, 0xb4, 0xd9]), Ok(Bitcoin));
-assert_eq!(deserialize(vec![0x0b, 0x11, 0x09, 0x07]), Ok(BitcoinTestnet));
+assert_eq!(serialize(&Network::Bitcoin).unwrap(), vec![0xf9, 0xbe, 0xb4, 0xd9]);
+assert_eq!(serialize(&Network::BitcoinTestnet).unwrap(), vec![0x0b, 0x11, 0x09, 0x07]);
+assert_eq!(deserialize(vec![0xf9, 0xbe, 0xb4, 0xd9]), Ok(Network::Bitcoin));
+assert_eq!(deserialize(vec![0x0b, 0x11, 0x09, 0x07]), Ok(Network::BitcoinTestnet));
 let bad: Result<Network, _> = deserialize("fakenet".as_bytes().to_vec());
 assert!(bad.is_err());
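
The test vectors make sense once you remember that the 4-byte magic is serialized little-endian: `0xD9B4BEF9` goes out as `f9 be b4 d9`. A one-line check using only std (not the crate's serializer):

```rust
fn main() {
    let mainnet_magic: u32 = 0xD9B4BEF9;
    let testnet_magic: u32 = 0x0709110B;
    // Consensus encoding writes integers little-endian, so the byte order flips.
    assert_eq!(mainnet_magic.to_le_bytes(), [0xf9, 0xbe, 0xb4, 0xd9]);
    assert_eq!(testnet_magic.to_le_bytes(), [0x0b, 0x11, 0x09, 0x07]);
}
```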


@@ -108,27 +108,27 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for u8 {
 impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for u16 {
 #[inline]
-fn consensus_decode(d: &mut D) -> Result<u16, E> { d.read_u16().map(|n| Int::from_le(n)) }
+fn consensus_decode(d: &mut D) -> Result<u16, E> { d.read_u16().map(|n| u16::from_le(n)) }
 }
 impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for u32 {
 #[inline]
-fn consensus_decode(d: &mut D) -> Result<u32, E> { d.read_u32().map(|n| Int::from_le(n)) }
+fn consensus_decode(d: &mut D) -> Result<u32, E> { d.read_u32().map(|n| u32::from_le(n)) }
 }
 impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for u64 {
 #[inline]
-fn consensus_decode(d: &mut D) -> Result<u64, E> { d.read_u64().map(|n| Int::from_le(n)) }
+fn consensus_decode(d: &mut D) -> Result<u64, E> { d.read_u64().map(|n| u64::from_le(n)) }
 }
 impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for i32 {
 #[inline]
-fn consensus_decode(d: &mut D) -> Result<i32, E> { d.read_i32().map(|n| Int::from_le(n)) }
+fn consensus_decode(d: &mut D) -> Result<i32, E> { d.read_i32().map(|n| i32::from_le(n)) }
 }
 impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for i64 {
 #[inline]
-fn consensus_decode(d: &mut D) -> Result<i64, E> { d.read_i64().map(|n| Int::from_le(n)) }
+fn consensus_decode(d: &mut D) -> Result<i64, E> { d.read_i64().map(|n| i64::from_le(n)) }
 }
 impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for VarInt {
@@ -136,9 +136,9 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for VarInt {
 fn consensus_decode(d: &mut D) -> Result<VarInt, E> {
 let n = try!(d.read_u8());
 match n {
-0xFF => d.read_u64().map(|n| VarInt(Int::from_le(n))),
-0xFE => d.read_u32().map(|n| VarInt(Int::from_le(n) as u64)),
-0xFD => d.read_u16().map(|n| VarInt(Int::from_le(n) as u64)),
+0xFF => d.read_u64().map(|n| VarInt(u64::from_le(n))),
+0xFE => d.read_u32().map(|n| VarInt(u32::from_le(n) as u64)),
+0xFD => d.read_u16().map(|n| VarInt(u16::from_le(n) as u64)),
 n => Ok(VarInt(n as u64))
 }
 }
@@ -188,7 +188,7 @@ macro_rules! impl_array {
 // Set everything to the first decode
 let mut ret = [try!(ConsensusDecodable::consensus_decode(d)); $size];
 // Set the rest
-for i in range(1, $size) { ret[i] = try!(ConsensusDecodable::consensus_decode(d)); }
+for i in 1..$size { ret[i] = try!(ConsensusDecodable::consensus_decode(d)); }
 Ok(ret)
 }
 }
@@ -224,7 +224,7 @@ impl<D:SimpleDecoder<E>, E, T:ConsensusDecodable<D, E>> ConsensusDecodable<D, E>
 fn consensus_decode(d: &mut D) -> Result<Vec<T>, E> {
 let VarInt(len): VarInt = try!(ConsensusDecodable::consensus_decode(d));
 let mut ret = Vec::with_capacity(len as usize);
-for _ in range(0, len) { ret.push(try!(ConsensusDecodable::consensus_decode(d))); }
+for _ in 0..len { ret.push(try!(ConsensusDecodable::consensus_decode(d))); }
 Ok(ret)
 }
 }
@@ -302,7 +302,7 @@ impl<D:SimpleDecoder<E>, E> ConsensusDecodable<D, E> for CheckedData {
 let len: u32 = try!(ConsensusDecodable::consensus_decode(d));
 let checksum: [u8; 4] = try!(ConsensusDecodable::consensus_decode(d));
 let mut ret = Vec::with_capacity(len as usize);
-for _ in range(0, len) { ret.push(try!(ConsensusDecodable::consensus_decode(d))); }
+for _ in 0..len { ret.push(try!(ConsensusDecodable::consensus_decode(d))); }
 let expected_checksum = sha2_checksum(ret.as_slice());
 if expected_checksum != checksum {
 Err(d.error("bad checksum"))
@@ -380,7 +380,7 @@ impl<D:SimpleDecoder<E>, E, T,
 let VarInt(len): VarInt = try!(ConsensusDecodable::consensus_decode(d));
 let mut ret = HashMap::with_capacity_and_hasher(len as usize, Default::default());
-for _ in range(0, len) {
+for _ in 0..len {
 ret.insert(try!(ConsensusDecodable::consensus_decode(d)),
 try!(ConsensusDecodable::consensus_decode(d)));
 }
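
`Int::from_le` disappeared along with the old `std::num::Int` trait, so each width now calls its own inherent `from_le`. The VarInt rules themselves are unchanged: a prefix byte below 0xFD is the value, while 0xFD, 0xFE, and 0xFF announce a little-endian u16, u32, or u64. A self-contained sketch of that decoding over a byte slice (not the crate's SimpleDecoder API):

```rust
use std::convert::TryInto;

// Decode a Bitcoin-style VarInt from the front of `bytes`,
// returning the value and how many bytes were consumed.
fn decode_varint(bytes: &[u8]) -> Option<(u64, usize)> {
    match *bytes.first()? {
        n @ 0x00..=0xFC => Some((n as u64, 1)),
        0xFD => Some((u16::from_le_bytes(bytes.get(1..3)?.try_into().ok()?) as u64, 3)),
        0xFE => Some((u32::from_le_bytes(bytes.get(1..5)?.try_into().ok()?) as u64, 5)),
        0xFF => Some((u64::from_le_bytes(bytes.get(1..9)?.try_into().ok()?), 9)),
    }
}

fn main() {
    assert_eq!(decode_varint(&[0x7b]), Some((123, 1)));
    assert_eq!(decode_varint(&[0xFD, 0xE8, 0x03]), Some((1000, 3)));
    assert_eq!(decode_varint(&[0xFE, 0x40, 0x42, 0x0F, 0x00]), Some((1_000_000, 5)));
    assert_eq!(decode_varint(&[0xFD]), None); // truncated payload
}
```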


@@ -114,19 +114,19 @@ pub enum NetworkMessage {
 impl RawNetworkMessage {
 fn command(&self) -> String {
 match self.payload {
-Version(_) => "version",
-Verack => "verack",
-Addr(_) => "addr",
-Inv(_) => "inv",
-GetData(_) => "getdata",
-NotFound(_) => "notfound",
-GetBlocks(_) => "getblocks",
-GetHeaders(_) => "getheaders",
-Tx(_) => "tx",
-Block(_) => "block",
-Headers(_) => "headers",
-Ping(_) => "ping",
-Pong(_) => "pong",
+NetworkMessage::Version(_) => "version",
+NetworkMessage::Verack => "verack",
+NetworkMessage::Addr(_) => "addr",
+NetworkMessage::Inv(_) => "inv",
+NetworkMessage::GetData(_) => "getdata",
+NetworkMessage::NotFound(_) => "notfound",
+NetworkMessage::GetBlocks(_) => "getblocks",
+NetworkMessage::GetHeaders(_) => "getheaders",
+NetworkMessage::Tx(_) => "tx",
+NetworkMessage::Block(_) => "block",
+NetworkMessage::Headers(_) => "headers",
+NetworkMessage::Ping(_) => "ping",
+NetworkMessage::Pong(_) => "pong",
 }.to_string()
 }
 }
@@ -136,19 +136,19 @@ impl<S:SimpleEncoder<E>, E> ConsensusEncodable<S, E> for RawNetworkMessage {
 try!(self.magic.consensus_encode(s));
 try!(CommandString(self.command()).consensus_encode(s));
 try!(CheckedData(match self.payload {
-Version(ref dat) => serialize(dat),
-Verack => Ok(vec![]),
-Addr(ref dat) => serialize(dat),
-Inv(ref dat) => serialize(dat),
-GetData(ref dat) => serialize(dat),
-NotFound(ref dat) => serialize(dat),
-GetBlocks(ref dat) => serialize(dat),
-GetHeaders(ref dat) => serialize(dat),
-Tx(ref dat) => serialize(dat),
-Block(ref dat) => serialize(dat),
-Headers(ref dat) => serialize(dat),
-Ping(ref dat) => serialize(dat),
-Pong(ref dat) => serialize(dat),
+NetworkMessage::Version(ref dat) => serialize(dat),
+NetworkMessage::Verack => Ok(vec![]),
+NetworkMessage::Addr(ref dat) => serialize(dat),
+NetworkMessage::Inv(ref dat) => serialize(dat),
+NetworkMessage::GetData(ref dat) => serialize(dat),
+NetworkMessage::NotFound(ref dat) => serialize(dat),
+NetworkMessage::GetBlocks(ref dat) => serialize(dat),
+NetworkMessage::GetHeaders(ref dat) => serialize(dat),
+NetworkMessage::Tx(ref dat) => serialize(dat),
+NetworkMessage::Block(ref dat) => serialize(dat),
+NetworkMessage::Headers(ref dat) => serialize(dat),
+NetworkMessage::Ping(ref dat) => serialize(dat),
+NetworkMessage::Pong(ref dat) => serialize(dat),
 }.unwrap()).consensus_encode(s));
 Ok(())
 }
@@ -162,19 +162,19 @@ impl<D:SimpleDecoder<io::Error>> ConsensusDecodable<D, io::Error> for RawNetwork
 let mut mem_d = RawDecoder::new(Cursor::new(raw_payload));
 let payload = match cmd.as_slice() {
-"version" => Version(try!(prepend_err("version", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"verack" => Verack,
-"addr" => Addr(try!(prepend_err("addr", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"inv" => Inv(try!(prepend_err("inv", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"getdata" => GetData(try!(prepend_err("getdata", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"notfound" => NotFound(try!(prepend_err("notfound", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"getblocks" => GetBlocks(try!(prepend_err("getblocks", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"getheaders" => GetHeaders(try!(prepend_err("getheaders", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"block" => Block(try!(prepend_err("block", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"headers" => Headers(try!(prepend_err("headers", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"ping" => Ping(try!(prepend_err("ping", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"pong" => Ping(try!(prepend_err("pong", ConsensusDecodable::consensus_decode(&mut mem_d)))),
-"tx" => Tx(try!(prepend_err("tx", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"version" => NetworkMessage::Version(try!(prepend_err("version", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"verack" => NetworkMessage::Verack,
+"addr" => NetworkMessage::Addr(try!(prepend_err("addr", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"inv" => NetworkMessage::Inv(try!(prepend_err("inv", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"getdata" => NetworkMessage::GetData(try!(prepend_err("getdata", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"notfound" => NetworkMessage::NotFound(try!(prepend_err("notfound", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"getblocks" => NetworkMessage::GetBlocks(try!(prepend_err("getblocks", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"getheaders" => NetworkMessage::GetHeaders(try!(prepend_err("getheaders", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"block" => NetworkMessage::Block(try!(prepend_err("block", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"headers" => NetworkMessage::Headers(try!(prepend_err("headers", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"ping" => NetworkMessage::Ping(try!(prepend_err("ping", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"pong" => NetworkMessage::Ping(try!(prepend_err("pong", ConsensusDecodable::consensus_decode(&mut mem_d)))),
+"tx" => NetworkMessage::Tx(try!(prepend_err("tx", ConsensusDecodable::consensus_decode(&mut mem_d)))),
 cmd => {
 return Err(io::Error {
 kind: io::ErrorKind::OtherError,
 }


@@ -20,7 +20,7 @@
 //!
 use collections::Vec;
-use std::io::{self, Cursor};
+use std::io::{self, Cursor, Read, Write};
 use serialize::hex::ToHex;
 use network::encodable::{ConsensusDecodable, ConsensusEncodable};
@@ -39,14 +39,14 @@ impl BitcoinHash for Vec<u8> {
 }
 /// Encode an object into a vector
-pub fn serialize<T: ConsensusEncodable<RawEncoder<MemWriter>, io::Error>>(obj: &T) -> io::Result<Vec<u8>> {
+pub fn serialize<T: ConsensusEncodable<RawEncoder<Cursor>, io::Error>>(obj: &T) -> io::Result<Vec<u8>> {
-let mut encoder = RawEncoder::new(MemWriter::new());
+let mut encoder = RawEncoder::new(Cursor::new(vec![]));
 try!(obj.consensus_encode(&mut encoder));
 Ok(encoder.unwrap().unwrap())
 }
 /// Encode an object into a hex-encoded string
-pub fn serialize_hex<T: ConsensusEncodable<RawEncoder<MemWriter>, io::Error>>(obj: &T) -> io::Result<String> {
+pub fn serialize_hex<T: ConsensusEncodable<RawEncoder<Cursor>, io::Error>>(obj: &T) -> io::Result<String> {
 let serial = try!(serialize(obj));
 Ok(serial.as_slice().to_hex())
 }
@@ -67,7 +67,7 @@ pub struct RawDecoder<R> {
 reader: R
 }
-impl<W:Writer> RawEncoder<W> {
+impl<W:Write> RawEncoder<W> {
 /// Constructor
 pub fn new(writer: W) -> RawEncoder<W> {
 RawEncoder { writer: writer }
@@ -79,7 +79,7 @@ impl<W:Writer> RawEncoder<W> {
 }
 }
-impl<R:Reader> RawDecoder<R> {
+impl<R:Read> RawDecoder<R> {
 /// Constructor
 pub fn new(reader: R) -> RawDecoder<R> {
 RawDecoder { reader: reader }
@@ -144,7 +144,7 @@ pub trait SimpleDecoder<E> {
 // TODO: trait reform: impl SimpleEncoder for every Encoder, ditto for Decoder
-impl<W:Writer> SimpleEncoder<io::Error> for RawEncoder<W> {
+impl<W:Write> SimpleEncoder<io::Error> for RawEncoder<W> {
 #[inline]
 fn emit_u64(&mut self, v: u64) -> io::Result<()> { self.writer.write_le_u64(v) }
 #[inline]
@@ -167,7 +167,7 @@ impl<W:Writer> SimpleEncoder<io::Error> for RawEncoder<W> {
 fn emit_bool(&mut self, v: bool) -> io::Result<()> { self.writer.write_i8(if v {1} else {0}) }
 }
-impl<R:Reader> SimpleDecoder<io::Error> for RawDecoder<R> {
+impl<R:Read> SimpleDecoder<io::Error> for RawDecoder<R> {
 #[inline]
 fn read_u64(&mut self) -> io::Result<u64> { self.reader.read_le_u64() }
 #[inline]
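
`MemWriter` and the old `Reader`/`Writer` traits went away; the idiom that replaces them is `std::io::Write` implemented by `Cursor<Vec<u8>>`, with `into_inner()` to get the bytes back out, and `Read` on the decoding side. A sketch of the round trip with plain std types (not the crate's RawEncoder/RawDecoder):

```rust
use std::io::{Cursor, Read, Write};

fn main() -> std::io::Result<()> {
    // Encode: a Cursor over a Vec<u8> is the in-memory replacement for MemWriter.
    let mut encoder = Cursor::new(Vec::new());
    encoder.write_all(&0xD9B4BEF9u32.to_le_bytes())?;
    let bytes = encoder.into_inner();
    assert_eq!(bytes, [0xf9, 0xbe, 0xb4, 0xd9]);

    // Decode: anything implementing Read works as the source.
    let mut decoder = Cursor::new(bytes);
    let mut buf = [0u8; 4];
    decoder.read_exact(&mut buf)?;
    assert_eq!(u32::from_le_bytes(buf), 0xD9B4BEF9);
    Ok(())
}
```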


@@ -34,17 +34,11 @@ use network::serialize::{RawEncoder, RawDecoder};
 use util::misc::prepend_err;
 /// Format an IP address in the 16-byte bitcoin protocol serialization
-fn ipaddr_to_bitcoin_addr(ipaddr: &ip::IpAddr) -> [u8; 16] {
+fn ipaddr_to_bitcoin_addr(ipaddr: &ip::IpAddr) -> [u16; 8] {
 match *ipaddr {
-ip::Ipv4Addr(a, b, c, d) =>
-[0, 0, 0, 0, 0, 0, 0, 0,
-0, 0, 0xff, 0xff, a, b, c, d],
-ip::Ipv6Addr(a, b, c, d, e, f, g, h) =>
-[(a / 0x100) as u8, (a % 0x100) as u8, (b / 0x100) as u8, (b % 0x100) as u8,
-(c / 0x100) as u8, (c % 0x100) as u8, (d / 0x100) as u8, (d % 0x100) as u8,
-(e / 0x100) as u8, (e % 0x100) as u8, (f / 0x100) as u8, (f % 0x100) as u8,
-(g / 0x100) as u8, (g % 0x100) as u8, (h / 0x100) as u8, (h % 0x100) as u8 ]
-}
+ip::IpAddr::V4(ref addr) => &addr.to_ipv6_mapped(),
+ip::IpAddr::V6(ref addr) => addr
+}.segments()
 }
 /// A network socket along with information about the peer
@@ -72,7 +66,7 @@ impl Socket {
 // TODO: we fix services to 0
 /// Construct a new socket
 pub fn new(network: constants::Network) -> Socket {
-let mut rng = task_rng();
+let mut rng = thread_rng();
 Socket {
 socket: None,
 buffered_reader: Arc::new(Mutex::new(None)),
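
The hand-rolled IPv4-to-IPv6 mapping is replaced by the standard library's `to_ipv6_mapped()` plus `segments()`. Roughly what that pair does, shown with std types only (not the crate's Socket code):

```rust
use std::net::{IpAddr, Ipv6Addr};

// Return the 8 segments of the (possibly IPv4-mapped) IPv6 form of an address.
fn to_segments(addr: IpAddr) -> [u16; 8] {
    match addr {
        IpAddr::V4(v4) => v4.to_ipv6_mapped().segments(),
        IpAddr::V6(v6) => v6.segments(),
    }
}

fn main() {
    let v4: IpAddr = "127.0.0.1".parse().unwrap();
    // 127.0.0.1 maps to ::ffff:7f00:1.
    assert_eq!(to_segments(v4), [0, 0, 0, 0, 0, 0xffff, 0x7f00, 0x0001]);

    let v6 = IpAddr::V6(Ipv6Addr::LOCALHOST);
    assert_eq!(to_segments(v6), [0, 0, 0, 0, 0, 0, 0, 1]);
}
```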


@@ -22,7 +22,7 @@ use util::hash::Sha256dHash;
 /// An error that might occur during base58 decoding
 #[derive(Debug, PartialEq, Eq, Clone)]
-pub enum Base58Error {
+pub enum Error {
 /// Invalid character encountered
 BadByte(u8),
 /// Checksum was not correct (expected, actual)
@@ -34,7 +34,7 @@ pub enum Base58Error {
 /// Checked data was less than 4 bytes
 TooShort(usize),
 /// Any other error
-OtherBase58Error(String)
+Other(String)
 }
 static BASE58_CHARS: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
@@ -62,21 +62,21 @@ static BASE58_DIGITS: [Option<u8>; 128] = [
 pub trait FromBase58 {
 /// Constructs an object flrom the byte-encoding (base 256)
 /// representation of its base58 format
-fn from_base58_layout(data: Vec<u8>) -> Result<Self, Base58Error>;
+fn from_base58_layout(data: Vec<u8>) -> Result<Self, Error>;
 /// Obtain an object from its base58 encoding
-fn from_base58(data: &str) -> Result<Self, Base58Error> {
+fn from_base58(data: &str) -> Result<Self, Error> {
 // 11/15 is just over log_256(58)
 let mut scratch = Vec::from_elem(1 + data.len() * 11 / 15, 0u8);
 // Build in base 256
 for d58 in data.bytes() {
 // Compute "X = X * 58 + next_digit" in base 256
 if d58 as usize > BASE58_DIGITS.len() {
-return Err(BadByte(d58));
+return Err(Error::BadByte(d58));
 }
 let mut carry = match BASE58_DIGITS[d58 as usize] {
 Some(d58) => d58 as u32,
-None => { return Err(BadByte(d58)); }
+None => { return Err(Error::BadByte(d58)); }
 };
 for d256 in scratch.iter_mut().rev() {
 carry += *d256 as u32 * 58;
@@ -96,16 +96,16 @@ pub trait FromBase58 {
 }
 /// Obtain an object from its base58check encoding
-fn from_base58check(data: &str) -> Result<Self, Base58Error> {
+fn from_base58check(data: &str) -> Result<Self, Error> {
 let mut ret: Vec<u8> = try!(FromBase58::from_base58(data));
 if ret.len() < 4 {
-return Err(TooShort(ret.len()));
+return Err(Error::TooShort(ret.len()));
 }
 let ck_start = ret.len() - 4;
 let expected = Sha256dHash::from_data(ret.slice_to(ck_start)).into_le().low_u32();
 let actual = LittleEndian::read_u32(&ret[ck_start..(ck_start + 4)]);
 if expected != actual {
-return Err(BadChecksum(expected, actual));
+return Err(Error::BadChecksum(expected, actual));
 }
 ret.truncate(ck_start);
@@ -174,7 +174,7 @@ impl ToBase58 for Vec<u8> {
 }
 impl FromBase58 for Vec<u8> {
-fn from_base58_layout(data: Vec<u8>) -> Result<Vec<u8>, Base58Error> {
+fn from_base58_layout(data: Vec<u8>) -> Result<Vec<u8>, Error> {
 Ok(data)
 }
 }
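
The decoder's inner loop, "X = X * 58 + next_digit" carried out in base 256, is easier to see on its own. A stripped-down version over a fixed-size scratch buffer, with the error variants, leading-zero handling, and checksum step omitted:

```rust
const BASE58_CHARS: &[u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";

// Decode a base58 string into big-endian bytes (no checksum, no leading-1 handling).
fn base58_to_bytes(s: &str) -> Option<Vec<u8>> {
    // 11/15 is just over log_256(58), as the original comment notes.
    let mut scratch = vec![0u8; 1 + s.len() * 11 / 15];
    for ch in s.bytes() {
        // "X = X * 58 + next_digit" performed digit-by-digit in base 256.
        let mut carry = BASE58_CHARS.iter().position(|&c| c == ch)? as u32;
        for d256 in scratch.iter_mut().rev() {
            carry += *d256 as u32 * 58;
            *d256 = (carry % 256) as u8;
            carry /= 256;
        }
    }
    Some(scratch)
}

fn main() {
    // "Cn8eVZg" is the base58 encoding of the ASCII bytes "hello".
    let out = base58_to_bytes("Cn8eVZg").unwrap();
    assert_eq!(&out[out.len() - 5..], b"hello");
}
```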


@@ -15,8 +15,8 @@
 //!
 //! Utility functions related to hashing data, including merkleization
-use core::char::from_digit;
-use core::cmp::min;
+use std::char::from_digit;
+use std::cmp::min;
 use std::default::Default;
 use std::fmt;
 use std::io::Cursor;
@@ -75,7 +75,7 @@ impl hash::Hash<DumbHasherState> for Sha256dHash {
 fn hash(&self, state: &mut DumbHasherState) {
 let &Sha256dHash(ref hash) = self;
 let &DumbHasherState(ref mut arr) = state;
-for i in range(0, 8) {
+for i in 0..8 {
 arr[i] += hash[i];
 }
 }
@@ -184,7 +184,7 @@ impl Sha256dHash {
 pub fn le_hex_string(&self) -> String {
 let &Sha256dHash(data) = self;
 let mut ret = String::with_capacity(64);
-for i in range(0, 32) {
+for i in 0..32 {
 ret.push_char(from_digit((data[i] / 0x10) as usize, 16).unwrap());
 ret.push_char(from_digit((data[i] & 0x0f) as usize, 16).unwrap());
 }
@@ -195,7 +195,7 @@ impl Sha256dHash {
 pub fn be_hex_string(&self) -> String {
 let &Sha256dHash(data) = self;
 let mut ret = String::with_capacity(64);
-for i in range(0, 32).rev() {
+for i in (0..32).rev() {
 ret.push_char(from_digit((data[i] / 0x10) as usize, 16).unwrap());
 ret.push_char(from_digit((data[i] & 0x0f) as usize, 16).unwrap());
 }
@@ -234,7 +234,7 @@ impl<D: ::serialize::Decoder<E>, E> ::serialize::Decodable<D, E> for Sha256dHash
 let raw_str = try!(hex_str.as_slice().from_hex()
 .map_err(|_| d.error("non-hexadecimal hash string")));
 let mut ret = [0u8; 32];
-for i in range(0, 32) {
+for i in 0..32 {
 ret[i] = raw_str[31 - i];
 }
 Ok(Sha256dHash(ret))
@@ -279,7 +279,7 @@ impl<'a, T: BitcoinHash> MerkleRoot for &'a [T] {
 }
 // Recursion
 let mut next = vec![];
-for idx in range(0, (data.len() + 1) / 2) {
+for idx in 0..((data.len() + 1) / 2) {
 let idx1 = 2 * idx;
 let idx2 = min(idx1 + 1, data.len() - 1);
 let mut encoder = RawEncoder::new(Cursor::new(vec![]));
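
`le_hex_string` and `be_hex_string` differ only in the direction of the loop, now written `0..32` versus `(0..32).rev()`. A tiny version of the same nibble-by-nibble formatting over 4 bytes, using the current `from_digit` signature and `String::push` rather than the crate's code:

```rust
use std::char::from_digit;

// Hex-encode `data` front-to-back, or back-to-front when `reversed` is set.
fn hex_string(data: &[u8], reversed: bool) -> String {
    let mut ret = String::with_capacity(data.len() * 2);
    let indices: Vec<usize> = if reversed {
        (0..data.len()).rev().collect()
    } else {
        (0..data.len()).collect()
    };
    for i in indices {
        // High nibble first, then low nibble, exactly as above.
        ret.push(from_digit((data[i] / 0x10) as u32, 16).unwrap());
        ret.push(from_digit((data[i] & 0x0f) as u32, 16).unwrap());
    }
    ret
}

fn main() {
    let data = [0x12, 0x34, 0xab, 0xcd];
    assert_eq!(hex_string(&data, false), "1234abcd");
    assert_eq!(hex_string(&data, true), "cdab3412");
}
```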


@@ -88,7 +88,7 @@ pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
 while i <= top {
 if haystack.slice(i, i + needle.len()) == needle {
 let v = haystack.as_mut_slice();
-for j in range(i, top) {
+for j in i..top {
 v.swap(j + needle.len(), j);
 }
 n_deleted += 1;
@@ -97,11 +97,11 @@ pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
 top -= needle.len();
 if overflow { break; }
 } else {
-i += match Opcode::from_u8((*haystack)[i]).classify() {
-opcodes::PushBytes(n) => n + 1,
-opcodes::Ordinary(opcodes::OP_PUSHDATA1) => 2,
-opcodes::Ordinary(opcodes::OP_PUSHDATA2) => 3,
-opcodes::Ordinary(opcodes::OP_PUSHDATA4) => 5,
+i += match opcodes::All::from_u8((*haystack)[i]).classify() {
+opcodes::Class::PushBytes(n) => n + 1,
+opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA1) => 2,
+opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA2) => 3,
+opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA4) => 5,
 _ => 1
 };
 }


@ -20,10 +20,10 @@
//! strings; a Patricia tree uses bitstrings. //! strings; a Patricia tree uses bitstrings.
//! //!
use core::fmt::Debug; use std::fmt::Debug;
use core::cmp;
use std::marker; use std::marker;
use std::num::{Zero, One}; use std::num::{Zero, One};
use std::{cmp, ops, ptr};
use network::encodable::{ConsensusDecodable, ConsensusEncodable}; use network::encodable::{ConsensusDecodable, ConsensusEncodable};
use network::serialize::{SimpleDecoder, SimpleEncoder}; use network::serialize::{SimpleDecoder, SimpleEncoder};
@ -38,7 +38,7 @@ pub struct PatriciaTree<K, V> {
skip_len: u8 skip_len: u8
} }
impl<K:BitArray+Eq+Zero+One+BitXor<K,K>+Shl<usize,K>+Shr<usize,K>, V> PatriciaTree<K, V> { impl<K:BitArray+cmp::Eq+Zero+One+ops::BitXor<K,K>+ops::Shl<usize,K>+ops::Shr<usize,K>, V> PatriciaTree<K, V> {
/// Constructs a new Patricia tree /// Constructs a new Patricia tree
pub fn new() -> PatriciaTree<K, V> { pub fn new() -> PatriciaTree<K, V> {
PatriciaTree { PatriciaTree {
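
The only change in this hunk is spelling the bounds through their modules (`cmp::Eq`, `ops::BitXor`, `ops::Shl`, `ops::Shr`) now that the prelude re-exports were moving around. A minimal sketch of the same style of bound on a toy function; note that on current Rust the operator traits take one type parameter plus an associated `Output`, and `std::num::Zero` is gone, so `Default` stands in for it here:

```rust
use std::cmp;
use std::ops;

/// Toy helper with module-qualified bounds in the spirit of the
/// `PatriciaTree` impl header (modern associated-`Output` trait syntax).
fn differs_after_shift<K>(a: K, b: K, shift: usize) -> bool
where
    K: cmp::Eq
        + Copy
        + Default
        + ops::BitXor<K, Output = K>
        + ops::Shl<usize, Output = K>
        + ops::Shr<usize, Output = K>,
{
    // Shift both values, XOR them, and compare against the all-zero default.
    ((a >> shift) ^ (b >> shift)) != K::default()
}

fn main() {
    assert!(differs_after_shift(0b1010u32, 0b0110u32, 1));
    assert!(!differs_after_shift(0b1010u32, 0b1011u32, 1));
}
```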
@ -214,7 +214,7 @@ impl<K:BitArray+Eq+Zero+One+BitXor<K,K>+Shl<usize,K>+Shr<usize,K>, V> PatriciaTr
pub fn delete(&mut self, key: &K, key_len: usize) -> Option<V> { pub fn delete(&mut self, key: &K, key_len: usize) -> Option<V> {
/// Return value is (deletable, actual return value), where `deletable` is true /// Return value is (deletable, actual return value), where `deletable` is true
/// when the entire node can be deleted (i.e. it has no children) /// when the entire node can be deleted (i.e. it has no children)
fn recurse<K:BitArray+Eq+Zero+One+Add<K,K>+Shr<usize,K>+Shl<usize,K>, V>(tree: &mut PatriciaTree<K, V>, key: &K, key_len: usize) -> (bool, Option<V>) { fn recurse<K:BitArray+cmp::Eq+Zero+One+ops::Add<K,K>+ops::Shr<usize,K>+ops::Shl<usize,K>, V>(tree: &mut PatriciaTree<K, V>, key: &K, key_len: usize) -> (bool, Option<V>) {
// If the search key is shorter than the node prefix, there is no // If the search key is shorter than the node prefix, there is no
// way we can match, so fail. // way we can match, so fail.
if key_len < tree.skip_len as usize { if key_len < tree.skip_len as usize {
@ -346,7 +346,7 @@ impl<K:BitArray+Eq+Zero+One+BitXor<K,K>+Shl<usize,K>+Shr<usize,K>, V> PatriciaTr
node: self as *mut _, node: self as *mut _,
parents: vec![], parents: vec![],
started: false, started: false,
marker: marker::ContravariantLifetime::<'a> marker: marker::PhantomData
} }
} }
} }
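
This hunk swaps the removed `marker::ContravariantLifetime::<'a>` for `marker::PhantomData`, which is how a raw-pointer iterator stays tied to the lifetime of the tree it was borrowed from without holding a real reference. A self-contained sketch of the pattern with toy `Node`/`Iter` types (and the modern associated-type `Iterator` form rather than the old `Iterator<&'a V>` spelling seen below):

```rust
use std::marker::PhantomData;

struct Node {
    value: u32,
    next: *const Node,
}

/// Iterator that walks raw pointers but is still bounded by the lifetime
/// of the structure it came from, via PhantomData.
struct Iter<'a> {
    cur: *const Node,
    _marker: PhantomData<&'a Node>,
}

impl<'a> Iterator for Iter<'a> {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        if self.cur.is_null() {
            None
        } else {
            // Sound as long as the PhantomData lifetime keeps the nodes borrowed.
            let node = unsafe { &*self.cur };
            self.cur = node.next;
            Some(node.value)
        }
    }
}

fn main() {
    let b = Node { value: 2, next: std::ptr::null() };
    let a = Node { value: 1, next: &b as *const Node };
    let it = Iter { cur: &a as *const Node, _marker: PhantomData };
    assert_eq!(it.collect::<Vec<_>>(), vec![1, 2]);
}
```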
@ -355,14 +355,14 @@ impl<K:BitArray, V:Debug> PatriciaTree<K, V> {
/// Print the entire tree /// Print the entire tree
pub fn print<'a>(&'a self) { pub fn print<'a>(&'a self) {
fn recurse<'a, K:BitArray, V:Debug>(tree: &'a PatriciaTree<K, V>, depth: usize) { fn recurse<'a, K:BitArray, V:Debug>(tree: &'a PatriciaTree<K, V>, depth: usize) {
for i in range(0, tree.skip_len as usize) { for i in 0..tree.skip_len as usize {
print!("{:}", if tree.skip_prefix.bit(i) { 1 } else { 0 }); print!("{:}", if tree.skip_prefix.bit(i) { 1 } else { 0 });
} }
println!(": {:}", tree.data); println!(": {:}", tree.data);
// left gets no indentation // left gets no indentation
match tree.child_l { match tree.child_l {
Some(ref t) => { Some(ref t) => {
for _ in range(0, depth + tree.skip_len as usize) { for _ in 0..(depth + tree.skip_len as usize) {
print!("-"); print!("-");
} }
print!("0"); print!("0");
@ -373,7 +373,7 @@ impl<K:BitArray, V:Debug> PatriciaTree<K, V> {
// right one gets indentation // right one gets indentation
match tree.child_r { match tree.child_r {
Some(ref t) => { Some(ref t) => {
for _ in range(0, depth + tree.skip_len as usize) { for _ in 0..(depth + tree.skip_len as usize) {
print!("_"); print!("_");
} }
print!("1"); print!("1");
@ -422,7 +422,7 @@ pub struct MutItems<'tree, K, V> {
started: bool, started: bool,
node: *mut PatriciaTree<K, V>, node: *mut PatriciaTree<K, V>,
parents: Vec<*mut PatriciaTree<K, V>>, parents: Vec<*mut PatriciaTree<K, V>>,
marker: marker::ContravariantLifetime<'tree> marker: marker::PhantomData<&'tree PatriciaTree<K, V>>
} }
impl<'a, K, V> Iterator<&'a V> for Items<'a, K, V> { impl<'a, K, V> Iterator<&'a V> for Items<'a, K, V> {
@ -474,7 +474,7 @@ impl<'a, K, V> Iterator<&'a mut V> for MutItems<'a, K, V> {
fn borrow_opt<'a, K, V>(opt_ptr: &'a Option<Box<PatriciaTree<K, V>>>) -> *mut PatriciaTree<K, V> { fn borrow_opt<'a, K, V>(opt_ptr: &'a Option<Box<PatriciaTree<K, V>>>) -> *mut PatriciaTree<K, V> {
match *opt_ptr { match *opt_ptr {
Some(ref data) => &**data as *const _ as *mut _, Some(ref data) => &**data as *const _ as *mut _,
None => RawPtr::null() None => ptr::null()
} }
} }
@ -504,7 +504,7 @@ impl<'a, K, V> Iterator<&'a mut V> for MutItems<'a, K, V> {
self.node = child_r; self.node = child_r;
break; break;
} }
self.node = self.parents.pop().unwrap_or(RawPtr::null()); self.node = self.parents.pop().unwrap_or(ptr::null());
} }
} }
// Stop if we've found data. // Stop if we've found data.
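
Here the old trait method `RawPtr::null()` becomes the free function `ptr::null()`; the traversal itself, popping the parent stack and treating an empty stack as a null "done" sentinel, is unchanged. One caveat worth flagging: where the slot is `*mut`, current Rust wants `ptr::null_mut()`, since `ptr::null()` produces a `*const T`. A toy sketch of the pop-or-null step under that assumption (hypothetical `Node` type, not the crate's iterator):

```rust
use std::ptr;

struct Node;

/// Pop the next pending node off the parent stack, or fall back to a
/// null pointer as the "iteration finished" sentinel.
fn next_pending(parents: &mut Vec<*mut Node>) -> *mut Node {
    parents.pop().unwrap_or(ptr::null_mut())
}

fn main() {
    let mut n = Node;
    let mut parents: Vec<*mut Node> = vec![&mut n as *mut Node];
    assert!(!next_pending(&mut parents).is_null());
    assert!(next_pending(&mut parents).is_null());
}
```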
@ -552,7 +552,7 @@ mod tests {
fn patricia_insert_lookup_delete_test() { fn patricia_insert_lookup_delete_test() {
let mut tree = PatriciaTree::new(); let mut tree = PatriciaTree::new();
let mut hashes = vec![]; let mut hashes = vec![];
for i in range(0u32, 5000) { for i in 0u32..5000 {
let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128();
tree.insert(&hash, 250, i); tree.insert(&hash, 250, i);
hashes.push(hash); hashes.push(hash);
@ -593,21 +593,21 @@ mod tests {
let mut tree = PatriciaTree::new(); let mut tree = PatriciaTree::new();
let mut hashes = vec![]; let mut hashes = vec![];
// Start by inserting a bunch of chunder // Start by inserting a bunch of chunder
for i in range(1u32, 500) { for i in 1u32..500 {
let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128();
tree.insert(&hash, 128, i * 1000); tree.insert(&hash, 128, i * 1000);
hashes.push(hash); hashes.push(hash);
} }
// Do the actual test -- note that we also test insertion and deletion // Do the actual test -- note that we also test insertion and deletion
// at the root here. // at the root here.
for i in range(0u32, 10) { for i in 0u32..10 {
tree.insert(&Zero::zero(), i as usize, i); tree.insert(&Zero::zero(), i as usize, i);
} }
for i in range(0u32, 10) { for i in 0u32..10 {
let m = tree.lookup(&Zero::zero(), i as usize); let m = tree.lookup(&Zero::zero(), i as usize);
assert_eq!(m, Some(&i)); assert_eq!(m, Some(&i));
} }
for i in range(0u32, 10) { for i in 0u32..10 {
let m = tree.delete(&Zero::zero(), i as usize); let m = tree.delete(&Zero::zero(), i as usize);
assert_eq!(m, Some(i)); assert_eq!(m, Some(i));
} }
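
All of these test hunks are the same `range(a, b)` to `a..b` rewrite. The shape of the test itself, insert a batch of derived keys, verify every lookup, then delete and verify the value comes back exactly once, is worth seeing in isolation; here is a compressed stand-in using `HashMap` instead of the crate's `PatriciaTree` API:

```rust
use std::collections::HashMap;

fn main() {
    let mut map = HashMap::new();
    let mut keys = vec![];

    // Insert a batch of keys (here just the two bytes of the index).
    for i in 0u32..5000 {
        let key = [(i / 0x100) as u8, (i % 0x100) as u8];
        map.insert(key, i);
        keys.push(key);
    }
    // Every key must come back with the value it was inserted under...
    for (i, key) in keys.iter().enumerate() {
        assert_eq!(map.get(key), Some(&(i as u32)));
    }
    // ...and deleting it must return that value exactly once.
    for (i, key) in keys.iter().enumerate() {
        assert_eq!(map.remove(key), Some(i as u32));
        assert_eq!(map.remove(key), None);
    }
}
```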
@ -625,7 +625,7 @@ mod tests {
let mut tree = PatriciaTree::new(); let mut tree = PatriciaTree::new();
let mut data = Vec::from_elem(n_elems, None); let mut data = Vec::from_elem(n_elems, None);
// Start by inserting a bunch of stuff // Start by inserting a bunch of stuff
for i in range(0, n_elems) { for i in 0..n_elems {
let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128();
tree.insert(&hash, 128, i); tree.insert(&hash, 128, i);
*data.get_mut(i) = Some(()); *data.get_mut(i) = Some(());
@ -647,7 +647,7 @@ mod tests {
let mut tree = PatriciaTree::new(); let mut tree = PatriciaTree::new();
let mut data = Vec::from_elem(n_elems, None); let mut data = Vec::from_elem(n_elems, None);
// Start by inserting a bunch of stuff // Start by inserting a bunch of stuff
for i in range(0, n_elems) { for i in 0..n_elems {
let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128();
tree.insert(&hash, 128, i); tree.insert(&hash, 128, i);
*data.get_mut(i) = Some(()); *data.get_mut(i) = Some(());
@ -673,7 +673,7 @@ mod tests {
// Build a tree // Build a tree
let mut tree = PatriciaTree::new(); let mut tree = PatriciaTree::new();
let mut hashes = vec![]; let mut hashes = vec![];
for i in range(0u32, 5000) { for i in 0u32..5000 {
let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128();
tree.insert(&hash, 250, i); tree.insert(&hash, 250, i);
hashes.push(hash); hashes.push(hash);
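
The remaining test hunks touch only the loop headers. As an aside (not part of this commit), the neighbouring calls `Vec::from_elem(n_elems, None)` and `*data.get_mut(i) = Some(())` were also being phased out of the standard library around this time; a small sketch of their current-Rust equivalents:

```rust
fn main() {
    let n_elems = 16usize;
    // Current equivalent of `Vec::from_elem(n_elems, None)`...
    let mut data: Vec<Option<()>> = vec![None; n_elems];
    // ...and of `*data.get_mut(i) = Some(())`.
    for i in 0..n_elems {
        data[i] = Some(());
    }
    assert!(data.iter().all(Option::is_some));
}
```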


@ -24,8 +24,7 @@ use blockdata::script::Script;
use blockdata::opcodes; use blockdata::opcodes;
use network::constants::Network::{self, Bitcoin, BitcoinTestnet}; use network::constants::Network::{self, Bitcoin, BitcoinTestnet};
use util::hash::Ripemd160Hash; use util::hash::Ripemd160Hash;
use util::base58::Base58Error::{self, InvalidLength, InvalidVersion}; use util::base58::{self, FromBase58, ToBase58};
use util::base58::{FromBase58, ToBase58};
#[derive(Clone, PartialEq, Eq)] #[derive(Clone, PartialEq, Eq)]
/// A Bitcoin address /// A Bitcoin address
@ -100,16 +99,16 @@ impl ToBase58 for Address {
} }
impl FromBase58 for Address { impl FromBase58 for Address {
fn from_base58_layout(data: Vec<u8>) -> Result<Address, Base58Error> { fn from_base58_layout(data: Vec<u8>) -> Result<Address, base58::Error> {
if data.len() != 21 { if data.len() != 21 {
return Err(InvalidLength(data.len())); return Err(base58::Error::InvalidLength(data.len()));
} }
Ok(Address { Ok(Address {
network: match data[0] { network: match data[0] {
0 => Bitcoin, 0 => Bitcoin,
111 => BitcoinTestnet, 111 => BitcoinTestnet,
x => { return Err(InvalidVersion(vec![x])); } x => { return Err(base58::Error::InvalidVersion(vec![x])); }
}, },
hash: Ripemd160Hash::from_slice(data.slice_from(1)) hash: Ripemd160Hash::from_slice(data.slice_from(1))
}) })
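
The address hunk is purely a rename: the flat `Base58Error` variants become paths under `base58::Error`. A standalone sketch of the same decode shape, with a stand-in `Error` enum and the version-byte dispatch visible above (0 for mainnet, 111 for testnet); none of these are the crate's real types:

```rust
#[derive(Debug, PartialEq)]
enum Error {
    InvalidLength(usize),
    InvalidVersion(Vec<u8>),
}

#[derive(Debug, PartialEq)]
enum Network {
    Bitcoin,
    BitcoinTestnet,
}

/// Check a decoded base58 payload: 1 version byte + 20-byte hash160.
fn parse_address_payload(data: &[u8]) -> Result<(Network, [u8; 20]), Error> {
    if data.len() != 21 {
        return Err(Error::InvalidLength(data.len()));
    }
    let network = match data[0] {
        0 => Network::Bitcoin,
        111 => Network::BitcoinTestnet,
        x => return Err(Error::InvalidVersion(vec![x])),
    };
    let mut hash = [0u8; 20];
    hash.copy_from_slice(&data[1..]);
    Ok((network, hash))
}

fn main() {
    let payload = [0u8; 21]; // version 0x00 + zero hash
    assert_eq!(parse_address_payload(&payload).unwrap().0, Network::Bitcoin);
    assert_eq!(parse_address_payload(&[1, 2, 3]), Err(Error::InvalidLength(3)));
}
```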


@ -30,8 +30,7 @@ use secp256k1::key::{PublicKey, SecretKey};
use secp256k1; use secp256k1;
use network::constants::Network::{self, Bitcoin, BitcoinTestnet}; use network::constants::Network::{self, Bitcoin, BitcoinTestnet};
use util::base58::Base58Error::{self, InvalidLength, InvalidVersion, use util::base58;
OtherBase58Error};
use util::base58::{FromBase58, ToBase58}; use util::base58::{FromBase58, ToBase58};
/// A chain code /// A chain code
@ -310,9 +309,9 @@ impl ToBase58 for ExtendedPrivKey {
} }
impl FromBase58 for ExtendedPrivKey { impl FromBase58 for ExtendedPrivKey {
fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPrivKey, Base58Error> { fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPrivKey, base58::Error> {
if data.len() != 78 { if data.len() != 78 {
return Err(InvalidLength(data.len())); return Err(base58::Error::InvalidLength(data.len()));
} }
let cn_int = BigEndian::read_u32(&data[9..13]); let cn_int = BigEndian::read_u32(&data[9..13]);
@ -323,7 +322,7 @@ impl FromBase58 for ExtendedPrivKey {
network: match data.slice_to(4) { network: match data.slice_to(4) {
[0x04, 0x88, 0xAD, 0xE4] => Bitcoin, [0x04, 0x88, 0xAD, 0xE4] => Bitcoin,
[0x04, 0x35, 0x83, 0x94] => BitcoinTestnet, [0x04, 0x35, 0x83, 0x94] => BitcoinTestnet,
_ => { return Err(InvalidVersion(data.slice_to(4).to_vec())); } _ => { return Err(base58::Error::InvalidVersion(data.slice_to(4).to_vec())); }
}, },
depth: data[4], depth: data[4],
parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)), parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)),
@ -331,7 +330,7 @@ impl FromBase58 for ExtendedPrivKey {
chain_code: ChainCode::from_slice(data.slice(13, 45)), chain_code: ChainCode::from_slice(data.slice(13, 45)),
secret_key: try!(SecretKey::from_slice( secret_key: try!(SecretKey::from_slice(
data.slice(46, 78)).map_err(|e| data.slice(46, 78)).map_err(|e|
OtherBase58Error(e.to_string()))) base58::Error::Other(e.to_string())))
}) })
} }
} }
@ -361,9 +360,9 @@ impl ToBase58 for ExtendedPubKey {
} }
impl FromBase58 for ExtendedPubKey { impl FromBase58 for ExtendedPubKey {
fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPubKey, Base58Error> { fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPubKey, base58::Error> {
if data.len() != 78 { if data.len() != 78 {
return Err(InvalidLength(data.len())); return Err(base58::Error::InvalidLength(data.len()));
} }
let cn_int = BigEndian::read_u32(&data[9..13]); let cn_int = BigEndian::read_u32(&data[9..13]);
@ -374,7 +373,7 @@ impl FromBase58 for ExtendedPubKey {
network: match data.slice_to(4) { network: match data.slice_to(4) {
[0x04, 0x88, 0xB2, 0x1E] => Bitcoin, [0x04, 0x88, 0xB2, 0x1E] => Bitcoin,
[0x04, 0x35, 0x87, 0xCF] => BitcoinTestnet, [0x04, 0x35, 0x87, 0xCF] => BitcoinTestnet,
_ => { return Err(InvalidVersion(data.slice_to(4).to_vec())); } _ => { return Err(base58::Error::InvalidVersion(data.slice_to(4).to_vec())); }
}, },
depth: data[4], depth: data[4],
parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)), parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)),
@ -382,7 +381,7 @@ impl FromBase58 for ExtendedPubKey {
chain_code: ChainCode::from_slice(data.slice(13, 45)), chain_code: ChainCode::from_slice(data.slice(13, 45)),
public_key: try!(PublicKey::from_slice( public_key: try!(PublicKey::from_slice(
data.slice(45, 78)).map_err(|e| data.slice(45, 78)).map_err(|e|
OtherBase58Error(e.to_string()))) base58::Error::Other(e.to_string())))
}) })
} }
} }
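
As in the address file, the change here is only the error type's spelling (`base58::Error::InvalidLength`, `InvalidVersion`, `Other`). The version-byte dispatch that survives untouched is the standard BIP32 one: `0x0488ADE4` (xprv) and `0x04358394` (tprv) for private keys, `0x0488B21E` (xpub) and `0x043587CF` (tpub) for public keys. A stand-in sketch of that prefix match with toy types, not the crate's:

```rust
#[derive(Debug, PartialEq)]
enum Network {
    Bitcoin,
    BitcoinTestnet,
}

#[derive(Debug, PartialEq)]
enum Error {
    InvalidVersion(Vec<u8>),
}

/// Map a BIP32 4-byte version prefix to (network, is_private).
fn classify_prefix(prefix: &[u8; 4]) -> Result<(Network, bool), Error> {
    match *prefix {
        [0x04, 0x88, 0xAD, 0xE4] => Ok((Network::Bitcoin, true)),         // xprv
        [0x04, 0x88, 0xB2, 0x1E] => Ok((Network::Bitcoin, false)),        // xpub
        [0x04, 0x35, 0x83, 0x94] => Ok((Network::BitcoinTestnet, true)),  // tprv
        [0x04, 0x35, 0x87, 0xCF] => Ok((Network::BitcoinTestnet, false)), // tpub
        _ => Err(Error::InvalidVersion(prefix.to_vec())),
    }
}

fn main() {
    assert_eq!(
        classify_prefix(&[0x04, 0x88, 0xB2, 0x1E]),
        Ok((Network::Bitcoin, false))
    );
    assert!(classify_prefix(&[0, 0, 0, 0]).is_err());
}
```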