diff --git a/zebra-consensus/src/checkpoint/list/tests.rs b/zebra-consensus/src/checkpoint/list/tests.rs
index 52b8c4d5d..e3b5b7ce0 100644
--- a/zebra-consensus/src/checkpoint/list/tests.rs
+++ b/zebra-consensus/src/checkpoint/list/tests.rs
@@ -174,14 +174,13 @@ fn checkpoint_list_duplicate_heights_fail() -> Result<(), BoxError> {
 
     // Parse the genesis block
     let mut checkpoint_data = Vec::new();
-    for b in &[&zebra_test::vectors::BLOCK_MAINNET_GENESIS_BYTES[..]] {
-        let block = Arc::<Block>::zcash_deserialize(*b)?;
-        let hash = block.hash();
-        checkpoint_data.push((
-            block.coinbase_height().expect("test block has height"),
-            hash,
-        ));
-    }
+    let block =
+        Arc::<Block>::zcash_deserialize(&zebra_test::vectors::BLOCK_MAINNET_GENESIS_BYTES[..])?;
+    let hash = block.hash();
+    checkpoint_data.push((
+        block.coinbase_height().expect("test block has height"),
+        hash,
+    ));
 
     // Then add some fake entries with duplicate heights
     checkpoint_data.push((block::Height(1), block::Hash([0xaa; 32])));
@@ -202,14 +201,13 @@ fn checkpoint_list_duplicate_hashes_fail() -> Result<(), BoxError> {
 
     // Parse the genesis block
     let mut checkpoint_data = Vec::new();
-    for b in &[&zebra_test::vectors::BLOCK_MAINNET_GENESIS_BYTES[..]] {
-        let block = Arc::<Block>::zcash_deserialize(*b)?;
-        let hash = block.hash();
-        checkpoint_data.push((
-            block.coinbase_height().expect("test block has height"),
-            hash,
-        ));
-    }
+    let block =
+        Arc::<Block>::zcash_deserialize(&zebra_test::vectors::BLOCK_MAINNET_GENESIS_BYTES[..])?;
+    let hash = block.hash();
+    checkpoint_data.push((
+        block.coinbase_height().expect("test block has height"),
+        hash,
+    ));
 
     // Then add some fake entries with duplicate hashes
     checkpoint_data.push((block::Height(1), block::Hash([0xcc; 32])));
diff --git a/zebra-network/src/protocol/external/codec.rs b/zebra-network/src/protocol/external/codec.rs
index 36e0a9718..c06724f58 100644
--- a/zebra-network/src/protocol/external/codec.rs
+++ b/zebra-network/src/protocol/external/codec.rs
@@ -633,17 +633,23 @@ impl Codec {
     }
 
     fn read_filterload<R: Read>(&self, mut reader: R, body_len: usize) -> Result<Message, Error> {
-        const MAX_FILTERLOAD_LENGTH: usize = 36000;
-        const FILTERLOAD_REMAINDER_LENGTH: usize = 4 + 4 + 1;
+        // The maximum length of a filter.
+        const MAX_FILTERLOAD_FILTER_LENGTH: usize = 36000;
 
-        if !(FILTERLOAD_REMAINDER_LENGTH <= body_len
-            && body_len <= FILTERLOAD_REMAINDER_LENGTH + MAX_FILTERLOAD_LENGTH)
-        {
+        // The data length of the fields:
+        //     hash_functions_count + tweak + flags.
+        const FILTERLOAD_FIELDS_LENGTH: usize = 4 + 4 + 1;
+
+        // The maximum length of a filter message's data.
+        const MAX_FILTERLOAD_MESSAGE_LENGTH: usize =
+            MAX_FILTERLOAD_FILTER_LENGTH + FILTERLOAD_FIELDS_LENGTH;
+
+        if !(FILTERLOAD_FIELDS_LENGTH..=MAX_FILTERLOAD_MESSAGE_LENGTH).contains(&body_len) {
             return Err(Error::Parse("Invalid filterload message body length."));
         }
 
-        // Memory Denial of Service: we just limited the untrusted parsed length
-        let filter_length: usize = body_len - FILTERLOAD_REMAINDER_LENGTH;
+        // Memory Denial of Service: we just checked the untrusted parsed length
+        let filter_length: usize = body_len - FILTERLOAD_FIELDS_LENGTH;
 
         let filter_bytes = zcash_deserialize_bytes_external_count(filter_length, &mut reader)?;
 
         Ok(Message::FilterLoad {