cleanup(rust): Fix new nightly clippy warnings (#7135)

* Fix "comparison is always true" warning

* Add missing Send bound

* cargo clippy --fix --all-features --all-targets

* Fix "incorrect implementation of clone on a Copy type" warnings

* cargo fmt --all
teor 2023-07-05 17:11:27 +10:00 committed by GitHub
parent f2a2a403a8
commit 147b8fa3a8
26 changed files with 89 additions and 90 deletions
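
Most of the hunks below are the same fix, matching the "incorrect implementation of clone on a Copy type" bullet above: a hand-written Clone impl on a type that is already Copy should simply return `*self`. A minimal sketch of the pattern, using a hypothetical `Wrapper` newtype (the 580-byte size mirrors the note ciphertext types below, but the type itself is illustrative):

    /// A newtype over a large byte array, standing in for types like `EncryptedNote`.
    struct Wrapper([u8; 580]);

    impl Copy for Wrapper {}

    impl Clone for Wrapper {
        fn clone(&self) -> Self {
            // Before: allocate a fresh array and copy the bytes by hand.
            //     let mut bytes = [0; 580];
            //     bytes[..].copy_from_slice(&self.0[..]);
            //     Self(bytes)
            //
            // After: a Copy type clones itself by dereferencing.
            *self
        }
    }

In new code `#[derive(Copy, Clone)]` is the usual choice; these files keep their existing manual impls, so only the body of `clone` changes.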


@@ -82,7 +82,11 @@ impl TryFrom<u32> for Height {
     /// Checks that the `height` is within the valid [`Height`] range.
     fn try_from(height: u32) -> Result<Self, Self::Error> {
         // Check the bounds.
-        if Height::MIN.0 <= height && height <= Height::MAX.0 {
+        //
+        // Clippy warns that `height >= Height::MIN.0` is always true.
+        assert_eq!(Height::MIN.0, 0);
+        if height <= Height::MAX.0 {
             Ok(Height(height))
         } else {
             Err("heights must be less than or equal to Height::MAX")


@@ -486,7 +486,7 @@ mod tests {
         // Compute the AuthDataRoot with a single [0xFF; 32] digest.
         // Since ZIP-244 specifies that this value must be used as the auth digest of
         // pre-V5 transactions, then the roots must match.
-        let expect_auth_root = vec![AuthDigest([0xFF; 32])]
+        let expect_auth_root = [AuthDigest([0xFF; 32])]
             .iter()
             .copied()
             .collect::<AuthDataRoot>();
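
This hunk, and the checkpoint-list test hunks further down, drop a temporary `Vec` that is only ever iterated and use a plain array literal instead (likely clippy's `useless_vec` lint: the heap allocation is unnecessary). A tiny sketch of the same shape, with made-up values:

    // Sketch: an array literal supports the same iterator adapters,
    // without allocating a Vec that is immediately consumed.
    let digests = [1u32, 2, 3]; // was: vec![1u32, 2, 3]
    let sum: u32 = digests.iter().copied().sum();
    assert_eq!(sum, 6);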


@@ -1,3 +1,5 @@
+//! Encrypted parts of Orchard notes.
+
 use std::{fmt, io};
 
 use serde_big_array::BigArray;
@@ -17,9 +19,7 @@ impl Copy for EncryptedNote {}
 impl Clone for EncryptedNote {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 580];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }
@@ -86,9 +86,7 @@ impl Copy for WrappedNoteKey {}
 impl Clone for WrappedNoteKey {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 80];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }


@@ -25,9 +25,7 @@ impl Copy for Bctv14Proof {}
 impl Clone for Bctv14Proof {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 296];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }


@@ -25,9 +25,7 @@ impl Copy for Groth16Proof {}
 impl Clone for Groth16Proof {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 192];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }


@@ -1,3 +1,5 @@
+//! Encrypted parts of Sapling notes.
+
 use std::{fmt, io};
 
 use serde_big_array::BigArray;
@@ -24,9 +26,7 @@ impl Copy for EncryptedNote {}
 impl Clone for EncryptedNote {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 580];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }
@@ -73,9 +73,7 @@ impl Copy for WrappedNoteKey {}
 impl Clone for WrappedNoteKey {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 80];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }


@@ -1,3 +1,5 @@
+//! Encrypted parts of Sprout notes.
+
 use std::{fmt, io};
 
 use serde::{Deserialize, Serialize};
@@ -25,9 +27,7 @@ impl Copy for EncryptedNote {}
 impl Clone for EncryptedNote {
     fn clone(&self) -> Self {
-        let mut bytes = [0; 601];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }


@@ -268,15 +268,15 @@ impl ZcashDeserialize for Option<sapling::ShieldedData<SharedAnchor>> {
         // Create shielded spends from deserialized parts
         let spends: Vec<_> = spend_prefixes
             .into_iter()
-            .zip(spend_proofs.into_iter())
-            .zip(spend_sigs.into_iter())
+            .zip(spend_proofs)
+            .zip(spend_sigs)
             .map(|((prefix, proof), sig)| Spend::<SharedAnchor>::from_v5_parts(prefix, proof, sig))
             .collect();

         // Create shielded outputs from deserialized parts
         let outputs = output_prefixes
             .into_iter()
-            .zip(output_proofs.into_iter())
+            .zip(output_proofs)
             .map(|(prefix, proof)| Output::from_v5_parts(prefix, proof))
             .collect();
@@ -427,7 +427,7 @@ impl ZcashDeserialize for Option<orchard::ShieldedData> {
         // Create the AuthorizedAction from deserialized parts
         let authorized_actions: Vec<orchard::AuthorizedAction> = actions
             .into_iter()
-            .zip(sigs.into_iter())
+            .zip(sigs)
             .map(|(action, spend_auth_sig)| {
                 orchard::AuthorizedAction::from_parts(action, spend_auth_sig)
             })
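
The `.zip(spend_proofs.into_iter())` → `.zip(spend_proofs)` changes here, and the `.chain(...)` changes near the end of the diff, are the same cleanup: `zip` and `chain` accept any `IntoIterator`, so the explicit `.into_iter()` on the argument is redundant (clippy reports this on newer toolchains, likely via `useless_conversion`). A small sketch with made-up data:

    // Sketch: `zip` takes any `IntoIterator`, so `.into_iter()` on the
    // argument adds nothing.
    let prefixes = vec![1, 2, 3];
    let proofs = vec!["a", "b", "c"];

    // was: prefixes.into_iter().zip(proofs.into_iter())
    let pairs: Vec<(i32, &str)> = prefixes.into_iter().zip(proofs).collect();
    assert_eq!(pairs, [(1, "a"), (2, "b"), (3, "c")]);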


@@ -79,7 +79,7 @@ proptest! {
     ) {
         let _init_guard = zebra_test::init();

-        let collection = vec![value_balance1, value_balance2];
+        let collection = [value_balance1, value_balance2];

         let transparent = value_balance1.transparent + value_balance2.transparent;
         let sprout = value_balance1.sprout + value_balance2.sprout;


@@ -87,9 +87,7 @@ impl Copy for Solution {}
 impl Clone for Solution {
     fn clone(&self) -> Self {
-        let mut bytes = [0; SOLUTION_SIZE];
-        bytes[..].copy_from_slice(&self.0[..]);
-        Self(bytes)
+        *self
     }
 }


@@ -103,7 +103,7 @@ fn checkpoint_list_no_genesis_fail() -> Result<(), BoxError> {
 fn checkpoint_list_null_hash_fail() -> Result<(), BoxError> {
     let _init_guard = zebra_test::init();

-    let checkpoint_data = vec![(block::Height(0), block::Hash([0; 32]))];
+    let checkpoint_data = [(block::Height(0), block::Hash([0; 32]))];

     // Make a checkpoint list containing the non-genesis block
     let checkpoint_list: BTreeMap<block::Height, block::Hash> =
@@ -119,7 +119,7 @@ fn checkpoint_list_null_hash_fail() -> Result<(), BoxError> {
 fn checkpoint_list_bad_height_fail() -> Result<(), BoxError> {
     let _init_guard = zebra_test::init();

-    let checkpoint_data = vec![(
+    let checkpoint_data = [(
         block::Height(block::Height::MAX.0 + 1),
         block::Hash([1; 32]),
     )];
@@ -131,7 +131,7 @@ fn checkpoint_list_bad_height_fail() -> Result<(), BoxError> {
         "a checkpoint list with an invalid block height (block::Height::MAX + 1) should fail",
     );

-    let checkpoint_data = vec![(block::Height(u32::MAX), block::Hash([1; 32]))];
+    let checkpoint_data = [(block::Height(u32::MAX), block::Hash([1; 32]))];

     // Make a checkpoint list containing the non-genesis block
     let checkpoint_list: BTreeMap<block::Height, block::Hash> =


@@ -254,7 +254,7 @@ async fn continuous_blockchain(
     // - checkpoints start at genesis
     // - checkpoints end at the end of the range (there's no point in having extra blocks)
     let expected_max_height = block::Height((blockchain_len - 1).try_into().unwrap());
-    let checkpoint_list = vec![
+    let checkpoint_list = [
         &blockchain[0],
         &blockchain[blockchain_len / 3],
         &blockchain[blockchain_len / 2],


@@ -782,7 +782,9 @@ async fn state_error_converted_correctly() {
         "expected matching state and transaction errors"
     );

-    let TransactionError::ValidateContextError(propagated_validate_context_error) = transaction_error else {
+    let TransactionError::ValidateContextError(propagated_validate_context_error) =
+        transaction_error
+    else {
         panic!("should be a ValidateContextError variant");
     };


@@ -228,10 +228,7 @@ impl Config {
         // Ignore disk errors because the cache is optional and the method already logs them.
         let disk_peers = self.load_peer_cache().await.unwrap_or_default();

-        dns_peers
-            .into_iter()
-            .chain(disk_peers.into_iter())
-            .collect()
+        dns_peers.into_iter().chain(disk_peers).collect()
     }

     /// Concurrently resolves `peers` into zero or more IP addresses, with a


@@ -1531,8 +1531,8 @@ where
     /// to be disconnected.
     fn overload_drop_connection_probability(now: Instant, prev: Option<Instant>) -> f32 {
         let Some(prev) = prev else {
-            return MIN_OVERLOAD_DROP_PROBABILITY;
-        };
+            return MIN_OVERLOAD_DROP_PROBABILITY;
+        };

         let protection_fraction_since_last_overload =
             (now - prev).as_secs_f32() / OVERLOAD_PROTECTION_INTERVAL.as_secs_f32();


@@ -1160,7 +1160,7 @@ pub(crate) async fn register_inventory_status(
             let _ = inv_collector
                 .send(InventoryChange::new_available(*advertised, transient_addr));
         }
-        [advertised @ ..] => {
+        advertised => {
             let advertised = advertised
                 .iter()
                 .filter(|advertised| advertised.unmined_tx_id().is_some());


@@ -467,12 +467,13 @@ fn version_user_agent_size_limits() {
     // Encode the rest of the message onto bytes (relay should be optional)
     {
         let Message::Version(VersionMessage {
-            user_agent,
-            start_height,
-            ..
-        }) = invalid_version_message else {
-            unreachable!("version_message is a version");
-        };
+            user_agent,
+            start_height,
+            ..
+        }) = invalid_version_message
+        else {
+            unreachable!("version_message is a version");
+        };

         user_agent
             .zcash_serialize(&mut writer)
@@ -553,7 +554,8 @@ fn reject_command_and_reason_size_limits() {
             ccode,
             reason,
             data,
-        } = invalid_reject_message else {
+        } = invalid_reject_message
+        else {
             unreachable!("invalid_reject_message is a reject");
         };


@@ -521,15 +521,15 @@ where
             //
             // Optional TODO:
             // - add a MempoolChange type with an async changed() method (like ChainTip)
-            let Some(mempool_txs) =
-                fetch_mempool_transactions(mempool.clone(), tip_hash)
-                    .await?
-                    // If the mempool and state responses are out of sync:
-                    // - if we are not long polling, omit mempool transactions from the template,
-                    // - if we are long polling, continue to the next iteration of the loop to make fresh state and mempool requests.
-                    .or_else(|| client_long_poll_id.is_none().then(Vec::new)) else {
-                continue;
-            };
+            let Some(mempool_txs) = fetch_mempool_transactions(mempool.clone(), tip_hash)
+                .await?
+                // If the mempool and state responses are out of sync:
+                // - if we are not long polling, omit mempool transactions from the template,
+                // - if we are long polling, continue to the next iteration of the loop to make fresh state and mempool requests.
+                .or_else(|| client_long_poll_id.is_none().then(Vec::new))
+            else {
+                continue;
+            };

             // - Long poll ID calculation
             let server_long_poll_id = LongPollInput::new(


@@ -39,8 +39,8 @@ pub use crate::methods::get_block_template_rpcs::types::get_block_template::*;
 /// Returns an error if there's a mismatch between the mode and whether `data` is provided.
 pub fn check_parameters(parameters: &Option<JsonParameters>) -> Result<()> {
     let Some(parameters) = parameters else {
-        return Ok(())
-    };
+        return Ok(());
+    };

     match parameters {
         JsonParameters {
@@ -267,7 +267,8 @@ where
     let mempool::Response::FullTransactions {
         transactions,
         last_seen_tip_hash,
-    } = response else {
+    } = response
+    else {
         unreachable!("unmatched response to a mempool::FullTransactions request")
     };


@@ -286,10 +286,11 @@ pub async fn test_responses<State, ReadState>(
         mock_read_state_request_handler,
     );

-    let get_block_template::Response::TemplateMode(get_block_template) = get_block_template
-        .expect("unexpected error in getblocktemplate RPC call") else {
-        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
-    };
+    let get_block_template::Response::TemplateMode(get_block_template) =
+        get_block_template.expect("unexpected error in getblocktemplate RPC call")
+    else {
+        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
+    };

     let coinbase_tx: Transaction = get_block_template
         .coinbase_txn
@@ -330,10 +331,11 @@ pub async fn test_responses<State, ReadState>(
         mock_read_state_request_handler,
     );

-    let get_block_template::Response::TemplateMode(get_block_template) = get_block_template
-        .expect("unexpected error in getblocktemplate RPC call") else {
-        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
-    };
+    let get_block_template::Response::TemplateMode(get_block_template) =
+        get_block_template.expect("unexpected error in getblocktemplate RPC call")
+    else {
+        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
+    };

     let coinbase_tx: Transaction = get_block_template
         .coinbase_txn


@@ -457,7 +457,12 @@ async fn rpc_getrawtransaction() {
         }

         let (response, _) = futures::join!(get_tx_verbose_1_req, make_mempool_req(tx_hash));
-        let GetRawTransaction::Object { hex, height, confirmations } = response.expect("We should have a GetRawTransaction struct") else {
+        let GetRawTransaction::Object {
+            hex,
+            height,
+            confirmations,
+        } = response.expect("We should have a GetRawTransaction struct")
+        else {
             unreachable!("Should return a Raw enum")
         };
@@ -1291,10 +1296,11 @@ async fn rpc_getblocktemplate_mining_address(use_p2pkh: bool) {
         make_mock_read_state_request_handler(),
     );

-    let get_block_template::Response::TemplateMode(get_block_template) = get_block_template
-        .expect("unexpected error in getblocktemplate RPC call") else {
-        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
-    };
+    let get_block_template::Response::TemplateMode(get_block_template) =
+        get_block_template.expect("unexpected error in getblocktemplate RPC call")
+    else {
+        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
+    };

     assert_eq!(
         get_block_template.capabilities,
@@ -1456,10 +1462,11 @@ async fn rpc_getblocktemplate_mining_address(use_p2pkh: bool) {
         make_mock_read_state_request_handler(),
     );

-    let get_block_template::Response::TemplateMode(get_block_template) = get_block_template
-        .expect("unexpected error in getblocktemplate RPC call") else {
-        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
-    };
+    let get_block_template::Response::TemplateMode(get_block_template) =
+        get_block_template.expect("unexpected error in getblocktemplate RPC call")
+    else {
+        panic!("this getblocktemplate call without parameters should return the `TemplateMode` variant of the response")
+    };

     // mempool transactions should be omitted if the tip hash in the GetChainInfo response from the state
     // does not match the `last_seen_tip_hash` in the FullTransactions response from the mempool.


@@ -276,8 +276,5 @@ fn apply_tx_id_changes(
 ) -> BTreeMap<TransactionLocation, transaction::Hash> {
     // Correctness: compensate for inconsistent tx IDs finalized blocks across multiple addresses,
     // by combining them with overlapping non-finalized block tx IDs.
-    finalized_tx_ids
-        .into_iter()
-        .chain(chain_tx_ids.into_iter())
-        .collect()
+    finalized_tx_ids.into_iter().chain(chain_tx_ids).collect()
 }


@@ -370,7 +370,7 @@ fn apply_utxo_changes(
     // to compensate for overlapping finalized and non-finalized blocks.
     finalized_utxos
         .into_iter()
-        .chain(created_chain_utxos.into_iter())
+        .chain(created_chain_utxos)
         .filter(|(utxo_location, _output)| !spent_chain_utxos.contains(utxo_location))
         .collect()
 }


@@ -146,6 +146,7 @@ pub struct ResponseSender<Request, Response, Error> {
 impl<Request, Response, Assertion, Error> Service<Request>
     for MockService<Request, Response, Assertion, Error>
 where
+    Request: Send + 'static,
     Response: Send + 'static,
     Error: Send + 'static,
 {
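
This one-line bound matches the "Add missing Send bound" item in the commit message: the other type parameters on this impl already carry `Send + 'static` bounds, and the request presumably needs the same bound so it can be moved into a `Send` future or sent across threads. A rough sketch of the kind of impl that forces such a bound, using a toy `Echo` service rather than Zebra's `MockService`:

    use std::{future::Future, pin::Pin, task::{Context, Poll}};

    use tower::Service;

    /// A toy service that echoes its request from a boxed, Send future.
    /// (Illustrative only; Zebra's `MockService` is more involved.)
    struct Echo;

    impl<Request> Service<Request> for Echo
    where
        // Without `Request: Send + 'static`, the boxed future below could not
        // be `Send`, so it could not be awaited from a spawned task on a
        // multi-threaded Tokio runtime.
        Request: Send + 'static,
    {
        type Response = Request;
        type Error = std::convert::Infallible;
        type Future = Pin<Box<dyn Future<Output = Result<Request, Self::Error>> + Send>>;

        fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
            Poll::Ready(Ok(()))
        }

        fn call(&mut self, request: Request) -> Self::Future {
            // The request is moved into the future, so it must be Send + 'static.
            Box::pin(async move { Ok(request) })
        }
    }

Dropping the bound makes the `Send` coercion of the boxed future fail to compile, which is the class of error the added bound avoids.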


@@ -288,11 +288,7 @@ to create a github token."
     let mut num_closed_issues = 0;

     while let Some(res) = github_api_requests.join_next().await {
-        let Ok((
-            res,
-            id,
-            issue_refs,
-        )) = res else {
+        let Ok((res, id, issue_refs)) = res else {
             println!("warning: failed to join api request thread/task");
             continue;
         };


@@ -303,7 +303,7 @@ impl ZebraCheckpointsTestDirExt for TempDir {
         let zebra_checkpoints = self.spawn_child_with_command(zebra_checkpoints_path, args.clone());

-        let Err(system_path_error) = zebra_checkpoints else {
+        let Err(system_path_error) = zebra_checkpoints else {
             return zebra_checkpoints;
         };