diff --git a/core/src/archiver.rs b/core/src/archiver.rs
index 246b23546..9bd3cb4db 100644
--- a/core/src/archiver.rs
+++ b/core/src/archiver.rs
@@ -4,6 +4,7 @@ use crate::{
     cluster_info::{ClusterInfo, Node, VALIDATOR_PORT_RANGE},
     contact_info::ContactInfo,
     gossip_service::GossipService,
+    packet::{limited_deserialize, PACKET_DATA_SIZE},
     repair_service,
     repair_service::{RepairService, RepairSlotRange, RepairStrategy},
     result::{Error, Result},
@@ -14,7 +15,6 @@ use crate::{
     streamer::{receiver, responder, PacketReceiver},
     window_service::WindowService,
 };
-use bincode::deserialize;
 use crossbeam_channel::unbounded;
 use rand::{thread_rng, Rng, SeedableRng};
 use rand_chacha::ChaChaRng;
@@ -161,7 +161,7 @@ fn create_request_processor(
         if let Ok(packets) = packets {
             for packet in &packets.packets {
                 let req: result::Result<ArchiverRequest> =
-                    deserialize(&packet.data[..packet.meta.size]);
+                    limited_deserialize(&packet.data[..packet.meta.size]);
                 match req {
                     Ok(ArchiverRequest::GetSlotHeight(from)) => {
                         if let Ok(blob) = to_shared_blob(slot, from) {
@@ -933,7 +933,10 @@ impl Archiver {
             socket.send_to(&serialized_req, to).unwrap();
             let mut buf = [0; 1024];
             if let Ok((size, _addr)) = socket.recv_from(&mut buf) {
-                return deserialize(&buf[..size]).unwrap();
+                return bincode::config()
+                    .limit(PACKET_DATA_SIZE as u64)
+                    .deserialize(&buf[..size])
+                    .unwrap();
             }
             sleep(Duration::from_millis(500));
         }
diff --git a/core/src/banking_stage.rs b/core/src/banking_stage.rs
index 917838576..be9b15488 100644
--- a/core/src/banking_stage.rs
+++ b/core/src/banking_stage.rs
@@ -3,13 +3,12 @@
 //! can do its processing in parallel with signature verification on the GPU.
 use crate::{
     cluster_info::ClusterInfo,
-    packet::{Packet, Packets, PACKETS_PER_BATCH},
+    packet::{limited_deserialize, Packet, Packets, PACKETS_PER_BATCH},
     poh_recorder::{PohRecorder, PohRecorderError, WorkingBankEntry},
     poh_service::PohService,
     result::{Error, Result},
     service::Service,
 };
-use bincode::deserialize;
 use crossbeam_channel::{Receiver as CrossbeamReceiver, RecvTimeoutError};
 use itertools::Itertools;
 use solana_ledger::{
@@ -19,11 +18,10 @@ use solana_measure::measure::Measure;
 use solana_metrics::{inc_new_counter_debug, inc_new_counter_info, inc_new_counter_warn};
 use solana_perf::perf_libs;
 use solana_runtime::{accounts_db::ErrorCounters, bank::Bank, transaction_batch::TransactionBatch};
-use solana_sdk::clock::MAX_TRANSACTION_FORWARDING_DELAY_GPU;
 use solana_sdk::{
     clock::{
         Slot, DEFAULT_TICKS_PER_SECOND, DEFAULT_TICKS_PER_SLOT, MAX_PROCESSING_AGE,
-        MAX_TRANSACTION_FORWARDING_DELAY,
+        MAX_TRANSACTION_FORWARDING_DELAY, MAX_TRANSACTION_FORWARDING_DELAY_GPU,
     },
     poh_config::PohConfig,
     pubkey::Pubkey,
@@ -421,7 +419,7 @@ impl BankingStage {
     fn deserialize_transactions(p: &Packets) -> Vec<Option<Transaction>> {
         p.packets
             .iter()
-            .map(|x| deserialize(&x.data[0..x.meta.size]).ok())
+            .map(|x| limited_deserialize(&x.data[0..x.meta.size]).ok())
             .collect()
     }
 
diff --git a/core/src/blob.rs b/core/src/blob.rs
index 1eec8ea02..68068f9d9 100644
--- a/core/src/blob.rs
+++ b/core/src/blob.rs
@@ -404,6 +404,13 @@ pub fn index_blobs(
     }
 }
 
+pub fn limited_deserialize<T>(data: &[u8]) -> bincode::Result<T>
+where
+    T: serde::de::DeserializeOwned,
+{
+    bincode::config().limit(BLOB_SIZE as u64).deserialize(data)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
diff --git a/core/src/cluster_info.rs b/core/src/cluster_info.rs
index 8f862a31b..9b41bde2f 100644
--- a/core/src/cluster_info.rs
+++ b/core/src/cluster_info.rs
@@ -13,20 +13,20 @@
 //!
 //! Bank needs to provide an interface for us to query the stake weight
 use crate::{
-    blob::{to_shared_blob, Blob, SharedBlob},
+    blob::{limited_deserialize, to_shared_blob, Blob, SharedBlob},
     contact_info::ContactInfo,
     crds_gossip::CrdsGossip,
     crds_gossip_error::CrdsGossipError,
     crds_gossip_pull::{CrdsFilter, CRDS_GOSSIP_PULL_CRDS_TIMEOUT_MS},
     crds_value::{self, CrdsData, CrdsValue, CrdsValueLabel, EpochSlots, Vote},
-    packet::Packet,
+    packet::{Packet, PACKET_DATA_SIZE},
     repair_service::RepairType,
     result::{Error, Result},
     sendmmsg::{multicast, send_mmsg},
     streamer::{BlobReceiver, BlobSender},
     weighted_shuffle::{weighted_best, weighted_shuffle},
 };
-use bincode::{deserialize, serialize, serialized_size};
+use bincode::{serialize, serialized_size};
 use core::cmp;
 use itertools::Itertools;
 use rand::{thread_rng, Rng};
@@ -38,7 +37,6 @@ use solana_netutil::{
 };
 use solana_sdk::{
     clock::Slot,
-    packet::PACKET_DATA_SIZE,
     pubkey::Pubkey,
     signature::{Keypair, KeypairUtil, Signable, Signature},
     timing::{duration_as_ms, timestamp},
@@ -1175,7 +1174,7 @@ impl ClusterInfo {
         blobs.iter().for_each(|blob| {
             let blob = blob.read().unwrap();
             let from_addr = blob.meta.addr();
-            deserialize(&blob.data[..blob.meta.size])
+            limited_deserialize(&blob.data[..blob.meta.size])
                 .into_iter()
                 .for_each(|request| match request {
                     Protocol::PullRequest(filter, caller) => {
diff --git a/core/src/packet.rs b/core/src/packet.rs
index 7a72452d9..e8320f691 100644
--- a/core/src/packet.rs
+++ b/core/src/packet.rs
@@ -85,6 +85,15 @@ pub fn to_packets<T: Serialize>(xs: &[T]) -> Vec<Packets> {
     to_packets_chunked(xs, NUM_PACKETS)
 }
 
+pub fn limited_deserialize<T>(data: &[u8]) -> bincode::Result<T>
+where
+    T: serde::de::DeserializeOwned,
+{
+    bincode::config()
+        .limit(PACKET_DATA_SIZE as u64)
+        .deserialize(data)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
diff --git a/core/src/rpc.rs b/core/src/rpc.rs
index fdd021974..da1d8da7b 100644
--- a/core/src/rpc.rs
+++ b/core/src/rpc.rs
@@ -9,7 +9,7 @@ use crate::{
     validator::ValidatorExit,
     version::VERSION,
 };
-use bincode::{deserialize, serialize};
+use bincode::serialize;
 use jsonrpc_core::{Error, Metadata, Result};
 use jsonrpc_derive::rpc;
 use solana_client::rpc_request::{RpcEpochInfo, RpcVoteAccountInfo, RpcVoteAccountStatus};
@@ -655,10 +655,6 @@ impl RpcSol for RpcSolImpl {
     }
 
     fn send_transaction(&self, meta: Self::Metadata, data: Vec<u8>) -> Result<String> {
-        let tx: Transaction = deserialize(&data).map_err(|err| {
-            info!("send_transaction: deserialize error: {:?}", err);
-            Error::invalid_request()
-        })?;
         if data.len() >= PACKET_DATA_SIZE {
             info!(
                 "send_transaction: transaction too large: {} bytes (max: {} bytes)",
@@ -667,6 +663,14 @@ impl RpcSol for RpcSolImpl {
             );
             return Err(Error::invalid_request());
         }
+        let tx: Transaction = bincode::config()
+            .limit(PACKET_DATA_SIZE as u64)
+            .deserialize(&data)
+            .map_err(|err| {
+                info!("send_transaction: deserialize error: {:?}", err);
+                Error::invalid_request()
+            })?;
+
         let transactions_socket = UdpSocket::bind("0.0.0.0:0").unwrap();
         let tpu_addr = get_tpu_addr(&meta.cluster_info)?;
         trace!("send_transaction: leader is {:?}", &tpu_addr);
diff --git a/core/src/sigverify_shreds.rs b/core/src/sigverify_shreds.rs
index 3cfb67819..5bc271b3c 100644
--- a/core/src/sigverify_shreds.rs
+++ b/core/src/sigverify_shreds.rs
@@ -1,8 +1,7 @@
 #![allow(clippy::implicit_hasher)]
-use crate::packet::{Packet, Packets};
+use crate::packet::{limited_deserialize, Packet, Packets};
 use crate::sigverify::{self, TxOffset};
 use crate::sigverify_stage::SigVerifier;
-use bincode::deserialize;
 use rayon::iter::IndexedParallelIterator;
 use rayon::iter::IntoParallelIterator;
 use rayon::iter::IntoParallelRefMutIterator;
@@ -57,7 +56,8 @@ impl ShredSigVerifier {
                         let slot_end = slot_start + size_of::<u64>();
                         trace!("slot {} {}", slot_start, slot_end,);
                         if slot_end <= packet.meta.size {
-                            let slot: u64 = deserialize(&packet.data[slot_start..slot_end]).ok()?;
+                            let slot: u64 =
+                                limited_deserialize(&packet.data[slot_start..slot_end]).ok()?;
                             Some(slot)
                         } else {
                             None
@@ -120,7 +120,7 @@ fn verify_shred_cpu(packet: &Packet, slot_leaders: &HashMap<u64, [u8; 32]>) -> O
     if packet.meta.size < slot_end {
         return Some(0);
     }
-    let slot: u64 = deserialize(&packet.data[slot_start..slot_end]).ok()?;
+    let slot: u64 = limited_deserialize(&packet.data[slot_start..slot_end]).ok()?;
     trace!("slot {}", slot);
     let pubkey = slot_leaders.get(&slot)?;
     if packet.meta.size < sig_end {
@@ -180,7 +180,7 @@ fn slot_key_data_for_gpu<
                             return std::u64::MAX;
                         }
                         let slot: Option<u64> =
-                            deserialize(&packet.data[slot_start..slot_end]).ok();
+                            limited_deserialize(&packet.data[slot_start..slot_end]).ok();
                         match slot {
                             Some(slot) if slot_keys.get(&slot).is_some() => slot,
                             _ => std::u64::MAX,
@@ -379,7 +379,7 @@ fn sign_shred_cpu(
         "packet is not large enough for a slot"
     );
     let slot: u64 =
-        deserialize(&packet.data[slot_start..slot_end]).expect("can't deserialize slot");
+        limited_deserialize(&packet.data[slot_start..slot_end]).expect("can't deserialize slot");
     trace!("slot {}", slot);
     let pubkey = slot_leaders_pubkeys
         .get(&slot)
diff --git a/ledger/src/shred.rs b/ledger/src/shred.rs
index b25d44090..939976812 100644
--- a/ledger/src/shred.rs
+++ b/ledger/src/shred.rs
@@ -115,7 +115,9 @@ impl Shred {
     where
         T: Deserialize<'de>,
     {
-        let ret = bincode::deserialize(&buf[*index..*index + size])?;
+        let ret = bincode::config()
+            .limit(PACKET_DATA_SIZE as u64)
+            .deserialize(&buf[*index..*index + size])?;
         *index += size;
         Ok(ret)
     }
diff --git a/runtime/src/system_instruction_processor.rs b/runtime/src/system_instruction_processor.rs
index ad23f5835..a3b39964b 100644
--- a/runtime/src/system_instruction_processor.rs
+++ b/runtime/src/system_instruction_processor.rs
@@ -1,7 +1,7 @@
 use log::*;
 use solana_sdk::account::KeyedAccount;
 use solana_sdk::instruction::InstructionError;
-use solana_sdk::instruction_processor_utils::next_keyed_account;
+use solana_sdk::instruction_processor_utils::{limited_deserialize, next_keyed_account};
 use solana_sdk::pubkey::Pubkey;
 use solana_sdk::system_instruction::{SystemError, SystemInstruction};
 use solana_sdk::system_program;
@@ -102,8 +102,7 @@ pub fn process_instruction(
     keyed_accounts: &mut [KeyedAccount],
     data: &[u8],
 ) -> Result<(), InstructionError> {
-    let instruction =
-        bincode::deserialize(data).map_err(|_| InstructionError::InvalidInstructionData)?;
+    let instruction = limited_deserialize(data)?;
 
     trace!("process_instruction: {:?}", instruction);
     trace!("keyed_accounts: {:?}", keyed_accounts);
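
Note (not part of the patch): below is a minimal, self-contained sketch of the `limited_deserialize` pattern this patch introduces, using the same bincode 1.x `config()`/`limit()` builder calls that appear in the hunks above. The `PACKET_DATA_SIZE` value is a stand-in chosen for illustration; the real constant lives in the crate's packet module.

// Assumed value, for illustration only; not taken from this diff.
const PACKET_DATA_SIZE: usize = 1232;

// Mirrors the helper added in core/src/packet.rs: cap how many bytes bincode
// may consume while decoding untrusted input.
fn limited_deserialize<T>(data: &[u8]) -> bincode::Result<T>
where
    T: serde::de::DeserializeOwned,
{
    bincode::config()
        .limit(PACKET_DATA_SIZE as u64)
        .deserialize(data)
}

fn main() {
    // A normal, small payload round-trips as before.
    let small = bincode::serialize(&vec![1u8, 2, 3]).unwrap();
    let v: Vec<u8> = limited_deserialize(&small).unwrap();
    assert_eq!(v, vec![1, 2, 3]);

    // A payload larger than the cap is rejected (bincode stops once the read
    // budget is exhausted) instead of being decoded in full.
    let oversized = bincode::serialize(&vec![0u8; 2 * PACKET_DATA_SIZE]).unwrap();
    assert!(limited_deserialize::<Vec<u8>>(&oversized).is_err());
}

With the cap in place, bincode's read budget is enforced as bytes are consumed, so untrusted packets, blobs, RPC payloads, shreds, or instruction data can no longer make a deserializer read or allocate more than roughly one packet's (or blob's) worth of data.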