Clean up exit flag handling in TPU
This commit is contained in:
parent
96bfe92334
commit
4808f6a9f8
|
@ -224,14 +224,15 @@ impl BroadcastStage {
|
|||
sock: UdpSocket,
|
||||
cluster_info: Arc<RwLock<ClusterInfo>>,
|
||||
receiver: Receiver<WorkingBankEntries>,
|
||||
exit_sender: Arc<AtomicBool>,
|
||||
exit_sender: &Arc<AtomicBool>,
|
||||
blocktree: &Arc<Blocktree>,
|
||||
) -> Self {
|
||||
let blocktree = blocktree.clone();
|
||||
let exit_sender = exit_sender.clone();
|
||||
let thread_hdl = Builder::new()
|
||||
.name("solana-broadcaster".to_string())
|
||||
.spawn(move || {
|
||||
let _exit = Finalizer::new(exit_sender);
|
||||
let _finalizer = Finalizer::new(exit_sender);
|
||||
Self::run(&sock, &cluster_info, &receiver, &blocktree)
|
||||
})
|
||||
.unwrap();
|
||||
|
@ -299,7 +300,7 @@ mod test {
|
|||
leader_info.sockets.broadcast,
|
||||
cluster_info,
|
||||
entry_receiver,
|
||||
exit_sender,
|
||||
&exit_sender,
|
||||
&blocktree,
|
||||
);
|
||||
|
||||
|
|
|
@ -10,30 +10,28 @@ use std::thread::{self, sleep, Builder, JoinHandle};
|
|||
use std::time::Duration;
|
||||
|
||||
pub struct ClusterInfoVoteListener {
|
||||
exit: Arc<AtomicBool>,
|
||||
thread_hdls: Vec<JoinHandle<()>>,
|
||||
}
|
||||
|
||||
impl ClusterInfoVoteListener {
|
||||
pub fn new(
|
||||
exit: Arc<AtomicBool>,
|
||||
exit: &Arc<AtomicBool>,
|
||||
cluster_info: Arc<RwLock<ClusterInfo>>,
|
||||
sender: PacketSender,
|
||||
) -> Self {
|
||||
let exit1 = exit.clone();
|
||||
let exit = exit.clone();
|
||||
let thread = Builder::new()
|
||||
.name("solana-cluster_info_vote_listener".to_string())
|
||||
.spawn(move || {
|
||||
let _ = Self::recv_loop(&exit1, &cluster_info, &sender);
|
||||
let _ = Self::recv_loop(exit, &cluster_info, &sender);
|
||||
})
|
||||
.unwrap();
|
||||
Self {
|
||||
exit,
|
||||
thread_hdls: vec![thread],
|
||||
}
|
||||
}
|
||||
fn recv_loop(
|
||||
exit: &Arc<AtomicBool>,
|
||||
exit: Arc<AtomicBool>,
|
||||
cluster_info: &Arc<RwLock<ClusterInfo>>,
|
||||
sender: &PacketSender,
|
||||
) -> Result<()> {
|
||||
|
@ -52,9 +50,6 @@ impl ClusterInfoVoteListener {
|
|||
sleep(Duration::from_millis(GOSSIP_SLEEP_MILLIS));
|
||||
}
|
||||
}
|
||||
pub fn close(&self) {
|
||||
self.exit.store(true, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for ClusterInfoVoteListener {
|
||||
|
|
|
@ -14,13 +14,13 @@ pub struct FetchStage {
|
|||
|
||||
impl FetchStage {
|
||||
#[allow(clippy::new_ret_no_self)]
|
||||
pub fn new(sockets: Vec<UdpSocket>, exit: Arc<AtomicBool>) -> (Self, PacketReceiver) {
|
||||
pub fn new(sockets: Vec<UdpSocket>, exit: &Arc<AtomicBool>) -> (Self, PacketReceiver) {
|
||||
let (sender, receiver) = channel();
|
||||
(Self::new_with_sender(sockets, exit, &sender), receiver)
|
||||
}
|
||||
pub fn new_with_sender(
|
||||
sockets: Vec<UdpSocket>,
|
||||
exit: Arc<AtomicBool>,
|
||||
exit: &Arc<AtomicBool>,
|
||||
sender: &PacketSender,
|
||||
) -> Self {
|
||||
let tx_sockets = sockets.into_iter().map(Arc::new).collect();
|
||||
|
@ -28,7 +28,7 @@ impl FetchStage {
|
|||
}
|
||||
fn new_multi_socket(
|
||||
sockets: Vec<Arc<UdpSocket>>,
|
||||
exit: Arc<AtomicBool>,
|
||||
exit: &Arc<AtomicBool>,
|
||||
sender: &PacketSender,
|
||||
) -> Self {
|
||||
let thread_hdls: Vec<_> = sockets
|
||||
|
|
|
@ -78,9 +78,9 @@ impl Tpu {
|
|||
|
||||
let (packet_sender, packet_receiver) = channel();
|
||||
let fetch_stage =
|
||||
FetchStage::new_with_sender(transactions_sockets, exit.clone(), &packet_sender.clone());
|
||||
FetchStage::new_with_sender(transactions_sockets, &exit, &packet_sender.clone());
|
||||
let cluster_info_vote_listener =
|
||||
ClusterInfoVoteListener::new(exit.clone(), cluster_info.clone(), packet_sender);
|
||||
ClusterInfoVoteListener::new(&exit, cluster_info.clone(), packet_sender);
|
||||
|
||||
let (sigverify_stage, verified_receiver) =
|
||||
SigVerifyStage::new(packet_receiver, sigverify_disabled);
|
||||
|
@ -91,7 +91,7 @@ impl Tpu {
|
|||
broadcast_socket,
|
||||
cluster_info.clone(),
|
||||
entry_receiver,
|
||||
exit.clone(),
|
||||
&exit,
|
||||
blocktree,
|
||||
);
|
||||
|
||||
|
|
Loading…
Reference in New Issue