2023-01-11 07:43:07 -08:00
|
|
|
use anchor_client::{
|
2023-02-03 03:39:53 -08:00
|
|
|
solana_sdk::{commitment_config::CommitmentConfig, signature::Keypair},
|
2023-01-11 07:43:07 -08:00
|
|
|
Cluster,
|
|
|
|
};
|
2022-12-16 03:03:21 -08:00
|
|
|
use anchor_lang::prelude::Pubkey;
|
|
|
|
use client::{Client, MangoGroupContext};
|
2022-03-28 10:58:14 -07:00
|
|
|
use futures_channel::mpsc::{unbounded, UnboundedSender};
|
2023-01-11 07:43:07 -08:00
|
|
|
use futures_util::{
|
|
|
|
future::{self, Ready},
|
|
|
|
pin_mut, SinkExt, StreamExt, TryStreamExt,
|
|
|
|
};
|
2022-03-28 10:58:14 -07:00
|
|
|
use log::*;
|
2023-01-11 07:43:07 -08:00
|
|
|
use std::{
|
|
|
|
collections::{HashMap, HashSet},
|
|
|
|
fs::File,
|
|
|
|
io::Read,
|
|
|
|
net::SocketAddr,
|
|
|
|
str::FromStr,
|
|
|
|
sync::Arc,
|
|
|
|
sync::Mutex,
|
|
|
|
time::Duration,
|
|
|
|
};
|
2022-03-28 10:58:14 -07:00
|
|
|
use tokio::{
|
|
|
|
net::{TcpListener, TcpStream},
|
2023-01-11 07:43:07 -08:00
|
|
|
pin, time,
|
2022-03-28 10:58:14 -07:00
|
|
|
};
|
2022-09-10 13:29:17 -07:00
|
|
|
use tokio_tungstenite::tungstenite::{protocol::Message, Error};
|
2022-03-28 10:58:14 -07:00
|
|
|
|
|
|
|
use serde::Deserialize;
|
2023-01-11 07:43:07 -08:00
|
|
|
use solana_geyser_connector_lib::{
|
2023-02-03 03:49:22 -08:00
|
|
|
fill_event_filter::FillEventType,
|
|
|
|
fill_event_postgres_target,
|
2023-01-11 07:43:07 -08:00
|
|
|
metrics::{MetricType, MetricU64},
|
2023-02-03 03:49:22 -08:00
|
|
|
orderbook_filter::MarketConfig,
|
|
|
|
FilterConfig, PostgresConfig, PostgresTlsConfig, StatusResponse,
|
2023-01-11 07:43:07 -08:00
|
|
|
};
|
2022-03-28 10:58:14 -07:00
|
|
|
use solana_geyser_connector_lib::{
|
2022-12-16 03:03:21 -08:00
|
|
|
fill_event_filter::{self, FillCheckpoint, FillEventFilterMessage},
|
2022-10-07 03:44:53 -07:00
|
|
|
grpc_plugin_source, metrics, websocket_source, MetricsConfig, SourceConfig,
|
2022-03-28 10:58:14 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
/// Latest fill checkpoint per queue, shared between the filter sink task and
/// websocket connection handlers (sent to clients on subscribe).
type CheckpointMap = Arc<Mutex<HashMap<String, FillCheckpoint>>>;
/// All currently connected websocket peers, keyed by remote socket address.
type PeerMap = Arc<Mutex<HashMap<SocketAddr, Peer>>>;
|
|
|
|
|
2023-01-20 06:52:01 -08:00
|
|
|
// jemalloc seems to be better at keeping the memory footprint reasonable over
// longer periods of time
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
|
|
|
|
|
2022-12-27 06:57:50 -08:00
|
|
|
/// Commands a websocket client may send, deserialized from JSON with a
/// `"command"` tag field, e.g. `{"command": "subscribe", "marketId": "..."}`.
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "command")]
pub enum Command {
    /// Start receiving fill updates for one market.
    #[serde(rename = "subscribe")]
    Subscribe(SubscribeCommand),
    /// Stop receiving fill updates for one market.
    #[serde(rename = "unsubscribe")]
    Unsubscribe(UnsubscribeCommand),
    /// Request the map of known market ids to market names.
    #[serde(rename = "getMarkets")]
    GetMarkets,
}
|
|
|
|
|
|
|
|
/// Payload of a `subscribe` command.
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SubscribeCommand {
    // Market identifier; must be a key of the market map served by getMarkets.
    pub market_id: String,
}
|
|
|
|
|
|
|
|
/// Payload of an `unsubscribe` command.
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UnsubscribeCommand {
    // Market identifier previously passed to subscribe.
    pub market_id: String,
}
|
|
|
|
|
|
|
|
/// Per-connection state kept in the shared `PeerMap`.
#[derive(Clone, Debug)]
pub struct Peer {
    // Channel used to push outgoing websocket messages to this client;
    // the receiving end is forwarded to the client's ws sink.
    pub sender: UnboundedSender<Message>,
    // Market ids this client subscribed to; only fills for these markets
    // are forwarded to it.
    pub subscriptions: HashSet<String>,
}
|
2022-03-28 10:58:14 -07:00
|
|
|
|
2022-09-10 13:29:17 -07:00
|
|
|
async fn handle_connection_error(
|
2022-03-28 10:58:14 -07:00
|
|
|
checkpoint_map: CheckpointMap,
|
|
|
|
peer_map: PeerMap,
|
2022-12-27 06:57:50 -08:00
|
|
|
market_ids: HashMap<String, String>,
|
2022-03-28 10:58:14 -07:00
|
|
|
raw_stream: TcpStream,
|
|
|
|
addr: SocketAddr,
|
2022-09-23 12:02:40 -07:00
|
|
|
metrics_opened_connections: MetricU64,
|
2022-10-05 15:53:20 -07:00
|
|
|
metrics_closed_connections: MetricU64,
|
2022-03-28 10:58:14 -07:00
|
|
|
) {
|
2022-09-23 12:02:40 -07:00
|
|
|
metrics_opened_connections.clone().increment();
|
|
|
|
|
2023-01-11 07:43:07 -08:00
|
|
|
let result = handle_connection(
|
|
|
|
checkpoint_map,
|
|
|
|
peer_map.clone(),
|
|
|
|
market_ids,
|
|
|
|
raw_stream,
|
|
|
|
addr,
|
|
|
|
)
|
|
|
|
.await;
|
2022-09-10 13:29:17 -07:00
|
|
|
if result.is_err() {
|
|
|
|
error!("connection {} error {}", addr, result.unwrap_err());
|
|
|
|
};
|
2022-09-23 12:02:40 -07:00
|
|
|
|
|
|
|
metrics_closed_connections.clone().increment();
|
|
|
|
|
2022-09-10 13:29:17 -07:00
|
|
|
peer_map.lock().unwrap().remove(&addr);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Serve one websocket client: perform the handshake, register the peer,
/// then run until either the incoming-command stream or the outgoing-update
/// forwarder finishes (disconnect or error on either side).
async fn handle_connection(
    checkpoint_map: CheckpointMap,
    peer_map: PeerMap,
    market_ids: HashMap<String, String>,
    raw_stream: TcpStream,
    addr: SocketAddr,
) -> Result<(), Error> {
    info!("ws connected: {}", addr);
    // Websocket handshake over the raw TCP stream; `?` propagates handshake
    // failures to the caller (handle_connection_error logs them).
    let ws_stream = tokio_tungstenite::accept_async(raw_stream).await?;
    let (ws_tx, ws_rx) = ws_stream.split();

    // 1: publish channel in peer map
    // Messages pushed into chan_tx (by command handling and by the fill sink
    // task) are forwarded to the client's ws sink below.
    let (chan_tx, chan_rx) = unbounded();
    {
        // Scope the lock so it is released before awaiting anything.
        peer_map.lock().unwrap().insert(
            addr,
            Peer {
                sender: chan_tx,
                subscriptions: HashSet::<String>::new(),
            },
        );
    }

    // Incoming side: dispatch each client message.
    let receive_commands = ws_rx.try_for_each(|msg| match msg {
        // Text frames carry JSON commands (subscribe/unsubscribe/getMarkets).
        Message::Text(_) => handle_commands(
            addr,
            msg,
            peer_map.clone(),
            checkpoint_map.clone(),
            market_ids.clone(),
        ),
        // Answer client pings with an empty pong via the peer's own channel.
        Message::Ping(_) => {
            let peers = peer_map.clone();
            let mut peers_lock = peers.lock().unwrap();
            let peer = peers_lock.get_mut(&addr).expect("peer should be in map");
            peer.sender
                .unbounded_send(Message::Pong(Vec::new()))
                .unwrap();
            future::ready(Ok(()))
        }
        // Binary/pong/close frames are ignored here; close ends the stream.
        _ => future::ready(Ok(())),
    });
    // Outgoing side: drain the peer's channel into the websocket sink.
    let forward_updates = chan_rx.map(Ok).forward(ws_tx);

    // Run both directions until either one completes.
    pin_mut!(receive_commands, forward_updates);
    future::select(receive_commands, forward_updates).await;

    peer_map.lock().unwrap().remove(&addr);
    info!("ws disconnected: {}", &addr);
    Ok(())
}
|
|
|
|
|
2022-12-27 06:57:50 -08:00
|
|
|
fn handle_commands(
|
|
|
|
addr: SocketAddr,
|
|
|
|
msg: Message,
|
|
|
|
peer_map: PeerMap,
|
|
|
|
checkpoint_map: CheckpointMap,
|
|
|
|
market_ids: HashMap<String, String>,
|
|
|
|
) -> Ready<Result<(), Error>> {
|
|
|
|
let msg_str = msg.clone().into_text().unwrap();
|
|
|
|
let command: Result<Command, serde_json::Error> = serde_json::from_str(&msg_str);
|
|
|
|
let mut peers = peer_map.lock().unwrap();
|
|
|
|
let peer = peers.get_mut(&addr).expect("peer should be in map");
|
|
|
|
match command {
|
|
|
|
Ok(Command::Subscribe(cmd)) => {
|
|
|
|
let market_id = cmd.clone().market_id;
|
|
|
|
match market_ids.get(&market_id) {
|
|
|
|
None => {
|
|
|
|
let res = StatusResponse {
|
|
|
|
success: false,
|
|
|
|
message: "market not found",
|
|
|
|
};
|
|
|
|
peer.sender
|
|
|
|
.unbounded_send(Message::Text(serde_json::to_string(&res).unwrap()))
|
|
|
|
.unwrap();
|
|
|
|
return future::ok(());
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
let subscribed = peer.subscriptions.insert(market_id.clone());
|
|
|
|
|
|
|
|
let res = if subscribed {
|
|
|
|
StatusResponse {
|
|
|
|
success: true,
|
|
|
|
message: "subscribed",
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
StatusResponse {
|
|
|
|
success: false,
|
|
|
|
message: "already subscribed",
|
|
|
|
}
|
|
|
|
};
|
|
|
|
peer.sender
|
|
|
|
.unbounded_send(Message::Text(serde_json::to_string(&res).unwrap()))
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
if subscribed {
|
|
|
|
let checkpoint_map = checkpoint_map.lock().unwrap();
|
|
|
|
let checkpoint = checkpoint_map.get(&market_id);
|
|
|
|
match checkpoint {
|
|
|
|
Some(checkpoint) => {
|
|
|
|
peer.sender
|
|
|
|
.unbounded_send(Message::Text(
|
|
|
|
serde_json::to_string(&checkpoint).unwrap(),
|
|
|
|
))
|
|
|
|
.unwrap();
|
|
|
|
}
|
2023-01-20 09:29:02 -08:00
|
|
|
None => info!("no checkpoint available on client subscription"),
|
|
|
|
};
|
2022-12-27 06:57:50 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(Command::Unsubscribe(cmd)) => {
|
|
|
|
info!("unsubscribe {}", cmd.market_id);
|
|
|
|
let unsubscribed = peer.subscriptions.remove(&cmd.market_id);
|
|
|
|
let res = if unsubscribed {
|
|
|
|
StatusResponse {
|
|
|
|
success: true,
|
|
|
|
message: "unsubscribed",
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
StatusResponse {
|
|
|
|
success: false,
|
|
|
|
message: "not subscribed",
|
|
|
|
}
|
|
|
|
};
|
|
|
|
peer.sender
|
|
|
|
.unbounded_send(Message::Text(serde_json::to_string(&res).unwrap()))
|
|
|
|
.unwrap();
|
|
|
|
}
|
|
|
|
Ok(Command::GetMarkets) => {
|
|
|
|
info!("getMarkets");
|
|
|
|
peer.sender
|
|
|
|
.unbounded_send(Message::Text(serde_json::to_string(&market_ids).unwrap()))
|
|
|
|
.unwrap();
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
info!("error deserializing user input {:?}", err);
|
|
|
|
let res = StatusResponse {
|
|
|
|
success: false,
|
|
|
|
message: "invalid input",
|
|
|
|
};
|
|
|
|
peer.sender
|
|
|
|
.unbounded_send(Message::Text(serde_json::to_string(&res).unwrap()))
|
|
|
|
.unwrap();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
future::ok(())
|
|
|
|
}
|
|
|
|
|
2022-03-28 10:58:14 -07:00
|
|
|
/// Service configuration, loaded from the TOML file passed as the first CLI
/// argument.
#[derive(Clone, Debug, Deserialize)]
pub struct Config {
    // Account/slot data source (geyser gRPC or websocket) settings.
    pub source: SourceConfig,
    // Metrics reporting settings.
    pub metrics: MetricsConfig,
    // Address:port the fills websocket server binds to.
    pub bind_ws_addr: String,
    // HTTP RPC endpoint used to fetch the mango group context at startup.
    pub rpc_http_url: String,
    // Base58 pubkey of the mango group whose markets are served.
    pub mango_group: String,
}
|
|
|
|
|
|
|
|
/// Service entry point: load config, fetch market metadata over RPC, start the
/// fill-filter pipeline, the websocket server, the db writer and the keepalive
/// task, then block on the account-data source event loop.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args: Vec<String> = std::env::args().collect();

    if args.len() < 2 {
        eprintln!("Please enter a config file path argument.");

        return Ok(());
    }

    // Read and parse the TOML config file given on the command line.
    let config: Config = {
        let mut file = File::open(&args[1])?;
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;
        toml::from_str(&contents).unwrap()
    };

    solana_logger::setup_with_default("info");

    let metrics_tx = metrics::start(config.metrics, "fills".into());

    let metrics_opened_connections =
        metrics_tx.register_u64("fills_feed_opened_connections".into(), MetricType::Counter);

    let metrics_closed_connections =
        metrics_tx.register_u64("fills_feed_closed_connections".into(), MetricType::Counter);

    // Build an RPC client with a throwaway keypair (read-only usage).
    // NOTE(review): the ws url is derived by substring replacement, so a plain
    // "http" rpc url would pass through unchanged — confirm config is https.
    let rpc_url = config.rpc_http_url;
    let ws_url = rpc_url.replace("https", "wss");
    let rpc_timeout = Duration::from_secs(10);
    let cluster = Cluster::Custom(rpc_url.clone(), ws_url.clone());
    let client = Client::new(
        cluster.clone(),
        CommitmentConfig::processed(),
        &Keypair::new(),
        Some(rpc_timeout),
        0,
    );
    // Fetch the group's market/token metadata once at startup.
    let group_context = Arc::new(
        MangoGroupContext::new_from_rpc(
            &client.rpc_async(),
            Pubkey::from_str(&config.mango_group).unwrap(),
        )
        .await?,
    );

    // todo: reload markets at intervals
    // Perp markets: (market address, per-market config for the fill filter).
    let perp_market_configs: Vec<(Pubkey, MarketConfig)> = group_context
        .perp_markets
        .iter()
        .map(|(_, context)| {
            let quote_decimals = match group_context.tokens.get(&context.market.settle_token_index)
            {
                Some(token) => token.decimals,
                None => panic!("token not found for market"), // todo: default to 6 for usdc?
            };
            (
                context.address,
                MarketConfig {
                    name: context.market.name().to_owned(),
                    bids: context.market.bids,
                    asks: context.market.asks,
                    event_queue: context.market.event_queue,
                    base_decimals: context.market.base_decimals,
                    quote_decimals,
                    base_lot_size: context.market.base_lot_size,
                    quote_lot_size: context.market.quote_lot_size,
                },
            )
        })
        .collect();

    // Spot (serum3) markets, keyed by the external serum market pubkey.
    let spot_market_configs: Vec<(Pubkey, MarketConfig)> = group_context
        .serum3_markets
        .iter()
        .map(|(_, context)| {
            let base_decimals = match group_context.tokens.get(&context.market.base_token_index) {
                Some(token) => token.decimals,
                None => panic!("token not found for market"), // todo: default?
            };
            let quote_decimals = match group_context.tokens.get(&context.market.quote_token_index) {
                Some(token) => token.decimals,
                None => panic!("token not found for market"), // todo: default to 6 for usdc?
            };
            (
                context.market.serum_market_external,
                MarketConfig {
                    name: context.market.name().to_owned(),
                    bids: context.bids,
                    asks: context.asks,
                    event_queue: context.event_q,
                    base_decimals,
                    quote_decimals,
                    base_lot_size: context.pc_lot_size as i64,
                    quote_lot_size: context.coin_lot_size as i64,
                },
            )
        })
        .collect();

    // (market address, event queue address) pairs for perp markets; these
    // event queues are the accounts the source subscription filters on.
    let perp_queue_pks: Vec<(Pubkey, Pubkey)> = group_context
        .perp_markets
        .iter()
        .map(|(_, context)| (context.address, context.market.event_queue))
        .collect();

    // NOTE(review): spot_queue_pks is built but never used below —
    // all_queue_pks only concatenates perp_queue_pks; confirm whether spot
    // queues should be part of the filter.
    let spot_queue_pks: Vec<(Pubkey, Pubkey)> = spot_market_configs
        .iter()
        .map(|x| (x.0, x.1.event_queue))
        .collect();
    // a: spot markets as (external market pubkey string, market name).
    let a: Vec<(String, String)> = group_context
        .serum3_markets
        .iter()
        .map(|(_, context)| {
            (
                context.market.serum_market_external.to_string(),
                context.market.name().to_owned(),
            )
        })
        .collect();
    // b: perp markets as (market address string, market name).
    let b: Vec<(String, String)> = group_context
        .perp_markets
        .iter()
        .map(|(_, context)| {
            (
                context.address.to_string(),
                context.market.name().to_owned(),
            )
        })
        .collect();
    // Combined market-id -> name map served to clients via getMarkets and
    // used to validate subscribe commands.
    let market_pubkey_strings: HashMap<String, String> = [a, b].concat().into_iter().collect();

    // TODO: read all this from config
    // Postgres sink settings; "$..." placeholders are presumably expanded by
    // the postgres target from the environment — confirm in
    // fill_event_postgres_target.
    let pgconf = PostgresConfig {
        connection_string: "$PG_CONNECTION_STRING".to_owned(),
        connection_count: 1,
        max_batch_size: 1,
        max_queue_size: 50_000,
        retry_query_max_count: 10,
        retry_query_sleep_secs: 2,
        retry_connection_sleep_secs: 10,
        fatal_connection_timeout_secs: 120,
        allow_invalid_certs: true,
        tls: Some(PostgresTlsConfig {
            ca_cert_path: "$PG_CA_CERT".to_owned(),
            client_key_path: "$PG_CLIENT_KEY".to_owned(),
        }),
    };
    let postgres_update_sender =
        fill_event_postgres_target::init(&pgconf, metrics_tx.clone()).await?;

    // Fill filter pipeline: account/slot writes go in, fill updates and
    // checkpoints come out of fill_receiver.
    let (account_write_queue_sender, slot_queue_sender, fill_receiver) = fill_event_filter::init(
        perp_market_configs.clone(),
        spot_market_configs.clone(),
        metrics_tx.clone(),
    )
    .await?;

    let checkpoints = CheckpointMap::new(Mutex::new(HashMap::new()));
    let peers = PeerMap::new(Mutex::new(HashMap::new()));

    let checkpoints_ref_thread = checkpoints.clone();
    let peers_ref_thread = peers.clone();
    let peers_ref_thread1 = peers.clone();

    // filleventfilter websocket sink
    // Forwards fill updates to subscribed peers, taker perp fills to the db,
    // and keeps the checkpoint map current.
    tokio::spawn(async move {
        pin!(fill_receiver);
        loop {
            let message = fill_receiver.recv().await.unwrap();
            match message {
                FillEventFilterMessage::Update(update) => {
                    debug!(
                        "ws update {} {:?} {:?} fill",
                        update.market_name, update.status, update.event.event_type
                    );
                    // Clone the peer map so the lock is not held across the
                    // awaits below.
                    let mut peer_copy = peers_ref_thread.lock().unwrap().clone();
                    for (addr, peer) in peer_copy.iter_mut() {
                        // NOTE(review): the update is re-serialized once per
                        // peer; could be hoisted out of the loop.
                        let json = serde_json::to_string(&update.clone()).unwrap();
                        // only send updates if the peer is subscribed
                        if peer.subscriptions.contains(&update.market_key) {
                            let result = peer.sender.send(Message::Text(json)).await;
                            if result.is_err() {
                                error!(
                                    "ws update {} fill could not reach {}",
                                    update.market_name, addr
                                );
                            }
                        }
                    }
                    // send taker fills to db
                    let update_c = update.clone();
                    match update_c.event.event_type {
                        FillEventType::Perp => {
                            if !update_c.event.maker {
                                debug!("{:?}", update_c);
                                postgres_update_sender.send(update_c).await.unwrap();
                            }
                        }
                        // Spot fills are not persisted to the db.
                        _ => warn!("failed to write spot event to db"),
                    }
                }
                FillEventFilterMessage::Checkpoint(checkpoint) => {
                    checkpoints_ref_thread
                        .lock()
                        .unwrap()
                        .insert(checkpoint.queue.clone(), checkpoint);
                }
            }
        }
    });

    info!("ws listen: {}", config.bind_ws_addr);
    let try_socket = TcpListener::bind(&config.bind_ws_addr).await;
    let listener = try_socket.expect("Failed to bind");
    {
        // Websocket accept loop.
        tokio::spawn(async move {
            // Let's spawn the handling of each connection in a separate task.
            while let Ok((stream, addr)) = listener.accept().await {
                tokio::spawn(handle_connection_error(
                    checkpoints.clone(),
                    peers.clone(),
                    market_pubkey_strings.clone(),
                    stream,
                    addr,
                    metrics_opened_connections.clone(),
                    metrics_closed_connections.clone(),
                ));
            }
        });
    }

    // keepalive
    // Ping every peer every 30s so dead connections are detected.
    {
        tokio::spawn(async move {
            let mut write_interval = time::interval(time::Duration::from_secs(30));

            loop {
                write_interval.tick().await;
                // Clone peers so the lock is not held across the await.
                let peers_copy = peers_ref_thread1.lock().unwrap().clone();
                for (addr, peer) in peers_copy.iter() {
                    let pl = Vec::new();
                    let result = peer.clone().sender.send(Message::Ping(pl)).await;
                    if result.is_err() {
                        error!("ws ping could not reach {}", addr);
                    }
                }
            }
        });
    }

    info!(
        "rpc connect: {}",
        config
            .source
            .grpc_sources
            .iter()
            .map(|c| c.connection_string.clone())
            .collect::<String>()
    );
    let use_geyser = true;
    // Subscribe only to the event queue accounts of the relevant markets.
    let all_queue_pks = [perp_queue_pks.clone()].concat();
    let relevant_pubkeys = all_queue_pks.iter().map(|m| m.1.to_string()).collect();
    let filter_config = FilterConfig {
        program_ids: vec![],
        account_ids: relevant_pubkeys,
    };
    // Runs forever, feeding account writes and slot updates into the filter.
    if use_geyser {
        grpc_plugin_source::process_events(
            &config.source,
            &filter_config,
            account_write_queue_sender,
            slot_queue_sender,
            metrics_tx.clone(),
        )
        .await;
    } else {
        websocket_source::process_events(
            &config.source,
            account_write_queue_sender,
            slot_queue_sender,
        )
        .await;
    }

    Ok(())
}
|