Merge pull request #23 from blockworks-foundation/serge/add_metdata_feed
Add metdata feed
commit 7dc3a5afe9

@@ -98,6 +98,7 @@ async fn main() -> anyhow::Result<()> {
         &config,
         &filter_config,
         account_write_queue_sender,
+        None,
         slot_queue_sender,
         metrics_tx.clone(),
         exit.clone(),

@@ -98,6 +98,7 @@ async fn main() -> anyhow::Result<()> {
         &config,
         &filter_config,
         account_write_queue_sender,
+        None,
         slot_queue_sender,
         metrics_tx.clone(),
         exit.clone(),

@@ -80,6 +80,7 @@ async fn main() -> anyhow::Result<()> {
         &config,
         &filter_config,
         account_write_queue_sender,
+        None,
         slot_queue_sender,
     )
     .await;

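For orientation: the three hunks above are the call sites in the service binaries' main() functions. The added None fills the new optional metadata-sender parameter (see the process_events signatures further down), so the existing binaries keep their behavior. A minimal sketch of what a call site could look like if it did want the events; it assumes the surrounding main() and imports from these hunks, the channel and task names are illustrative, and the module path is only presumed from the gRPC source below (none of this is part of the commit):

// Hypothetical wiring: create a metadata channel and pass its sender instead of None.
let (metadata_tx, metadata_rx) = async_channel::unbounded::<FeedMetadata>();

// Drain the events somewhere, e.g. in a background task.
tokio::spawn(async move {
    while let Ok(_event) = metadata_rx.recv().await {
        // handle SnapshotStart / SnapshotEnd / InvalidAccount here
    }
});

grpc_plugin_source::process_events(
    &config,
    &filter_config,
    account_write_queue_sender,
    Some(metadata_tx), // instead of the None added above
    slot_queue_sender,
    metrics_tx.clone(),
    exit.clone(),
)
.await;
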
@@ -27,7 +27,8 @@ use crate::snapshot::{get_snapshot_gma, get_snapshot_gpa};
 use crate::{
     chain_data::SlotStatus,
     metrics::{MetricType, Metrics},
-    AccountWrite, GrpcSourceConfig, SlotUpdate, SnapshotSourceConfig, SourceConfig, TlsConfig,
+    AccountWrite, FeedMetadata, GrpcSourceConfig, SlotUpdate, SnapshotSourceConfig, SourceConfig,
+    TlsConfig,
 };
 use crate::{EntityFilter, FilterConfig};
 

@@ -415,6 +416,7 @@ pub async fn process_events(
     config: &SourceConfig,
     filter_config: &FilterConfig,
     account_write_queue_sender: async_channel::Sender<AccountWrite>,
+    metdata_write_queue_sender: Option<async_channel::Sender<FeedMetadata>>,
     slot_queue_sender: async_channel::Sender<SlotUpdate>,
     metrics_sender: Metrics,
     exit: Arc<AtomicBool>,

@@ -504,6 +506,14 @@ pub async fn process_events(
     let mut metric_snapshot_account_writes =
         metrics_sender.register_u64("grpc_snapshot_account_writes".into(), MetricType::Counter);
 
+    let metadata_sender = |msg| {
+        if let Some(sender) = &metdata_write_queue_sender {
+            sender.send_blocking(msg)
+        } else {
+            Ok(())
+        }
+    };
+
     loop {
         if exit.load(Ordering::Relaxed) {
             warn!("shutting down grpc_plugin_source...");

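The metadata_sender closure hides the Option: when no metadata channel was supplied it returns Ok(()), so the call sites below use a single error-handling path. The two branches unify because send_blocking returns Result<(), SendError<FeedMetadata>> and the bare Ok(()) is inferred to the same type. A self-contained sketch of the pattern, with the payload reduced to a String for illustration (not code from this commit):

// Stand-alone illustration of the optional-sender pattern used above.
fn main() {
    let (tx, rx) = async_channel::unbounded::<String>();
    let maybe_tx: Option<async_channel::Sender<String>> = Some(tx);

    // Same shape as metadata_sender: a no-op Ok(()) when no sender was supplied.
    let send = |msg: String| {
        if let Some(sender) = &maybe_tx {
            sender.send_blocking(msg)
        } else {
            Ok(())
        }
    };

    send("snapshot start".to_string()).expect("send should succeed");
    assert_eq!(rx.try_recv().unwrap(), "snapshot start");
}
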
@@ -593,6 +603,10 @@ pub async fn process_events(
             Message::Snapshot(update) => {
                 metric_snapshots.increment();
                 info!("processing snapshot...");
+                if let Err(e) = metadata_sender(FeedMetadata::SnapshotStart) {
+                    warn!("failed to send feed matadata event: {}", e);
+                }
+
                 for account in update.accounts.iter() {
                     metric_snapshot_account_writes.increment();
                     metric_account_queue.set(account_write_queue_sender.len() as u64);

@@ -607,10 +621,20 @@ pub async fn process_events(
                                 .await
                                 .expect("send success");
                         }
-                        (key, None) => warn!("account not found {}", key),
+                        (key, None) => {
+                            warn!("account not found {}", key);
+                            let pubkey = Pubkey::from_str(key).unwrap();
+                            if let Err(e) = metadata_sender(FeedMetadata::InvalidAccount(pubkey)) {
+                                warn!("failed to send feed matadata event: {}", e);
+                            }
+                        }
                     }
                 }
+
                 info!("processing snapshot done");
+                if let Err(e) = metadata_sender(FeedMetadata::SnapshotEnd) {
+                    warn!("failed to send feed matadata event: {}", e);
+                }
             }
         }
     }

@@ -28,6 +28,12 @@ impl<T, E: std::fmt::Debug> AnyhowWrap for Result<T, E> {
     }
 }
 
+pub enum FeedMetadata {
+    InvalidAccount(Pubkey),
+    SnapshotStart,
+    SnapshotEnd,
+}
+
 #[derive(Clone, PartialEq, Debug)]
 pub struct AccountWrite {
     pub pubkey: Pubkey,

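FeedMetadata is the message type carried on the new optional channel: the source signals when a snapshot pass begins and ends, and reports accounts the snapshot could not deliver. A hedged sketch of a consumer loop (the function and the logging are illustrative, not part of this commit; it assumes FeedMetadata and an async runtime are in scope):

// Illustrative consumer of the metadata channel.
async fn consume_feed_metadata(receiver: async_channel::Receiver<FeedMetadata>) {
    while let Ok(event) = receiver.recv().await {
        match event {
            FeedMetadata::SnapshotStart => {
                // a snapshot pass is starting
            }
            FeedMetadata::SnapshotEnd => {
                // the snapshot pass finished
            }
            FeedMetadata::InvalidAccount(pubkey) => {
                // the snapshot did not return this account
                eprintln!("invalid account in snapshot: {}", pubkey);
            }
        }
    }
}
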
@@ -25,8 +25,8 @@ use crate::snapshot::{
     get_snapshot_gma, get_snapshot_gpa, SnapshotMultipleAccounts, SnapshotProgramAccounts,
 };
 use crate::{
-    chain_data::SlotStatus, AccountWrite, AnyhowWrap, EntityFilter, FilterConfig, SlotUpdate,
-    SourceConfig,
+    chain_data::SlotStatus, AccountWrite, AnyhowWrap, EntityFilter, FeedMetadata, FilterConfig,
+    SlotUpdate, SourceConfig,
 };
 
 const SNAPSHOT_REFRESH_INTERVAL: Duration = Duration::from_secs(300);

@@ -294,6 +294,7 @@ pub async fn process_events(
     config: &SourceConfig,
     filter_config: &FilterConfig,
     account_write_queue_sender: async_channel::Sender<AccountWrite>,
+    metdata_write_queue_sender: Option<async_channel::Sender<FeedMetadata>>,
     slot_queue_sender: async_channel::Sender<SlotUpdate>,
 ) {
     // Subscribe to program account updates websocket

@@ -325,6 +326,13 @@ pub async fn process_events(
     // The thread that pulls updates and forwards them to postgres
     //
 
+    let metadata_sender = |msg| {
+        if let Some(sender) = &metdata_write_queue_sender {
+            sender.send_blocking(msg)
+        } else {
+            Ok(())
+        }
+    };
     // consume websocket updates from rust channels
     loop {
         let update = update_receiver.recv().await.unwrap();

@@ -342,9 +350,13 @@ pub async fn process_events(
             }
             WebsocketMessage::SnapshotUpdate((slot, accounts)) => {
                 trace!("snapshot update {slot}");
+                if let Err(e) = metadata_sender(FeedMetadata::SnapshotStart) {
+                    warn!("failed to send feed matadata event: {}", e);
+                }
+
                 for (pubkey, account) in accounts {
+                    let pubkey = Pubkey::from_str(&pubkey).unwrap();
                     if let Some(account) = account {
-                        let pubkey = Pubkey::from_str(&pubkey).unwrap();
                         account_write_queue_sender
                             .send(AccountWrite::from(
                                 pubkey,

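Note that the Pubkey::from_str(&pubkey) parse is hoisted out of the if let Some(account) arm so the parsed key is also in scope for the new else branch in the next hunk, which reports the account as FeedMetadata::InvalidAccount. A tiny scoping illustration of the same move, assuming Pubkey comes from solana-sdk as is typical (not code from this commit):

use std::str::FromStr;
use solana_sdk::pubkey::Pubkey;

// Parsing before the branch makes `pubkey` usable in both arms.
fn report(key: &str, account: Option<u64>) {
    let pubkey = Pubkey::from_str(key).unwrap();
    if let Some(lamports) = account {
        println!("{} holds {} lamports", pubkey, lamports);
    } else {
        println!("{} missing from snapshot", pubkey);
    }
}
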
@@ -354,8 +366,14 @@ pub async fn process_events(
                             ))
                             .await
                             .expect("send success");
-                    }
+                    } else if let Err(e) = metadata_sender(FeedMetadata::InvalidAccount(pubkey)) {
+                        warn!("failed to send feed matadata event: {}", e);
+                    }
                 }
+
+                if let Err(e) = metadata_sender(FeedMetadata::SnapshotEnd) {
+                    warn!("failed to send feed matadata event: {}", e);
+                }
             }
             WebsocketMessage::SlotUpdate(update) => {
                 trace!("slot update");