Configurable max account write queue size

This commit is contained in:
Christian Kamm 2022-03-16 11:04:30 +01:00
parent 80423fdac9
commit f25c49d983
5 changed files with 7 additions and 2 deletions

View File

@@ -19,6 +19,7 @@ program_id = "mv3ekLzLbnVPNxjSKvqBpU3ZeZXPQdEC3bp5MDEBG68"
 connection_string = "host=/var/run/postgresql"
 account_write_connection_count = 4
 account_write_max_batch_size = 10
+account_write_max_queue_size = 10000
 slot_update_connection_count = 4
 retry_query_max_count = 3
 retry_query_sleep_secs = 5

View File

@@ -19,6 +19,7 @@ program_id = ""
 connection_string = "host=/var/run/postgresql"
 account_write_connection_count = 4
 account_write_max_batch_size = 10
+account_write_max_queue_size = 10000
 slot_update_connection_count = 2
 retry_query_max_count = 3
 retry_query_sleep_secs = 5

View File

@@ -242,7 +242,8 @@ pub async fn process_events(
     metrics_sender: metrics::Metrics,
 ) {
     // Subscribe to accountsdb
-    let (msg_sender, msg_receiver) = async_channel::unbounded::<Message>();
+    let (msg_sender, msg_receiver) =
+        async_channel::bounded::<Message>(config.postgres_target.account_write_max_queue_size);
     for grpc_source in config.grpc_sources {
         let msg_sender = msg_sender.clone();
         let snapshot_source = config.snapshot_source.clone();

View File

@@ -74,6 +74,8 @@ pub struct PostgresConfig {
     pub account_write_connection_count: u64,
     /// Maximum batch size for account write inserts over one connection
     pub account_write_max_batch_size: usize,
+    /// Max size of account write queues
+    pub account_write_max_queue_size: usize,
     /// Number of parallel postgres connections used for slot insertions
     pub slot_update_connection_count: u64,
     /// Number of queries retries before fatal error

View File

@@ -338,7 +338,7 @@ pub async fn init(
 )> {
     // The actual message may want to also contain a retry count, if it self-reinserts on failure?
     let (account_write_queue_sender, account_write_queue_receiver) =
-        async_channel::unbounded::<AccountWrite>();
+        async_channel::bounded::<AccountWrite>(config.account_write_max_queue_size);
     // Slot updates flowing from the outside into the single processing thread. From
     // there they'll flow into the postgres sending thread.