Eclipse integration (#35)

* add fixes to run autobahn on Eclipse

* add configuration files for Eclipse

* add CONFIG_PATH build arg

* minor config changes and bugfixes

* increase acceptable slot lag, since Eclipse does not have much activity yet

---------

Co-authored-by: Riordan Panayides <riordan@panayid.es>
galactus 2024-11-06 13:57:40 +01:00 committed by GitHub
parent 279aaf271f
commit fa5a08973f
13 changed files with 224 additions and 19 deletions

View File

@ -16,10 +16,11 @@ COPY . .
RUN cargo build --release --bin autobahn-router
FROM debian:bookworm-slim as run
ARG CONFIG_PATH=/app/bin/autobahn-router/template-config.toml
RUN apt-get update && apt-get -y install ca-certificates libc6 libssl3 libssl-dev openssl
COPY --from=build /app/target/release/autobahn-router /usr/local/bin/
COPY --from=build /app/bin/autobahn-router/template-config.toml /usr/local/bin/template-config.toml
COPY --from=build $CONFIG_PATH /usr/local/bin/template-config.toml
RUN adduser --system --group --no-create-home mangouser
USER mangouser
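For local testing, a minimal sketch of building the image with the new CONFIG_PATH build arg pointing at the Eclipse template; the Dockerfile path and arg value are taken from fly-eclipse.toml below, the image tag is illustrative:

# run from the repository root; omitting --build-arg keeps the default template-config.toml
docker build \
  -f bin/autobahn-router/Dockerfile \
  --build-arg CONFIG_PATH=/app/bin/autobahn-router/template-config-eclipse.toml \
  -t autobahn-router:eclipse .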

View File

@ -0,0 +1,99 @@
snapshot_timeout_in_seconds = 900
[infinity]
enabled = false
[orca]
enabled = true
mints = []
take_all_mints = true
add_mango_tokens = false
[cropper]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[openbook_v2]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[raydium]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[raydium_cp]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[saber]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[routing]
path_cache_validity_ms = 30000
path_warming_mode = "ConfiguredMints"
#path_warming_mode = "HotMints"
path_warming_amounts = [100, 1000, 10_000]
path_warming_for_mints = [
"So11111111111111111111111111111111111111112", # SOL
]
path_warming_interval_secs = 5
path_warming_max_accounts = [20, 30, 40, 64]
lookup_tables = []
cooldown_duration_multihop_secs = 30
cooldown_duration_singlehop_secs = 60
max_path_length = 3
retain_path_count = 5
max_edge_per_pair = 5
max_edge_per_cold_pair = 2
slot_excessive_lag_max_duration_secs = 1600
[server]
address = "[::]:8888"
[metrics]
output_http = true
prometheus_address = "[::]:9091"
output_stdout = false
[[sources]]
dedup_queue_size = 50000
rpc_http_url = "$RPC_HTTP_URL"
rpc_support_compression = false
re_snapshot_interval_secs = 1200
request_timeout_in_seconds = 300
[[sources.grpc_sources]]
name = "router-eclipse"
connection_string = "$RPC_HTTP_URL_WITHOUT_TOKEN"
token = "$RPC_TOKEN"
retry_connection_sleep_secs = 30
[price_feed]
birdeye_token = "$BIRDEYE_TOKEN"
refresh_interval_secs = 1200 # every 20 min
[safety_checks]
check_quote_out_amount_deviation = true
min_quote_out_to_in_amount_ratio = 0.65
[hot_mints]
always_hot_mints = [
"So11111111111111111111111111111111111111112", # SOL
]
keep_latest_count = 50
[debug_config]
reprice_using_live_rpc = true
reprice_probability = 0.05

View File

@ -52,7 +52,7 @@ path_warming_for_mints = [
# "27G8MtK7VtTcCHkpASjSDdkWWYfoqT6ggEuKidVJidD4" # JLP
]
path_warming_interval_secs = 5
path_warming_max_accounts = [20, 25, 30, 35, 40, 64]
path_warming_max_accounts = [20, 30, 40, 64]
lookup_tables = ["87TgskchTNEv1uXkGQk1U4zt65tjqbfGAZWNMGAcRRPx",
"AgCBUZ6UMWqPLftTxeAqpQxtrfiCyL2HgRfmmM6QTfCj",
"A1v3qxN7HbUvtyPnnaoCrKonXjkFLaDHXk3S6R2QfEaw",

fly-eclipse.toml Normal file
View File

@ -0,0 +1,25 @@
app = "router-eclipse"
primary_region = "fra"
kill_signal = "SIGTERM"
kill_timeout = "30s"
[build]
dockerfile = 'bin/autobahn-router/Dockerfile'
[build.args]
CONFIG_PATH="/app/bin/autobahn-router/template-config-eclipse.toml"
[experimental]
cmd = ["autobahn-router", "/usr/local/bin/template-config.toml"]
[[vm]]
size = "shared-cpu-4x"
memory = "8gb"
[[restart]]
policy = "always"
retries = 10
[metrics]
port = 9091
path = "/metrics"

View File

@ -1,5 +1,5 @@
app = "router-2"
primary_region = "dfw"
primary_region = "ams"
kill_signal = "SIGTERM"
kill_timeout = "30s"

View File

@ -310,10 +310,17 @@ pub async fn fetch_all_whirlpools(
.await?;
let result = whirlpools
.iter()
.map(|account| {
let whirlpool: Whirlpool =
AnchorDeserialize::deserialize(&mut &account.data[8..]).unwrap();
(account.pubkey, whirlpool)
.filter_map(|account| {
let pubkey = account.pubkey;
let whirlpool: Result<Whirlpool, std::io::Error> =
AnchorDeserialize::deserialize(&mut &account.data[8..]);
match whirlpool {
Ok(whirlpool) => Some((account.pubkey, whirlpool)),
Err(e) => {
error!("Error deserializing whirlpool account : {pubkey:?} error: {e:?}");
None
}
}
})
.collect_vec();
Ok(result)

View File

@ -5,7 +5,7 @@ use std::sync::Arc;
use anchor_lang::Id;
use anchor_spl::token::spl_token;
use anchor_spl::token::spl_token::state::AccountState;
use anchor_spl::token::spl_token::state::{Account, AccountState};
use anchor_spl::token_2022::Token2022;
use anyhow::Context;
use itertools::Itertools;
@ -229,7 +229,12 @@ impl OrcaDex {
.iter()
.filter(|x| {
x.1.owner == Token2022::id()
|| spl_token::state::Account::unpack(x.1.data()).unwrap().state
|| spl_token::state::Account::unpack(x.1.data())
.unwrap_or(Account {
state: AccountState::Frozen,
..Default::default()
})
.state
== AccountState::Frozen
})
.map(|x| x.0)
@ -246,7 +251,9 @@ impl OrcaDex {
// TODO: actually need to dynamically adjust subscriptions based on the tick?
let tick_arrays = filtered_pools
.iter()
.map(|(pk, wp)| whirlpool_tick_array_pks(wp, pk, program_id))
.map(|(pk, wp)| {
whirlpool_tick_array_pks(wp, pk, program_id)
})
.collect_vec();
let edge_pairs = filtered_pools

View File

@ -8,6 +8,16 @@ use router_lib::test_tools::{generate_dex_rpc_dump, rpc};
#[tokio::test]
async fn test_dump_input_data_cropper() -> anyhow::Result<()> {
let is_eclipse = std::env::var("ECLIPSE")
.map(|x| {
let value: bool = x.parse().unwrap();
value
})
.unwrap_or_default();
if is_eclipse {
// cropper is not yet on Eclipse
return Ok(());
}
let options = HashMap::from([
(
"program_id".to_string(),

View File

@ -205,6 +205,47 @@ pub async fn get_compressed_program_account_rpc(
Ok((min_slot, snap_result))
}
// called on startup to get the required accounts: a few calls returning on the order of 100 thousand accounts
#[tracing::instrument(skip_all, level = "trace")]
pub async fn get_uncompressed_program_account_rpc(
rpc_client: &RpcClient,
filters: &HashSet<Pubkey>,
config: RpcProgramAccountsConfig,
) -> anyhow::Result<(u64, Vec<AccountWrite>)> {
let slot = rpc_client.get_slot().await?;
let config = RpcProgramAccountsConfig {
with_context: Some(true),
account_config: RpcAccountInfoConfig {
encoding: Some(UiAccountEncoding::Base64),
min_context_slot: None,
commitment: config.account_config.commitment,
data_slice: config.account_config.data_slice,
},
filters: config.filters,
};
let mut snap_result = vec![];
let mut min_slot = u64::MAX;
// plain (uncompressed) getProgramAccounts, one call per program id
for program_id in filters.iter() {
info!("gPA for {}", program_id);
min_slot = slot.min(min_slot);
let account_snapshot = rpc_client
.get_program_accounts_with_config(&program_id, config.clone())
.await
.map_err_anyhow()?;
tracing::log::debug!("gpa snapshot received {}", program_id);
let iter = account_snapshot.iter().map(|(pk, account)| {
account_write_from(*pk, slot, SNAP_ACCOUNT_WRITE_VERSION, account.clone())
});
snap_result.extend(iter);
}
Ok((min_slot, snap_result))
}
// called on startup to get the required accounts: a few calls returning on the order of 100 thousand accounts
#[tracing::instrument(skip_all, level = "trace")]
pub async fn get_uncompressed_program_account(

View File

@ -9,7 +9,10 @@ use solana_sdk::account::Account;
use solana_sdk::pubkey::Pubkey;
use crate::account_write::AccountWrite;
use crate::get_program_account::{fetch_multiple_accounts, get_compressed_program_account_rpc};
use crate::get_program_account::{
fetch_multiple_accounts, get_compressed_program_account_rpc,
get_uncompressed_program_account_rpc,
};
use crate::router_rpc_client::RouterRpcClientTrait;
pub struct RouterRpcWrapper {
@ -52,10 +55,21 @@ impl RouterRpcClientTrait for RouterRpcWrapper {
pubkey: &Pubkey,
config: RpcProgramAccountsConfig,
) -> anyhow::Result<Vec<AccountWrite>> {
Ok(
get_compressed_program_account_rpc(&self.rpc, &HashSet::from([*pubkey]), config)
.await?
.1,
)
let disable_compressed =
std::env::var("DISABLE_COMRPESSED_GPA").unwrap_or("false".to_string());
let disable_compressed: bool = disable_compressed.trim().parse().unwrap_or(false);
if disable_compressed {
Ok(
get_uncompressed_program_account_rpc(&self.rpc, &HashSet::from([*pubkey]), config)
.await?
.1,
)
} else {
Ok(
get_compressed_program_account_rpc(&self.rpc, &HashSet::from([*pubkey]), config)
.await?
.1,
)
}
}
}
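A minimal usage sketch for this escape hatch, assuming the binary and config path from fly-eclipse.toml above; the variable name is spelled exactly as in the code:

# opt out of compressed gPA snapshots and fall back to plain getProgramAccounts
export DISABLE_COMRPESSED_GPA=true
autobahn-router /usr/local/bin/template-config.toml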

View File

@ -194,7 +194,6 @@ pub trait DexInterface: Sync + Send {
/// simulation tests.
fn program_ids(&self) -> HashSet<Pubkey>;
/// Initializes an Edge from ChainData (production) or BanksClient (test).
/// The Edge will be dropped once a new Edge for the same EdgeIndentifier
/// has been initialized. After calling initialize the DexInterface needs

View File

@ -227,7 +227,7 @@ pub async fn run_dump_swap_ix_with_custom_amount(
continue;
};
debug!(
println!(
"#{} || quote: {} => {} : {} => {}",
success,
id.input_mint(),

View File

@ -8,7 +8,9 @@ export DUMP_MAINNET_DATA=1 RUST_LOG=info
# additionally define
# RPC_HTTP_URL="http://fcs-ams1._peer.internal:18899"
# for Eclipse
# export ECLIPSE=true
# export DISABLE_COMRPESSED_GPA=true
# saber
DUMP_SABER_START=$(date)