Merge branch 'blockworks-foundation:main' into main

This commit is contained in:
Michał Smykała 2024-11-06 14:02:02 +01:00 committed by GitHub
commit 3a0167cfff
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
42 changed files with 965 additions and 367 deletions

40
Cargo.lock generated
View File

@ -681,7 +681,7 @@ dependencies = [
[[package]]
name = "autobahn-executor"
version = "1.0.0"
version = "1.0.1"
dependencies = [
"bonfida-test-utils",
"bytemuck",
@ -1409,6 +1409,12 @@ dependencies = [
"generic-array 0.14.7",
]
[[package]]
name = "circular-buffer"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67261db007b5f4cf8cba393c1a5c511a5cc072339ce16e12aeba1d7b9b77946"
[[package]]
name = "clap"
version = "2.34.0"
@ -3947,6 +3953,26 @@ version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "litesvm"
version = "0.1.0"
source = "git+https://github.com/blockworks-foundation/litesvm.git?branch=v0.1.0+solana_1.7#4c884606289e484090be42a08b79dc3bc5f449f7"
dependencies = [
"bincode",
"indexmap 2.2.6",
"itertools 0.12.1",
"log 0.4.21",
"solana-address-lookup-table-program",
"solana-bpf-loader-program",
"solana-compute-budget-program",
"solana-loader-v4-program",
"solana-program",
"solana-program-runtime",
"solana-sdk",
"solana-system-program",
"thiserror",
]
[[package]]
name = "lock_api"
version = "0.3.4"
@ -5392,7 +5418,7 @@ dependencies = [
[[package]]
name = "quic-geyser-client"
version = "0.1.5"
source = "git+https://github.com/blockworks-foundation/quic_geyser_plugin.git?branch=router_v1.17.29#8efcc200c795b1236675b161c04e5e65e00ace48"
source = "git+https://github.com/blockworks-foundation/quic_geyser_plugin.git?branch=router_v1.17.29#594077ccebfde826920192c17b0e501bb185650e"
dependencies = [
"anyhow",
"bincode",
@ -5409,10 +5435,11 @@ dependencies = [
[[package]]
name = "quic-geyser-common"
version = "0.1.5"
source = "git+https://github.com/blockworks-foundation/quic_geyser_plugin.git?branch=router_v1.17.29#8efcc200c795b1236675b161c04e5e65e00ace48"
source = "git+https://github.com/blockworks-foundation/quic_geyser_plugin.git?branch=router_v1.17.29#594077ccebfde826920192c17b0e501bb185650e"
dependencies = [
"anyhow",
"bincode",
"circular-buffer",
"itertools 0.10.5",
"log 0.4.21",
"lz4",
@ -6037,6 +6064,7 @@ dependencies = [
"serde_derive",
"serde_json",
"serde_with 2.3.3",
"sha2 0.10.8",
"solana-account-decoder",
"solana-client",
"solana-sdk",
@ -6840,13 +6868,17 @@ version = "0.1.0"
dependencies = [
"anyhow",
"autobahn-executor",
"base64 0.12.3",
"bincode",
"bonfida-test-utils",
"env_logger",
"litesvm",
"log 0.4.21",
"router-test-lib",
"sha2 0.10.8",
"solana-address-lookup-table-program",
"solana-program",
"solana-program-runtime",
"solana-program-test",
"solana-sdk",
"spl-associated-token-account 1.1.3",
@ -6854,6 +6886,8 @@ dependencies = [
"spl-token-2022 1.0.0",
"test-case",
"tokio",
"tracing",
"tracing-subscriber",
]
[[package]]

View File

@ -16,10 +16,11 @@ COPY . .
RUN cargo build --release --bin autobahn-router
FROM debian:bookworm-slim as run
ARG CONFIG_PATH=/app/bin/autobahn-router/template-config.toml
RUN apt-get update && apt-get -y install ca-certificates libc6 libssl3 libssl-dev openssl
COPY --from=build /app/target/release/autobahn-router /usr/local/bin/
COPY --from=build /app/bin/autobahn-router/template-config.toml /usr/local/bin/template-config.toml
COPY --from=build $CONFIG_PATH /usr/local/bin/template-config.toml
RUN adduser --system --group --no-create-home mangouser
USER mangouser

View File

@ -272,7 +272,7 @@ impl EdgeState {
}
pub fn cached_price_exact_out_for(&self, out_amount: u64) -> Option<(f64, f64)> {
if !self.is_valid_out() {
if !self.is_valid() {
return None;
}
@ -304,22 +304,6 @@ impl EdgeState {
true
}
pub fn is_valid_out(&self) -> bool {
if !self.is_valid {
return false;
}
if self.cooldown_until.is_some() {
// Do not check time here !
// We will reset "cooldown until" on first account update coming after cooldown
// So if this is not reset yet, it means that we didn't change anything
// No reason to be working again
return false;
}
true
}
pub fn reset_cooldown(&mut self) {
self.cooldown_event += 0;
self.cooldown_until = None;

View File

@ -16,7 +16,7 @@ use std::time::{Duration, Instant};
use tokio::sync::broadcast;
use tokio::sync::broadcast::error::RecvError;
use tokio::task::JoinHandle;
use tracing::{debug, error, info, trace, warn};
use tracing::{debug, error, info, warn};
#[derive(Clone)]
pub struct Dex {
@ -79,7 +79,7 @@ pub fn spawn_updater_job(
register_mint_sender: async_channel::Sender<Pubkey>,
ready_sender: async_channel::Sender<()>,
mut slot_updates: broadcast::Receiver<u64>,
mut account_updates: broadcast::Receiver<(Pubkey, u64)>,
mut account_updates: broadcast::Receiver<(Pubkey, Pubkey, u64)>,
mut metadata_updates: broadcast::Receiver<FeedMetadata>,
mut price_updates: broadcast::Receiver<PriceUpdate>,
mut exit: broadcast::Receiver<()>,
@ -167,20 +167,19 @@ pub fn spawn_updater_job(
if !updater.invalidate_one(res) {
break 'drain_loop;
}
// let mut batchsize: u32 = 0;
// let started_at = Instant::now();
// 'batch_loop: while let Ok(res) = account_updates.try_recv() {
// batchsize += 1;
// if !updater.invalidate_one(Ok(res)) {
// break 'drain_loop;
// }
let mut batchsize: u32 = 0;
let started_at = Instant::now();
'batch_loop: while let Ok(res) = account_updates.try_recv() {
batchsize += 1;
if !updater.invalidate_one(Ok(res)) {
break 'drain_loop;
}
// budget for microbatch
if batchsize > 10 || started_at.elapsed() > Duration::from_micros(500) {
break 'batch_loop;
}
}
// // budget for microbatch
// if batchsize > 10 || started_at.elapsed() > Duration::from_micros(500) {
// break 'batch_loop;
// }
// }
},
Ok(price_upd) = price_updates.recv() => {
if let Some(impacted_edges) = updater.state.edges_per_mint.get(&price_upd.mint) {
@ -242,14 +241,17 @@ impl EdgeUpdater {
Some(since) => {
if since.elapsed() > max_lag_duration {
panic!(
"Lagging a lot {} for more than {}s, exiting..",
"Lagging a lot {} for more than {}s, for dex {}..",
lag,
max_lag_duration.as_secs()
max_lag_duration.as_secs(),
self.dex.name,
);
}
}
}
return;
} else if state.slot_excessive_lagging_since.is_some() {
state.slot_excessive_lagging_since = None;
}
}
}
@ -309,9 +311,8 @@ impl EdgeUpdater {
}
}
fn invalidate_one(&mut self, res: Result<(Pubkey, u64), RecvError>) -> bool {
let state = &mut self.state;
let (pk, slot) = match res {
fn invalidate_one(&mut self, res: Result<(Pubkey, Pubkey, u64), RecvError>) -> bool {
let (pk, owner, slot) = match res {
Ok(v) => v,
Err(broadcast::error::RecvError::Closed) => {
error!("account update channel closed unexpectedly");
@ -326,6 +327,11 @@ impl EdgeUpdater {
}
};
// check if we need the update
if !self.do_update(&pk, &owner) {
return true;
}
let state = &mut self.state;
if let Some(impacted_edges) = self.dex.edges_per_pk.get(&pk) {
for edge in impacted_edges {
state.dirty_edges.insert(edge.unique_id(), edge.clone());
@ -333,7 +339,9 @@ impl EdgeUpdater {
};
state.received_account.insert(pk);
state.latest_slot_pending = slot;
if state.latest_slot_pending < slot {
state.latest_slot_pending = slot;
}
self.check_readiness();
@ -368,6 +376,27 @@ impl EdgeUpdater {
}
}
// ignore update if current dex does not need it
fn do_update(&mut self, pk: &Pubkey, owner: &Pubkey) -> bool {
if self.dex.edges_per_pk.contains_key(pk) {
return true;
}
match &self.dex.subscription_mode {
DexSubscriptionMode::Accounts(accounts) => {
return accounts.contains(pk)
},
DexSubscriptionMode::Disabled => {
false
},
DexSubscriptionMode::Programs(programs) => {
programs.contains(pk) || programs.contains(owner)
},
DexSubscriptionMode::Mixed(m) => {
m.accounts.contains(pk) || m.token_accounts_for_owner.contains(pk) || m.programs.contains(pk) || m.programs.contains(owner)
}
}
}
fn refresh_some(&mut self) {
let state = &mut self.state;
if state.dirty_edges.is_empty() || !state.is_ready {
@ -398,7 +427,7 @@ impl EdgeUpdater {
state.latest_slot_processed = state.latest_slot_pending;
if started_at.elapsed() > Duration::from_millis(100) {
info!(
debug!(
"{} - refresh {} - took - {:?}",
self.dex.name,
refreshed_edges.len(),

View File

@ -3,7 +3,7 @@ use router_config_lib::HotMintsConfig;
use solana_program::pubkey::Pubkey;
use std::collections::{HashSet, VecDeque};
use std::str::FromStr;
use tracing::info;
use tracing::debug;
pub struct HotMintsCache {
max_count: usize,
@ -50,11 +50,11 @@ impl HotMintsCache {
} else if self.latest_unordered.len() >= self.max_count {
let oldest = self.latest_ordered.pop_back().unwrap();
self.latest_unordered.remove(&oldest);
info!("Removing {} from hot mints", debug_tools::name(&oldest));
debug!("Removing {} from hot mints", debug_tools::name(&oldest));
}
if self.latest_unordered.insert(pubkey) {
info!("Adding {} to hot mints", debug_tools::name(&pubkey));
debug!("Adding {} to hot mints", debug_tools::name(&pubkey));
}
self.latest_ordered.push_front(pubkey);
return;

View File

@ -99,6 +99,14 @@ async fn main() -> anyhow::Result<()> {
let config = Config::load(&args[1])?;
let router_version = RouterVersion::OverestimateAmount;
if config.metrics.output_http {
let prom_bind_addr = config
.metrics
.prometheus_address
.clone()
.expect("prometheus_address must be set");
PrometheusSync::sync(prom_bind_addr);
}
let hot_mints = Arc::new(RwLock::new(HotMintsCache::new(&config.hot_mints)));
let mango_data = match mango::mango_fetcher::fetch_mango_data().await {
@ -146,7 +154,7 @@ async fn main() -> anyhow::Result<()> {
let (metadata_write_sender, metadata_write_receiver) =
async_channel::unbounded::<FeedMetadata>();
let (slot_sender, slot_receiver) = async_channel::unbounded::<SlotUpdate>();
let (account_update_sender, _) = broadcast::channel(1048576); // TODO this is huge, but init snapshot will completely spam this
let (account_update_sender, _) = broadcast::channel(4 * 1024 * 1024); // TODO this is huge, but init snapshot will completely spam this
let chain_data = Arc::new(RwLock::new(ChainData::new()));
start_chaindata_updating(
@ -200,14 +208,6 @@ async fn main() -> anyhow::Result<()> {
exit(-1);
};
if config.metrics.output_http {
let prom_bind_addr = config
.metrics
.prometheus_address
.clone()
.expect("prometheus_address must be set");
let _prometheus = PrometheusSync::sync(prom_bind_addr);
}
if config.metrics.output_stdout {
warn!("metrics output to stdout is not supported yet");
}
@ -576,7 +576,7 @@ fn start_chaindata_updating(
chain_data: ChainDataArcRw,
account_writes: async_channel::Receiver<AccountOrSnapshotUpdate>,
slot_updates: async_channel::Receiver<SlotUpdate>,
account_update_sender: broadcast::Sender<(Pubkey, u64)>,
account_update_sender: broadcast::Sender<(Pubkey, Pubkey, u64)>,
mut exit: broadcast::Receiver<()>,
) -> JoinHandle<()> {
use mango_feeds_connector::chain_data::SlotData;
@ -643,7 +643,7 @@ fn handle_updated_account(
most_recent_seen_slot: &mut u64,
chain_data: &mut RwLockWriteGuard<ChainData>,
update: AccountOrSnapshotUpdate,
account_update_sender: &broadcast::Sender<(Pubkey, u64)>,
account_update_sender: &broadcast::Sender<(Pubkey, Pubkey, u64)>,
) {
use mango_feeds_connector::chain_data::AccountData;
use solana_sdk::account::WritableAccount;
@ -652,7 +652,7 @@ fn handle_updated_account(
fn one_update(
most_recent_seen_slot: &mut u64,
chain_data: &mut RwLockWriteGuard<ChainData>,
account_update_sender: &broadcast::Sender<(Pubkey, u64)>,
account_update_sender: &broadcast::Sender<(Pubkey, Pubkey, u64)>,
account_write: AccountWrite,
) {
chain_data.update_account(
@ -680,7 +680,7 @@ fn handle_updated_account(
}
// ignore failing sends when there are no receivers
let _err = account_update_sender.send((account_write.pubkey, account_write.slot));
let _err = account_update_sender.send((account_write.pubkey, account_write.owner, account_write.slot));
}
match update {

View File

@ -1,17 +1,15 @@
use std::time::Duration;
use axum::{routing, Router};
use prometheus::{Encoder, TextEncoder};
use tokio::net::{TcpListener, ToSocketAddrs};
use tokio::task::JoinHandle;
use tokio::{
io::AsyncWriteExt,
net::{TcpListener, TcpStream, ToSocketAddrs},
};
use tracing::error;
use tracing::{error, info};
use crate::server::errors::AppError;
pub struct PrometheusSync;
impl PrometheusSync {
fn create_response(payload: &str) -> String {
fn create_response(payload: String) -> String {
format!(
"HTTP/1.1 200 OK\r\nContent-Length: {}\r\n\r\n{}",
payload.len(),
@ -19,7 +17,7 @@ impl PrometheusSync {
)
}
async fn handle_stream(stream: &mut TcpStream) -> anyhow::Result<()> {
async fn get_prometheus_stream() -> Result<String, AppError> {
let mut metrics_buffer = Vec::new();
let encoder = TextEncoder::new();
@ -29,29 +27,22 @@ impl PrometheusSync {
.unwrap();
let metrics_buffer = String::from_utf8(metrics_buffer).unwrap();
let response = Self::create_response(&metrics_buffer);
stream.writable().await?;
stream.write_all(response.as_bytes()).await?;
stream.flush().await?;
Ok(())
Ok(Self::create_response(metrics_buffer))
}
pub fn sync(addr: impl ToSocketAddrs + Send + 'static) -> JoinHandle<anyhow::Result<()>> {
tokio::spawn(async move {
let listener = TcpListener::bind(addr).await?;
loop {
let Ok((mut stream, _addr)) = listener.accept().await else {
error!("Error accepting prometheus stream");
tokio::time::sleep(Duration::from_millis(1)).await;
continue;
};
let mut router: Router<()> = Router::new();
router = router.route("/metrics", routing::get(Self::get_prometheus_stream));
let _ = Self::handle_stream(&mut stream).await;
}
let handle = axum::serve(listener, router);
info!("Prometheus Server started");
handle.await.expect("Prometheus Server failed");
Ok(())
})
}
}

View File

@ -614,7 +614,7 @@ impl Routing {
hot_mints: &HashSet<Pubkey>,
swap_mode: SwapMode,
) {
info!("prepare_pruned_edges_and_cleanup_cache started");
debug!("prepare_pruned_edges_and_cleanup_cache started");
self.path_discovery_cache.write().unwrap().expire_old();
let (valid_edge_count, out_edges_per_mint_index) = Self::select_best_pools(
@ -645,7 +645,7 @@ impl Routing {
(*writer).1 = out_edges_per_mint_index;
}
info!("prepare_pruned_edges_and_cleanup_cache done");
debug!("prepare_pruned_edges_and_cleanup_cache done");
}
fn compute_price_impact(edge: &Arc<Edge>) -> Option<f64> {
@ -843,6 +843,14 @@ impl Routing {
warn!(valid_edge_count, skipped_bad_price_impact, "pruning");
}
// for mint_vec in out_edges_per_mint_index.iter() {
// for mint in mint_vec {
// let input_mint = mint_to_index.iter().filter(|(_, x)| **x==mint.source_node).map(|(pk,_)| *pk).collect_vec();
// let output_mint = mint_to_index.iter().filter(|(_, x)| **x==mint.target_node).map(|(pk,_)| *pk).collect_vec();
// info!("input_mint {:?} {:?}", input_mint, output_mint );
// }
// }
(valid_edge_count, out_edges_per_mint_index)
}
@ -1541,11 +1549,11 @@ impl Routing {
let can_try_one_more_hop = max_path_length != self.max_path_length;
if !ignore_cache && (used_cached_paths || can_try_one_more_hop) {
if used_cached_paths {
info!("Invalid cached path, retrying without cache");
debug!("Invalid cached path, retrying without cache");
let mut cache = self.path_discovery_cache.write().unwrap();
cache.invalidate(input_index, output_index, max_accounts);
} else {
warn!("No path within boundaries, retrying with +1 hop");
debug!("No path within boundaries, retrying with +1 hop");
}
return self.find_best_route(
chain_data,
@ -1560,7 +1568,7 @@ impl Routing {
);
}
self.print_debug_data(input_mint, output_mint, max_accounts);
// self.print_debug_data(input_mint, output_mint, max_accounts);
bail!(RoutingError::NoPathBetweenMintPair(
input_mint.clone(),

View File

@ -35,6 +35,7 @@ use router_lib::model::quote_response::{RoutePlan, SwapInfo};
// make sure the transaction can be executed
const MAX_ACCOUNTS_PER_TX: usize = 64;
const MAX_TX_SIZE: usize = 1232;
const DEFAULT_COMPUTE_UNIT_PRICE_MICRO_LAMPORTS: u64 = 10_000;
pub struct HttpServer {
pub join_handle: JoinHandle<()>,
@ -179,6 +180,7 @@ impl HttpServer {
0,
"0".to_string(),
swap_mode,
DEFAULT_COMPUTE_UNIT_PRICE_MICRO_LAMPORTS,
)
.await?;
@ -282,6 +284,11 @@ impl HttpServer {
let swap_mode: SwapMode = SwapMode::from_str(&input.quote_response.swap_mode)
.map_err(|_| anyhow::Error::msg("Invalid SwapMode"))?;
let compute_unit_price_micro_lamports = match input.compute_unit_price_micro_lamports {
Some(price) => price,
None => DEFAULT_COMPUTE_UNIT_PRICE_MICRO_LAMPORTS,
};
let (bytes, _) = Self::build_swap_tx(
address_lookup_table_addresses,
hash_provider,
@ -294,13 +301,14 @@ impl HttpServer {
input.quote_response.slippage_bps,
input.quote_response.other_amount_threshold,
swap_mode,
compute_unit_price_micro_lamports,
)
.await?;
let json_response = serde_json::json!(SwapResponse {
swap_transaction: bytes,
last_valid_block_height: input.quote_response.context_slot,
priorization_fee_lamports: 100_000,
priorization_fee_lamports: compute_unit_price_micro_lamports / 1_000_000, // convert microlamports to lamports
});
Ok(Json(json_response))
@ -356,6 +364,7 @@ impl HttpServer {
slippage_bps: i32,
other_amount_threshold: String,
swap_mode: SwapMode,
compute_unit_price_micro_lamports: u64,
) -> Result<(Vec<u8>, usize), AppError> {
let wallet_pk = Pubkey::from_str(&wallet_pk)?;
@ -370,15 +379,14 @@ impl HttpServer {
)?;
let compute_budget_ixs = vec![
ComputeBudgetInstruction::set_compute_unit_price(10_000), // ~0.01 lamport / CU
ComputeBudgetInstruction::set_compute_unit_price(compute_unit_price_micro_lamports),
ComputeBudgetInstruction::set_compute_unit_limit(ixs.cu_estimate),
];
let transaction_addresses = ixs.accounts().into_iter().collect();
let instructions = ixs
.setup_instructions
let instructions = compute_budget_ixs
.into_iter()
.chain(compute_budget_ixs.into_iter())
.chain(ixs.setup_instructions.into_iter())
.chain(vec![ixs.swap_instruction].into_iter())
.chain(ixs.cleanup_instructions.into_iter())
.collect_vec();
@ -420,6 +428,11 @@ impl HttpServer {
let swap_mode: SwapMode = SwapMode::from_str(&input.quote_response.swap_mode)
.map_err(|_| anyhow::Error::msg("Invalid SwapMode"))?;
let compute_unit_price_micro_lamports = match input.compute_unit_price_micro_lamports {
Some(price) => price,
None => DEFAULT_COMPUTE_UNIT_PRICE_MICRO_LAMPORTS,
};
let ixs = ix_builder.build_ixs(
&wallet_pk,
&route_plan,
@ -447,7 +460,9 @@ impl HttpServer {
.collect();
let compute_budget_ixs = vec![
InstructionResponse::from_ix(ComputeBudgetInstruction::set_compute_unit_price(10_000))?, // ~0.01 lamport / CU
InstructionResponse::from_ix(ComputeBudgetInstruction::set_compute_unit_price(
compute_unit_price_micro_lamports,
))?,
InstructionResponse::from_ix(ComputeBudgetInstruction::set_compute_unit_limit(
ixs.cu_estimate,
))?,

View File

@ -1,5 +1,5 @@
pub mod alt_provider;
mod errors;
pub mod errors;
pub mod hash_provider;
pub mod http_server;
pub mod live_account_provider;

View File

@ -0,0 +1,99 @@
snapshot_timeout_in_seconds = 900
[infinity]
enabled = false
[orca]
enabled = true
mints = []
take_all_mints = true
add_mango_tokens = false
[cropper]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[openbook_v2]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[raydium]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[raydium_cp]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[saber]
enabled = false
mints = []
take_all_mints = true
add_mango_tokens = false
[routing]
path_cache_validity_ms = 30000
path_warming_mode = "ConfiguredMints"
#path_warming_mode = "HotMints"
path_warming_amounts = [100, 1000, 10_000]
path_warming_for_mints = [
"So11111111111111111111111111111111111111112", # SOL
]
path_warming_interval_secs = 5
path_warming_max_accounts = [20, 30, 40, 64]
lookup_tables = []
cooldown_duration_multihop_secs = 30
cooldown_duration_singlehop_secs = 60
max_path_length = 3
retain_path_count = 5
max_edge_per_pair = 5
max_edge_per_cold_pair = 2
slot_excessive_lag_max_duration_secs = 1600
[server]
address = "[::]:8888"
[metrics]
output_http = true
prometheus_address = "[::]:9091"
output_stdout = false
[[sources]]
dedup_queue_size = 50000
rpc_http_url = "$RPC_HTTP_URL"
rpc_support_compression = false
re_snapshot_interval_secs = 1200
request_timeout_in_seconds = 300
[[sources.grpc_sources]]
name = "router-eclipse"
connection_string = "$RPC_HTTP_URL_WITHOUT_TOKEN"
token = "$RPC_TOKEN"
retry_connection_sleep_secs = 30
[price_feed]
birdeye_token = "$BIRDEYE_TOKEN"
refresh_interval_secs = 1200 # every 20 min
[safety_checks]
check_quote_out_amount_deviation = true
min_quote_out_to_in_amount_ratio = 0.65
[hot_mints]
always_hot_mints = [
"So11111111111111111111111111111111111111112", # SOL
]
keep_latest_count = 50
[debug_config]
reprice_using_live_rpc = true
reprice_probability = 0.05

View File

@ -1,3 +1,5 @@
snapshot_timeout_in_seconds = 900
[infinity]
enabled = true
@ -22,8 +24,8 @@ add_mango_tokens = false
[raydium]
enabled = true
mints = []
take_all_mints = false
add_mango_tokens = true
take_all_mints = true
add_mango_tokens = false
[raydium_cp]
enabled = true
@ -50,7 +52,7 @@ path_warming_for_mints = [
# "27G8MtK7VtTcCHkpASjSDdkWWYfoqT6ggEuKidVJidD4" # JLP
]
path_warming_interval_secs = 5
path_warming_max_accounts = [20, 25, 30, 35, 40, 64]
path_warming_max_accounts = [20, 30, 40, 64]
lookup_tables = ["87TgskchTNEv1uXkGQk1U4zt65tjqbfGAZWNMGAcRRPx",
"AgCBUZ6UMWqPLftTxeAqpQxtrfiCyL2HgRfmmM6QTfCj",
"A1v3qxN7HbUvtyPnnaoCrKonXjkFLaDHXk3S6R2QfEaw",
@ -77,6 +79,7 @@ dedup_queue_size = 50000
rpc_http_url = "$RPC_HTTP_URL"
rpc_support_compression = true
re_snapshot_interval_secs = 1200
request_timeout_in_seconds = 300
[[sources.grpc_sources]]
name = "router-other"
@ -84,12 +87,40 @@ connection_string = "$RPC_HTTP_URL_WITHOUT_TOKEN"
token = "$RPC_TOKEN"
retry_connection_sleep_secs = 30
[[sources.quic_sources]]
name = "quic-client"
connection_string = "$RPC_QUIC_URL"
retry_connection_sleep_secs = 1
enable_gso = false
[[sources]]
region = "dfw"
dedup_queue_size = 50000
rpc_http_url = "$DFW_RPC_HTTP_URL"
rpc_support_compression = true
re_snapshot_interval_secs = 1200
request_timeout_in_seconds = 300
[[sources.grpc_sources]]
name = "router-dfw"
connection_string = "$DFW_RPC_HTTP_URL_WITHOUT_TOKEN"
token = "$AMS_RPC_TOKEN"
retry_connection_sleep_secs = 30
[[sources.quic_sources]]
name = "quic-client-dfw"
connection_string = "$DFW_RPC_QUIC_URL"
retry_connection_sleep_secs = 1
enable_gso = false
[[sources]]
region = "ams"
dedup_queue_size = 50000
rpc_http_url = "$AMS_RPC_HTTP_URL"
rpc_support_compression = true
re_snapshot_interval_secs = 1200
request_timeout_in_seconds = 300
[[sources.grpc_sources]]
name = "router-ams"
@ -97,6 +128,12 @@ connection_string = "$AMS_RPC_HTTP_URL_WITHOUT_TOKEN"
token = "$AMS_RPC_TOKEN"
retry_connection_sleep_secs = 30
[[sources.quic_sources]]
name = "quic-client-ams "
connection_string = "$AMS_RPC_QUIC_URL"
retry_connection_sleep_secs = 1
enable_gso = false
[price_feed]
birdeye_token = "$BIRDEYE_TOKEN"
refresh_interval_secs = 1200 # every 20 min

View File

@ -166,6 +166,13 @@ async fn main() -> Result<(), anyhow::Error> {
let event = bytemuck::from_bytes::<ReferrerWithdrawLog>(&decoded[8..]);
println!("ReferrerWithdrawLog - referer: {:?}, referer_token_account: {:?}, amount: {}", event.referer, event.referer_token_account, event.amount);
}
&CREATE_REFERRAL_LOG_DISCRIMINANT => {
let event = bytemuck::from_bytes::<CreateReferralLog>(&decoded[8..]);
println!(
"CreateReferralLog - referer: {:?}, referee: {:?}, vault: {:?}, mint: {:?}",
event.referer, event.referee, event.vault, event.mint
);
}
_ => panic!("Unknown log discriminant"),
}
}

View File

@ -386,14 +386,14 @@ where
}
let err_str = if is_slippage_error {
"Failed to execute TX : Max Slippage Reached"
"Failed to execute TX : Max Slippage Reached".to_string()
} else if is_cu_error {
"Failed to execute TX : Exceeded CUs meter"
"Failed to execute TX : Exceeded CUs meter".to_string()
} else {
"Failed to execute TX"
format!("Failed to execute TX : {err:?}")
};
return Ok((out_amount, false, accounts, 0, err_str.to_string()));
return Ok((out_amount, false, accounts, 0, err_str));
};
let Some(after_accounts) = simulation_result.value.accounts else {

25
fly-eclipse.toml Normal file
View File

@ -0,0 +1,25 @@
app = "router-eclipse"
primary_region = "fra"
kill_signal = "SIGTERM"
kill_timeout = "30s"
[build]
dockerfile = 'bin/autobahn-router/Dockerfile'
[build.args]
CONFIG_PATH="/app/bin/autobahn-router/template-config-eclipse.toml"
[experimental]
cmd = ["autobahn-router", "/usr/local/bin/template-config.toml"]
[[vm]]
size = "shared-cpu-4x"
memory = "8gb"
[[restart]]
policy = "always"
retries = 10
[metrics]
port = 9091
path = "/metrics"

View File

@ -1,4 +1,4 @@
app = "router-1"
app = "router-2"
primary_region = "ams"
kill_signal = "SIGTERM"
kill_timeout = "30s"
@ -10,8 +10,8 @@ kill_timeout = "30s"
cmd = ["autobahn-router", "/usr/local/bin/template-config.toml"]
[[vm]]
size = "shared-cpu-4x"
memory = "8gb"
size = "performance-16x"
memory = "32gb"
[[restart]]
policy = "always"

View File

@ -24,7 +24,7 @@ async-trait = "0.1.79"
chrono = "0.4.38"
sha2 = "0.10.8"
tracing = "0.1.40"
spl-associated-token-account = "1.0.5"
spl-associated-token-account = { version = "1.0.5", features = ["no-entrypoint"] }
# infinity
solana-readonly-account = { version = "1.1.0", features=["solana-sdk"] }

View File

@ -1,5 +1,6 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::str::FromStr;
use std::sync::Arc;
use jupiter_amm_interface::{Amm, QuoteParams, SwapMode};
@ -67,6 +68,14 @@ impl DexInterface for InfinityDex {
let lst_mint = lst_data.sol_val_calc.lst_mint();
let account_metas = lst_data.sol_val_calc.ix_accounts();
let num_accounts_for_tx = account_metas.len();
let Ok((lst_state, lst_data)) = amm.find_ready_lst(lst_mint) else {
continue;
};
if lst_state.is_input_disabled != 0 {
continue;
}
for pk in lst_data.sol_val_calc.get_accounts_to_update() {
let edges = vec![
Arc::new(InfinityEdgeIdentifier {
@ -106,6 +115,7 @@ impl DexInterface for InfinityDex {
fn program_ids(&self) -> HashSet<Pubkey> {
[
Pubkey::from_str("5ocnV1qiCgaQR8Jb8xWnVbApfaygJ8tNoZfgPwsgx9kx").unwrap(),
s_controller_lib::program::ID,
sanctum_spl_multi_stake_pool_program::ID,
sanctum_spl_stake_pool_program::ID,

View File

@ -310,10 +310,17 @@ pub async fn fetch_all_whirlpools(
.await?;
let result = whirlpools
.iter()
.map(|account| {
let whirlpool: Whirlpool =
AnchorDeserialize::deserialize(&mut &account.data[8..]).unwrap();
(account.pubkey, whirlpool)
.filter_map(|account| {
let pubkey = account.pubkey;
let whirlpool: Result<Whirlpool, std::io::Error> =
AnchorDeserialize::deserialize(&mut &account.data[8..]);
match whirlpool {
Ok(whirlpool) => Some((account.pubkey, whirlpool)),
Err(e) => {
error!("Error deserializing whirlpool account : {pubkey:?} error: {e:?}");
None
}
}
})
.collect_vec();
Ok(result)

View File

@ -5,7 +5,7 @@ use std::sync::Arc;
use anchor_lang::Id;
use anchor_spl::token::spl_token;
use anchor_spl::token::spl_token::state::AccountState;
use anchor_spl::token::spl_token::state::{Account, AccountState};
use anchor_spl::token_2022::Token2022;
use anyhow::Context;
use itertools::Itertools;
@ -229,7 +229,12 @@ impl OrcaDex {
.iter()
.filter(|x| {
x.1.owner == Token2022::id()
|| spl_token::state::Account::unpack(x.1.data()).unwrap().state
|| spl_token::state::Account::unpack(x.1.data())
.unwrap_or(Account {
state: AccountState::Frozen,
..Default::default()
})
.state
== AccountState::Frozen
})
.map(|x| x.0)
@ -246,7 +251,9 @@ impl OrcaDex {
// TODO: actually need to dynamically adjust subscriptions based on the tick?
let tick_arrays = filtered_pools
.iter()
.map(|(pk, wp)| whirlpool_tick_array_pks(wp, pk, program_id))
.map(|(pk, wp)| {
whirlpool_tick_array_pks(wp, pk, program_id)
})
.collect_vec();
let edge_pairs = filtered_pools

View File

@ -8,6 +8,16 @@ use router_lib::test_tools::{generate_dex_rpc_dump, rpc};
#[tokio::test]
async fn test_dump_input_data_cropper() -> anyhow::Result<()> {
let is_eclipse = std::env::var("ECLIPSE")
.map(|x| {
let value: bool = x.parse().unwrap();
value
})
.unwrap_or_default();
if is_eclipse {
// cropper is not yet on eclipse
return Ok(());
}
let options = HashMap::from([
(
"program_id".to_string(),

View File

@ -14,6 +14,7 @@ pub struct GrpcSourceConfig {
#[derive(Clone, Debug, Default, serde_derive::Deserialize)]
pub struct QuicSourceConfig {
pub name: String,
#[serde(deserialize_with = "serde_string_or_env")]
pub connection_string: String,
pub retry_connection_sleep_secs: u64,
pub enable_gso: Option<bool>,

View File

@ -215,6 +215,47 @@ pub async fn get_compressed_program_account_rpc(
Ok((min_slot, snap_result))
}
// called on startup to get the required accounts, few calls with some 100 thousand accounts
#[tracing::instrument(skip_all, level = "trace")]
pub async fn get_uncompressed_program_account_rpc(
rpc_client: &RpcClient,
filters: &HashSet<Pubkey>,
config: RpcProgramAccountsConfig,
) -> anyhow::Result<(u64, Vec<AccountWrite>)> {
let slot = rpc_client.get_slot().await?;
let config = RpcProgramAccountsConfig {
with_context: Some(true),
account_config: RpcAccountInfoConfig {
encoding: Some(UiAccountEncoding::Base64),
min_context_slot: None,
commitment: config.account_config.commitment,
data_slice: config.account_config.data_slice,
},
filters: config.filters,
};
let mut snap_result = vec![];
let mut min_slot = u64::MAX;
// plain (uncompressed) getProgramAccounts, issued once per program id
for program_id in filters.iter() {
info!("gPA for {}", program_id);
min_slot = slot.min(min_slot);
let account_snapshot = rpc_client
.get_program_accounts_with_config(&program_id, config.clone())
.await
.map_err_anyhow()?;
tracing::log::debug!("gpa snapshot received {}", program_id);
let iter = account_snapshot.iter().map(|(pk, account)| {
account_write_from(*pk, slot, SNAP_ACCOUNT_WRITE_VERSION, account.clone())
});
snap_result.extend(iter);
}
Ok((min_slot, snap_result))
}
// called on startup to get the required accounts, few calls with some 100 thousand accounts
#[tracing::instrument(skip_all, level = "trace")]
pub async fn get_uncompressed_program_account(

View File

@ -9,7 +9,10 @@ use solana_sdk::account::Account;
use solana_sdk::pubkey::Pubkey;
use crate::account_write::AccountWrite;
use crate::get_program_account::{fetch_multiple_accounts, get_compressed_program_account_rpc};
use crate::get_program_account::{
fetch_multiple_accounts, get_compressed_program_account_rpc,
get_uncompressed_program_account_rpc,
};
use crate::router_rpc_client::RouterRpcClientTrait;
pub struct RouterRpcWrapper {
@ -52,10 +55,21 @@ impl RouterRpcClientTrait for RouterRpcWrapper {
pubkey: &Pubkey,
config: RpcProgramAccountsConfig,
) -> anyhow::Result<Vec<AccountWrite>> {
Ok(
get_compressed_program_account_rpc(&self.rpc, &HashSet::from([*pubkey]), config)
.await?
.1,
)
let disable_compressed = std::env::var::<String>("DISABLE_COMRPESSED_GPA".to_string())
.unwrap_or("false".to_string());
let disable_compressed: bool = disable_compressed.trim().parse().unwrap();
if disable_compressed {
Ok(
get_uncompressed_program_account_rpc(&self.rpc, &HashSet::from([*pubkey]), config)
.await?
.1,
)
} else {
Ok(
get_compressed_program_account_rpc(&self.rpc, &HashSet::from([*pubkey]), config)
.await?
.1,
)
}
}
}

View File

@ -32,3 +32,4 @@ lz4 = "1.25.0"
async-channel = "1.9.0"
lazy_static = "1.5.0"
anchor-spl = { version = "0.29.0", features = ["associated_token"] }
sha2 = "0.10.8"

View File

@ -12,10 +12,20 @@ use std::sync::Arc;
/// Fully-built instruction for one hop of a route, plus the metadata the
/// onchain executor needs to chain it after the previous hop's output.
#[derive(Clone, Serialize, Deserialize)]
pub struct SwapInstruction {
    /// Instruction to be executed by the user to swap through an edge.
    pub instruction: Instruction,
    /// Address of the user's associated token account that will receive
    /// the proceeds of the swap after invoking instruction.
    pub out_pubkey: Pubkey,
    /// Mint of the tokens received from the swap.
    pub out_mint: Pubkey,
    /// Byte offset in Instruction.data that the onchain executor program
    /// will use to replace the input amount with the proceeds of the
    /// previous swap before cpi-invocation of this edge.
    /// instruction.data\[in_amount_offset..in_amount_offset+8\] = in_amount
    pub in_amount_offset: u16,
    /// Conservative upper bound estimate of compute cost. If it is too low
    /// transactions will fail, if it is too high, they will confirm slower.
    pub cu_estimate: Option<u32>,
}
@ -144,6 +154,16 @@ pub type AccountProviderView = Arc<dyn AccountProvider>;
#[async_trait::async_trait]
pub trait DexInterface: Sync + Send {
/// Called on router boot, with the options read from the dex adapters's
/// config file. Can use RPC to initialize. The result contains usually
/// self. After calling initialize the returned DexInterface needs to be
/// able to respond the following methods:
/// - name()
/// - subscription_mode()
/// - edges_per_pk()
/// - program_ids()
/// - supports_exact_out()
/// - load()
async fn initialize(
rpc: &mut RouterRpcClient,
options: HashMap<String, String>,
@ -153,18 +173,42 @@ pub trait DexInterface: Sync + Send {
fn name(&self) -> String;
/// Defines the kind of grpc/quic subscription that should be established
/// to the RPC/Validator to keep this adapter updated. Also defines the
/// accounts included in a snapshot for simulation tests.
/// Right now the subscription mode is static per adapter and changes post
/// initialization have no effect. This might change in the future.
fn subscription_mode(&self) -> DexSubscriptionMode;
/// Defines the relationship between account updates and which
/// DexEdgeIndentifiers will be reloaded. Once a batch of account updates
/// has been added to ChainData the corresponding edge identifies will be
/// passed to DexInterface::load().
/// The identifies are a symbolic representation for edges, meaning they
/// should not store any mutable data related to the generation of actual
/// quotes, but merely expose the immutable description of the possibility
/// to quote a trade from input mint to output mint.
fn edges_per_pk(&self) -> HashMap<Pubkey, Vec<Arc<dyn DexEdgeIdentifier>>>;
/// Defines the programs that should be included in a snapshot for
/// simulation tests.
fn program_ids(&self) -> HashSet<Pubkey>;
/// Initializes an Edge from ChainData (production) or BanksClient (test).
/// The Edge will be dropped once a new Edge for the same EdgeIndentifier
/// has been initialized. After calling initialize the DexInterface needs
/// to be able to respond to quote() and supports_exact_out() calls that
/// pass this Edge. It can store immutable data locally.
/// Performance is critical, optimize implementations well.
fn load(
&self,
id: &Arc<dyn DexEdgeIdentifier>,
// TODO: put behind interface so we can adapt for BanksClient
chain_data: &AccountProviderView,
) -> anyhow::Result<Arc<dyn DexEdge>>;
/// Calculates the output amount for a given input amount, will be called
/// multiple times after an edge has been loaded.
/// Performance is critical, optimize implementations well.
fn quote(
&self,
id: &Arc<dyn DexEdgeIdentifier>,
@ -173,19 +217,9 @@ pub trait DexInterface: Sync + Send {
in_amount: u64,
) -> anyhow::Result<Quote>;
fn build_swap_ix(
&self,
id: &Arc<dyn DexEdgeIdentifier>,
// TODO: put behind interface so we can adapt for BanksClient
chain_data: &AccountProviderView,
wallet_pk: &Pubkey,
in_amount: u64,
out_amount: u64,
max_slippage_bps: i32,
) -> anyhow::Result<SwapInstruction>;
fn supports_exact_out(&self, id: &Arc<dyn DexEdgeIdentifier>) -> bool;
/// Calculates the input amount for a given output amount, will be called
/// multiple times after an edge has been loaded.
/// Performance is critical, optimize implementations well.
fn quote_exact_out(
&self,
id: &Arc<dyn DexEdgeIdentifier>,
@ -193,5 +227,27 @@ pub trait DexInterface: Sync + Send {
chain_data: &AccountProviderView,
out_amount: u64,
) -> anyhow::Result<Quote>;
// TODO: list of all program_ids to fetch for testing
/// Returns true, if the edge supports both quote() and quote_exact_out().
/// Returns false, if the edge only support quote().
fn supports_exact_out(&self, id: &Arc<dyn DexEdgeIdentifier>) -> bool;
/// Constructs a description for call-data passed to the executor program.
/// Once a route has been selected for the end-user to swap through, the
/// router will invoke DexInterface::build_swap_ix for every edge in the
/// route with with it's DexEdgeIdentifier rather than the initialized
/// DexEdge. The build_swap_ix implementation should use the most recent
/// data possible to avoid latency between account update, load & quote.
/// Exact-out is only used during route selection, onchain execution is
/// always using exact input amounts that get adjusted between edge
/// cpi-invocations using the in_amount_offset.
fn build_swap_ix(
&self,
id: &Arc<dyn DexEdgeIdentifier>,
chain_data: &AccountProviderView,
wallet_pk: &Pubkey,
in_amount: u64,
out_amount: u64,
max_slippage_bps: i32,
) -> anyhow::Result<SwapInstruction>;
}

View File

@ -9,6 +9,6 @@ pub mod test_tools;
pub mod utils;
pub mod autobahn_executor {
use solana_sdk::declare_id;
declare_id!("AutobNFLMzX1rFCDgwWpwr3ztG5c1oDbSrGq7Jj2LgE");
use solana_sdk::declare_id;
declare_id!("AutobNFLMzX1rFCDgwWpwr3ztG5c1oDbSrGq7Jj2LgE");
}

View File

@ -8,13 +8,14 @@ use itertools::Itertools;
use mango_feeds_connector::chain_data::AccountData;
use router_feed_lib::router_rpc_client::{RouterRpcClient, RouterRpcClientTrait};
use router_test_lib::{execution_dump, serialize};
use sha2::{Digest, Sha256};
use solana_sdk::account::ReadableAccount;
use solana_sdk::bpf_loader_upgradeable::UpgradeableLoaderState;
use solana_sdk::clock::Clock;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::Keypair;
use solana_sdk::signer::Signer;
use solana_sdk::sysvar::SysvarId;
use std::collections::HashSet;
use std::sync::Arc;
use tracing::{debug, error};
@ -67,7 +68,7 @@ pub async fn run_dump_mainnet_data_with_custom_amount(
let mut skipped = 0;
let mut success = 0;
let mut accounts_needed = HashSet::new();
let mut accounts_needed = dex.program_ids();
for id in edges_identifiers {
accounts_needed.insert(id.input_mint());
accounts_needed.insert(id.output_mint());
@ -123,7 +124,20 @@ pub async fn run_dump_mainnet_data_with_custom_amount(
for x in accounts_needed.iter().take(10) {
println!("- {} ", x);
}
rpc_client.get_multiple_accounts(&accounts_needed).await?;
let accounts = rpc_client.get_multiple_accounts(&accounts_needed).await?;
for (_pk, account) in accounts {
// get buffer for upgradable programs
if account.owner == solana_sdk::bpf_loader_upgradeable::ID {
let state = bincode::deserialize::<UpgradeableLoaderState>(&account.data).unwrap();
if let UpgradeableLoaderState::Program {
programdata_address,
} = state
{
rpc_client.get_account(&programdata_address).await?;
}
}
}
println!("Error count: {}", errors);
println!("Skipped count: {}", skipped);
@ -213,7 +227,7 @@ pub async fn run_dump_swap_ix_with_custom_amount(
continue;
};
debug!(
println!(
"#{} || quote: {} => {} : {} => {}",
success,
id.input_mint(),
@ -232,6 +246,25 @@ pub async fn run_dump_swap_ix_with_custom_amount(
is_exact_out: false,
});
let chain_data_reader = chain_data.read().unwrap();
for account in swap_ix.instruction.accounts {
if let Ok(acc) = chain_data_reader.account(&account.pubkey) {
dump.accounts.insert(account.pubkey, acc.account.clone());
} else {
error!("Missing account (needed for swap) {}", account.pubkey);
}
}
let account = chain_data_reader
.account(&id.input_mint())
.expect("missing mint");
dump.accounts
.insert(id.input_mint(), account.account.clone());
let account = chain_data_reader
.account(&id.output_mint())
.expect("missing mint");
dump.accounts
.insert(id.output_mint(), account.account.clone());
// build exact out tests
if dex.supports_exact_out(&id) {
let Ok(mut quote_exact_out) =
@ -274,9 +307,6 @@ pub async fn run_dump_swap_ix_with_custom_amount(
instruction: bincode::serialize(&swap_exact_out_ix.instruction).unwrap(),
is_exact_out: true,
});
// add exact out accounts
let chain_data_reader = chain_data.read().unwrap();
for account in swap_exact_out_ix.instruction.accounts {
if let Ok(acc) = chain_data_reader.account(&account.pubkey) {
dump.accounts.insert(account.pubkey, acc.account.clone());
@ -284,41 +314,8 @@ pub async fn run_dump_swap_ix_with_custom_amount(
error!("Missing account (needed for swap) {}", account.pubkey);
}
}
let account = chain_data_reader
.account(&id.input_mint())
.expect("missing mint");
dump.accounts
.insert(id.input_mint(), account.account.clone());
let account = chain_data_reader
.account(&id.input_mint())
.expect("missing mint");
dump.accounts
.insert(id.output_mint(), account.account.clone());
}
}
let chain_data_reader = chain_data.read().unwrap();
for account in swap_ix.instruction.accounts {
if let Ok(acc) = chain_data_reader.account(&account.pubkey) {
dump.accounts.insert(account.pubkey, acc.account.clone());
} else {
error!("Missing account (needed for swap) {}", account.pubkey);
}
}
let account = chain_data_reader
.account(&id.input_mint())
.expect("missing mint");
dump.accounts
.insert(id.input_mint(), account.account.clone());
let account = chain_data_reader
.account(&id.input_mint())
.expect("missing mint");
dump.accounts
.insert(id.output_mint(), account.account.clone());
}
println!("Error count: {}", errors);
@ -326,6 +323,38 @@ pub async fn run_dump_swap_ix_with_custom_amount(
println!("Success count: {}", success);
println!("Exactout Success count: {}", exact_out_sucess);
for program in dump.programs.clone() {
let program_account = account_provider.account(&program)?;
dump.accounts
.insert(program, program_account.account.clone());
// use downloaded buffers for the upgradable programs
if *program_account.account.owner() == solana_sdk::bpf_loader_upgradeable::ID {
let state =
bincode::deserialize::<UpgradeableLoaderState>(program_account.account.data())
.unwrap();
if let UpgradeableLoaderState::Program {
programdata_address,
} = state
{
let program_data_account = account_provider.account(&programdata_address)?;
dump.accounts
.insert(programdata_address, program_data_account.account);
}
}
}
for program in &dump.programs {
debug!("program : {program:?}");
}
for (pk, account_data) in &dump.accounts {
let mut hasher = Sha256::new();
hasher.update(account_data.data());
let result = hasher.finalize();
let base64 = base64::encode(result);
debug!("account : {pk:?} dump : {base64:?}");
}
serialize::serialize_to_file(
&dump,
&format!("../../programs/simulator/tests/fixtures/{}", dump_name).to_string(),

View File

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use serde_derive::{Deserialize, Serialize};
use solana_sdk::account::AccountSharedData;
@ -17,7 +17,7 @@ pub struct ExecutionItem {
#[derive(Clone, Serialize, Deserialize)]
pub struct ExecutionDump {
pub wallet_keypair: String,
pub programs: Vec<Pubkey>,
pub programs: HashSet<Pubkey>,
pub cache: Vec<ExecutionItem>,
pub accounts: HashMap<Pubkey, AccountSharedData>,
}

View File

@ -12,6 +12,7 @@ where
let mut writer = lz4::EncoderBuilder::new().build(file_writer).unwrap();
writer.write_all(serialized_data.as_slice()).unwrap();
writer.flush().unwrap();
let _ = writer.finish();
}
pub fn deserialize_from_file<T>(path: &String) -> anyhow::Result<T>

View File

@ -1,6 +1,6 @@
[package]
name = "autobahn-executor"
version = "1.0.0"
version = "1.0.1"
edition = "2021"
[features]

View File

@ -0,0 +1,66 @@
use solana_program::account_info::AccountInfo;
use solana_program::entrypoint::ProgramResult;
use solana_program::program::invoke;
use solana_program::program::invoke_signed;
use solana_program::pubkey::Pubkey;
use solana_program::rent::Rent;
use solana_program::system_instruction;

/// Creates associated token account using Program Derived Address for the given
/// seeds
/// source: https://github.com/solana-labs/solana-program-library/blob/87b905c91f9c17581f6696512f20abb43ade13c8/associated-token-account/program/src/tools/account.rs#L19
//
// Kept byte-identical to the upstream SPL helper (link above) so it can be
// audited against the original. Two paths:
//  - account already funded: top up lamports, then allocate + assign via CPI
//  - fresh account: single create_account CPI
pub fn create_pda_account<'a>(
    payer: &AccountInfo<'a>,
    rent: &Rent,
    space: usize,
    owner: &Pubkey,
    system_program: &AccountInfo<'a>,
    new_pda_account: &AccountInfo<'a>,
    new_pda_signer_seeds: &[&[u8]],
) -> ProgramResult {
    if new_pda_account.lamports() > 0 {
        // Someone pre-funded the address; only transfer the shortfall up to
        // the rent-exempt minimum (.max(1) guards the zero-space case).
        let required_lamports = rent
            .minimum_balance(space)
            .max(1)
            .saturating_sub(new_pda_account.lamports());

        if required_lamports > 0 {
            invoke(
                &system_instruction::transfer(payer.key, new_pda_account.key, required_lamports),
                &[
                    payer.clone(),
                    new_pda_account.clone(),
                    system_program.clone(),
                ],
            )?;
        }

        // allocate space, then hand ownership to the target program; both
        // must be signed with the PDA seeds since the PDA is the authority
        invoke_signed(
            &system_instruction::allocate(new_pda_account.key, space as u64),
            &[new_pda_account.clone(), system_program.clone()],
            &[new_pda_signer_seeds],
        )?;

        invoke_signed(
            &system_instruction::assign(new_pda_account.key, owner),
            &[new_pda_account.clone(), system_program.clone()],
            &[new_pda_signer_seeds],
        )
    } else {
        // Fresh address: fund, allocate and assign in one create_account CPI.
        invoke_signed(
            &system_instruction::create_account(
                payer.key,
                new_pda_account.key,
                rent.minimum_balance(space).max(1),
                space as u64,
                owner,
            ),
            &[
                payer.clone(),
                new_pda_account.clone(),
                system_program.clone(),
            ],
            &[new_pda_signer_seeds],
        )
    }
}

View File

@ -1,12 +1,16 @@
use solana_program::account_info::AccountInfo;
use solana_program::entrypoint::ProgramResult;
use solana_program::program::{invoke, invoke_signed};
use solana_program::program::invoke;
use solana_program::program_error::ProgramError;
use solana_program::program_pack::Pack;
use solana_program::pubkey::Pubkey;
use solana_program::rent::Rent;
use solana_program::system_instruction;
use solana_program::system_program;
use solana_program::sysvar::Sysvar;
use crate::create_pda::create_pda_account;
use crate::logs::{emit_stack, CreateReferralLog};
pub fn execute_create_referral(accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult {
if let [payer, referrer, vault, mint, system_program, token_program] = accounts {
@ -34,34 +38,33 @@ pub fn execute_create_referral(accounts: &[AccountInfo], instruction_data: &[u8]
return Err(ProgramError::InvalidSeeds);
}
// fund account with rent
let space = spl_token::state::Account::LEN;
let lamports = Rent::default().minimum_balance(space);
create_pda_account(
payer,
&Rent::get()?,
spl_token::state::Account::LEN,
&spl_token::ID,
system_program,
vault,
&vault_seeds,
)?;
let create_account_ix = system_instruction::create_account(
payer.key,
vault.key,
lamports,
space as u64,
token_program.key,
);
let create_account_infos = [payer.clone(), vault.clone(), system_program.clone()];
invoke_signed(&create_account_ix, &create_account_infos, &[&vault_seeds])?;
// Initialize the token account for the vault
let initialize_ix = spl_token::instruction::initialize_account3(
token_program.key,
&spl_token::ID,
vault.key,
mint.key,
vault.key,
)?;
let initialize_account_infos = [vault.clone(), mint.clone(), token_program.clone()];
invoke(&initialize_ix, &initialize_account_infos)?;
emit_stack(CreateReferralLog {
referee: *payer.key,
referer: *referrer.key,
vault: *vault.key,
mint: *mint.key,
})?;
Ok(())
} else {
Err(ProgramError::NotEnoughAccountKeys)

View File

@ -1,3 +1,4 @@
pub mod create_pda;
mod instructions;
pub mod logs;
pub mod swap_ix;
@ -13,7 +14,6 @@ use solana_program::program_error::ProgramError;
use solana_program::program_pack::Pack;
use solana_program::{account_info::AccountInfo, pubkey::Pubkey};
#[cfg(not(feature = "no-entrypoint"))]
use {default_env::default_env, solana_program::entrypoint, solana_security_txt::security_txt};

View File

@ -84,10 +84,20 @@ pub struct ReferrerWithdrawLog {
pub amount: u64,
}
/// Event payload emitted when a referral vault is created
/// (see CREATE_REFERRAL_LOG_DISCRIMINANT / emit_stack call sites).
#[repr(C)]
#[derive(Clone, Copy, Zeroable, Pod)]
pub struct CreateReferralLog {
    // payer of the create-referral instruction
    pub referee: Pubkey,
    // the referrer the vault is derived for
    pub referer: Pubkey,
    // the newly created vault token account
    pub vault: Pubkey,
    // mint the vault holds
    pub mint: Pubkey,
}
pub const PLATFORM_FEE_LOG_DISCRIMINANT: [u8; 8] = [160, 183, 104, 34, 255, 190, 119, 188];
pub const REFERRER_FEE_LOG_DISCRIMINANT: [u8; 8] = [198, 149, 221, 27, 28, 103, 76, 95];
pub const REFERRER_WITHDRAW_LOG_DISCRIMINANT: [u8; 8] = [25, 7, 239, 41, 67, 36, 141, 92];
pub const SWAP_EVENT_DISCRIMINANT: [u8; 8] = [56, 178, 48, 245, 42, 152, 27, 75];
pub const CREATE_REFERRAL_LOG_DISCRIMINANT: [u8; 8] = [114, 188, 157, 65, 100, 179, 129, 169];
discriminant!(
PlatformFeeLog,
@ -106,6 +116,13 @@ discriminant!(
REFERRER_WITHDRAW_LOG_DISCRIMINANT,
test_referrer_withdraw_log
);
discriminant!(
CreateReferralLog,
CREATE_REFERRAL_LOG_DISCRIMINANT,
test_create_referral_log
);
discriminant!(SwapEvent, SWAP_EVENT_DISCRIMINANT, test_swap_event);
/// Canonical discriminant of the given struct. It is the hash of program ID and

View File

@ -23,6 +23,7 @@ test-case = "*"
tokio = "1.37.0"
solana-address-lookup-table-program = "1.17"
solana-program-test = "1.17"
solana-program-runtime = "1.17"
solana-sdk = "1.17"
spl-token = { version = "^3.0.0", features = ["no-entrypoint"] }
spl-token-2022 = { version = "1.0.0", features = ["no-entrypoint"] }
@ -31,3 +32,11 @@ bonfida-test-utils = "0.4.4"
log = "0.4.14"
env_logger = "0.9.0"
bincode = "1.3.3"
sha2 = "0.10.8"
base64 = "0.12.3"
litesvm = { git = "https://github.com/blockworks-foundation/litesvm.git", branch = "v0.1.0+solana_1.7" }
tracing = "0.1.37"
tracing-subscriber = "0.3.16"
[profile.test]
inherits = "release"

View File

@ -1,27 +1,29 @@
use anyhow::{Context, Error};
use bonfida_test_utils::error::TestError;
use bonfida_test_utils::ProgramTestContextExt;
use log::{debug, error, info, warn};
use anyhow::Error;
use litesvm::LiteSVM;
use log::{error, info, warn};
use router_test_lib::execution_dump::{ExecutionDump, ExecutionItem};
use router_test_lib::{execution_dump, serialize};
use solana_program::clock::{Clock, Epoch};
use sha2::Digest;
use sha2::Sha256;
use solana_program::clock::Clock;
use solana_program::instruction::Instruction;
use solana_program::program_pack::Pack;
use solana_program::program_stubs::{set_syscall_stubs, SyscallStubs};
use solana_program::pubkey::Pubkey;
use solana_program::sysvar::SysvarId;
use solana_program_test::BanksClientError;
use solana_program_test::{ProgramTest, ProgramTestContext};
use solana_sdk::account::{Account, AccountSharedData, ReadableAccount};
use solana_sdk::epoch_info::EpochInfo;
use solana_sdk::bpf_loader_upgradeable::UpgradeableLoaderState;
use solana_sdk::message::{Message, VersionedMessage};
use solana_sdk::signature::Keypair;
use solana_sdk::signer::Signer;
use solana_sdk::transaction::Transaction;
use spl_associated_token_account::get_associated_token_address;
use solana_sdk::transaction::VersionedTransaction;
use spl_associated_token_account::{
get_associated_token_address, get_associated_token_address_with_program_id,
};
use spl_token::state::AccountState;
use spl_token_2022::state::AccountState as AccountState2022;
use std::collections::HashMap;
use std::process::exit;
use std::path::PathBuf;
use std::str::FromStr;
struct TestLogSyscallStubs;
@ -76,13 +78,15 @@ async fn test_quote_match_swap_for_invariant() -> anyhow::Result<()> {
}
async fn run_all_swap_from_dump(dump_name: &str) -> Result<Result<(), Error>, Error> {
tracing_subscriber::fmt::init();
let mut skip_count = option_env!("SKIP_COUNT")
.map(|x| u32::from_str(x).unwrap_or(0))
.unwrap_or(0);
let mut stop_at = u32::MAX;
let skip_ixs_index = vec![];
let mut run_lot_size = option_env!("RUN_LOT_SIZE")
let run_lot_size = option_env!("RUN_LOT_SIZE")
.map(|x| u32::from_str(x).unwrap_or(500))
.unwrap_or(500);
@ -107,6 +111,14 @@ async fn run_all_swap_from_dump(dump_name: &str) -> Result<Result<(), Error>, Er
.ok_or("invalid dump doesnt contain clock sysvar")
.unwrap();
let clock = clock_account.deserialize_data::<Clock>()?;
let simulate = option_env!("SIMULATE")
.map(|x| bool::from_str(x).unwrap_or(false))
.unwrap_or_default();
let debug_hashes = option_env!("DEBUG_HASHES")
.map(|x| bool::from_str(x).unwrap_or(false))
.unwrap_or_default();
let mut ctx = setup_test_chain(&clock, &data)?;
let mut cus_required = vec![];
for quote in &data.cache {
@ -126,16 +138,15 @@ async fn run_all_swap_from_dump(dump_name: &str) -> Result<Result<(), Error>, Er
continue;
}
let mut ctx = setup_test_chain(&data.programs, &clock).await;
let instruction = deserialize_instruction(&quote.instruction)?;
create_wallet(&mut ctx, wallet.pubkey());
let initial_in_balance = quote.input_amount * 2;
let initial_out_balance = 1_000_000;
let instruction = deserialize_instruction(&quote.instruction)?;
initialize_instruction_accounts(&mut ctx, &data, &instruction).await?;
// let slot = ctx.banks_client.get_root_slot().await.unwrap();
// ctx.warp_to_slot(slot+3).unwrap();
let input_mint_is_2022 = is_2022(&data.accounts, quote.input_mint).await;
let output_mint_is_2022 = is_2022(&data.accounts, quote.output_mint).await;
@ -146,19 +157,38 @@ async fn run_all_swap_from_dump(dump_name: &str) -> Result<Result<(), Error>, Er
quote.input_mint,
initial_in_balance,
input_mint_is_2022,
)
.await?;
)?;
set_balance(
&mut ctx,
wallet.pubkey(),
quote.output_mint,
initial_out_balance,
output_mint_is_2022,
)
.await?;
)?;
if let Some(cus) = simulate_cu_usage(&mut ctx, &wallet, &instruction).await {
cus_required.push(cus);
for meta in &instruction.accounts {
let Some(account) = ctx.get_account(&meta.pubkey) else {
log::warn!("missing account : {:?}", meta.pubkey);
continue;
};
if debug_hashes {
let mut hasher = Sha256::new();
hasher.update(account.data());
let result = hasher.finalize();
let base64 = base64::encode(result);
log::debug!(
"account : {:?} dump : {base64:?} executable : {}",
meta.pubkey,
account.executable()
);
}
}
if simulate {
if let Some(cus) = simulate_cu_usage(&mut ctx, &wallet, &instruction).await {
cus_required.push(cus);
}
}
match swap(&mut ctx, &wallet, &instruction).await {
@ -235,6 +265,17 @@ async fn run_all_swap_from_dump(dump_name: &str) -> Result<Result<(), Error>, Er
}
success += 1;
// reset the mutable accounts for next test
reinitialize_accounts(
&mut ctx,
&data,
&instruction
.accounts
.iter()
.filter_map(|x| if x.is_writable { Some(x.pubkey) } else { None })
.collect(),
)?;
}
cus_required.sort();
@ -266,7 +307,7 @@ async fn debug_print_ix(
success: &mut i32,
index: &mut u32,
quote: &ExecutionItem,
ctx: &mut ProgramTestContext,
ctx: &mut LiteSVM,
instruction: &Instruction,
input_mint_is_2022: bool,
output_mint_is_2022: bool,
@ -298,13 +339,8 @@ async fn debug_print_ix(
for acc in &instruction.accounts {
let account = ctx
.banks_client
.get_account(acc.pubkey)
.await
.map(|x| {
x.map(|y| (y.executable, y.owner.to_string()))
.unwrap_or((false, "???".to_string()))
})
.get_account(&acc.pubkey)
.map(|x| (x.executable, x.owner.to_string()))
.unwrap_or((false, "???".to_string()));
warn!(
@ -329,131 +365,196 @@ fn deserialize_instruction(swap_ix: &Vec<u8>) -> anyhow::Result<Instruction> {
Ok(instruction)
}
async fn initialize_instruction_accounts(
ctx: &mut ProgramTestContext,
fn reinitialize_accounts(
program_test: &mut LiteSVM,
dump: &ExecutionDump,
instruction: &Instruction,
accounts_list: &Vec<Pubkey>,
) -> anyhow::Result<()> {
for account_meta in &instruction.accounts {
if dump.programs.contains(&account_meta.pubkey) {
log::debug!("reinitializing accounts : {:?}", accounts_list.len());
for pk in accounts_list {
let Some(account) = dump.accounts.get(&pk) else {
continue;
}
if let Some(account) = dump.accounts.get(&account_meta.pubkey) {
if account.executable() {
continue;
}
debug!("Setting data for {}", account_meta.pubkey);
ctx.set_account(&account_meta.pubkey, account);
} else {
if ctx
.banks_client
.get_account(account_meta.pubkey)
.await?
.is_none()
};
log::debug!(
"Setting data for {} with owner {} and is executable {}",
pk,
account.owner(),
account.executable()
);
log::debug!("Setting data for {}", pk);
program_test.set_account(
*pk,
solana_sdk::account::Account {
lamports: account.lamports(),
owner: *account.owner(),
data: account.data().to_vec(),
rent_epoch: account.rent_epoch(),
executable: account.executable(),
},
)?;
}
Ok(())
}
/// Loads every program and account from the dump into the LiteSVM instance.
/// For programs owned by the upgradeable BPF loader the referenced
/// programdata (buffer) account is installed first, since the loader resolves
/// the program through it.
///
/// NOTE(review): removed one stray line of the old implementation that
/// referenced `account_meta`, which no longer exists in this scope.
fn initialize_accounts(program_test: &mut LiteSVM, dump: &ExecutionDump) -> anyhow::Result<()> {
    log::debug!("initializing accounts : {:?}", dump.accounts.len());
    // programs first, then all dumped accounts (HashSet de-duplicates)
    let mut accounts_list = dump.programs.clone();
    accounts_list.extend(dump.accounts.iter().map(|x| x.0.clone()));
    for pk in accounts_list {
        let Some(account) = dump.accounts.get(&pk) else {
            continue;
        };
        if *account.owner() == solana_sdk::bpf_loader_upgradeable::ID {
            log::debug!("{pk:?} has upgradable loader");
            let state = bincode::deserialize::<UpgradeableLoaderState>(account.data()).unwrap();
            if let UpgradeableLoaderState::Program {
                programdata_address,
            } = state
            {
                // load buffer accounts first
                match dump.accounts.get(&programdata_address) {
                    Some(program_buffer) => {
                        log::debug!("loading buffer: {programdata_address:?}");
                        program_test.set_account(
                            programdata_address,
                            solana_sdk::account::Account {
                                lamports: program_buffer.lamports(),
                                owner: *program_buffer.owner(),
                                data: program_buffer.data().to_vec(),
                                rent_epoch: program_buffer.rent_epoch(),
                                executable: program_buffer.executable(),
                            },
                        )?;
                    }
                    None => {
                        error!("{programdata_address:?} is not there");
                    }
                }
            }
        }
        log::debug!(
            "Setting data for {} with owner {} and is executable {}",
            pk,
            account.owner(),
            account.executable()
        );
        log::debug!("Setting data for {}", pk);
        program_test.set_account(
            pk,
            solana_sdk::account::Account {
                lamports: account.lamports(),
                owner: *account.owner(),
                data: account.data().to_vec(),
                rent_epoch: account.rent_epoch(),
                executable: account.executable(),
            },
        )?;
    }
    Ok(())
}
async fn simulate_cu_usage(
ctx: &mut ProgramTestContext,
ctx: &mut LiteSVM,
owner: &Keypair,
instruction: &Instruction,
) -> Option<u64> {
let mut transaction =
Transaction::new_with_payer(&[instruction.clone()], Some(&ctx.payer.pubkey()));
let tx = VersionedTransaction::try_new(
VersionedMessage::Legacy(Message::new(&[instruction.clone()], Some(&owner.pubkey()))),
&[owner],
)
.unwrap();
transaction.sign(&[&ctx.payer, owner], ctx.last_blockhash);
let sim = ctx
.banks_client
.simulate_transaction(transaction.clone())
.await;
let sim = ctx.simulate_transaction(tx);
match sim {
Ok(sim) => {
log::debug!("{:?}", sim.result);
if sim.result.is_some() && sim.result.unwrap().is_ok() {
let simulation_details = sim.simulation_details.unwrap();
let cus = simulation_details.units_consumed;
log::debug!("units consumed : {}", cus);
log::debug!("----logs");
for log in simulation_details.logs {
log::debug!("{log:?}");
}
let cus = sim.compute_units_consumed;
log::debug!("----logs");
for log in sim.logs {
log::debug!("{log:?}");
}
if cus > 0 {
Some(cus)
} else {
None
}
}
Err(e) => {
log::warn!("Error simulating : {}", e);
log::warn!("Error simulating : {:?}", e);
None
}
}
}
async fn swap(
ctx: &mut ProgramTestContext,
owner: &Keypair,
instruction: &Instruction,
) -> anyhow::Result<()> {
ctx.get_new_latest_blockhash().await?;
log::info!("swapping");
let result = ctx
.sign_send_instructions(&[instruction.clone()], &[&owner])
.await;
async fn swap(ctx: &mut LiteSVM, owner: &Keypair, instruction: &Instruction) -> anyhow::Result<()> {
let tx = VersionedTransaction::try_new(
VersionedMessage::Legacy(Message::new(&[instruction.clone()], Some(&owner.pubkey()))),
&[owner],
)
.unwrap();
let result = ctx.send_transaction(tx);
match result {
Ok(()) => Ok(()),
Err(e) => Err(anyhow::format_err!("Failed to swap {:?}", e)),
Ok(_) => Ok(()),
Err(e) => {
log::error!("------------- LOGS ------------------");
for log in &e.meta.logs {
log::error!("{log:?}");
}
Err(anyhow::format_err!("Failed to swap {:?}", e.err))
}
}
}
async fn get_balance(
ctx: &mut ProgramTestContext,
ctx: &mut LiteSVM,
owner: Pubkey,
mint: Pubkey,
is_2022: bool,
) -> anyhow::Result<u64> {
let ata_address = get_associated_token_address(&owner, &mint);
let Some(ata) = ctx.get_account(&ata_address) else {
return Ok(0);
};
if is_2022 {
let Ok(ata) = ctx.banks_client.get_account(ata_address).await else {
return Ok(0);
};
let Some(ata) = ata else {
return Ok(0);
};
let ata = spl_token_2022::state::Account::unpack(&ata.data);
if let Ok(ata) = ata {
return Ok(ata.amount);
}
};
if let Ok(ata) = ctx.get_token_account(ata_address).await {
if let Ok(ata) = spl_token::state::Account::unpack(&ata.data) {
Ok(ata.amount)
} else {
Ok(0u64)
}
}
async fn set_balance(
ctx: &mut ProgramTestContext,
fn set_balance(
ctx: &mut LiteSVM,
owner: Pubkey,
mint: Pubkey,
amount: u64,
is_2022: bool,
) -> anyhow::Result<()> {
let ata_address = get_associated_token_address(&owner, &mint);
let token_program_id = if is_2022 {
spl_token_2022::ID
} else {
spl_token::ID
};
let ata_address =
get_associated_token_address_with_program_id(&owner, &mint, &token_program_id);
let mut data = vec![0u8; 165];
if is_2022 {
let mut data = vec![0u8; 165];
// TODO: to properly setup extensions, this is not sufficient
let account = spl_token_2022::state::Account {
mint,
owner,
@ -465,91 +566,76 @@ async fn set_balance(
close_authority: Default::default(),
};
account.pack_into_slice(data.as_mut_slice());
ctx.set_account(
&ata_address,
&AccountSharedData::from(Account {
lamports: 1_000_000_000,
data: data,
owner: spl_token_2022::ID,
executable: false,
rent_epoch: 0,
}),
);
return Ok(());
}
let mut data = vec![0u8; 165];
let account = spl_token::state::Account {
mint,
owner,
amount,
delegate: Default::default(),
state: AccountState::Initialized,
is_native: Default::default(),
delegated_amount: 0,
close_authority: Default::default(),
} else {
let account = spl_token::state::Account {
mint,
owner,
amount,
delegate: Default::default(),
state: AccountState::Initialized,
is_native: Default::default(),
delegated_amount: 0,
close_authority: Default::default(),
};
account.pack_into_slice(data.as_mut_slice());
};
account.pack_into_slice(data.as_mut_slice());
ctx.set_account(
&ata_address,
&AccountSharedData::from(Account {
ata_address,
Account {
lamports: 1_000_000_000,
data: data,
owner: spl_token::ID,
owner: token_program_id,
executable: false,
rent_epoch: 0,
}),
);
rent_epoch: u64::MAX,
},
)?;
Ok(())
}
fn create_wallet(ctx: &mut ProgramTestContext, address: Pubkey) {
ctx.set_account(
&address,
&AccountSharedData::from(Account {
lamports: 1_000_000_000,
data: vec![],
owner: address,
executable: false,
rent_epoch: 0,
}),
);
fn create_wallet(ctx: &mut LiteSVM, address: Pubkey) {
let _ = ctx.airdrop(&address, 1_000_000_000);
}
async fn setup_test_chain(programs: &Vec<Pubkey>, clock: &Clock) -> ProgramTestContext {
// We need to intercept logs to capture program log output
let log_filter = "solana_rbpf=trace,\
solana_runtime::message_processor=debug,\
solana_runtime::system_instruction_processor=trace,\
solana_program_test=info,\
solana_metrics::metrics=warn,\
tarpc=error,\
info";
let env_logger =
env_logger::Builder::from_env(env_logger::Env::new().default_filter_or(log_filter))
.format_timestamp_nanos()
.build();
let _ = log::set_boxed_logger(Box::new(env_logger));
let mut program_test = ProgramTest::default();
for &key in programs {
program_test.add_program(key.to_string().as_str(), key, None);
pub fn find_file(filename: &str) -> Option<PathBuf> {
for dir in default_shared_object_dirs() {
let candidate = dir.join(filename);
if candidate.exists() {
return Some(candidate);
}
}
program_test.add_program("autobahn_executor", autobahn_executor::ID, None);
None
}
/// Directories searched for program `.so` files, in priority order:
/// `BPF_OUT_DIR` (or, if unset, `SBF_OUT_DIR`), then `tests/fixtures`,
/// then the current working directory.
fn default_shared_object_dirs() -> Vec<PathBuf> {
    let mut dirs = Vec::new();
    // BPF_OUT_DIR wins; SBF_OUT_DIR is only consulted when it is absent
    let out_dir = std::env::var("BPF_OUT_DIR").or_else(|_| std::env::var("SBF_OUT_DIR"));
    if let Ok(dir) = out_dir {
        dirs.push(PathBuf::from(dir));
    }
    dirs.push(PathBuf::from("tests/fixtures"));
    if let Ok(cwd) = std::env::current_dir() {
        dirs.push(cwd);
    }
    log::trace!("SBF .so search path: {:?}", dirs);
    dirs
}
/// Builds a LiteSVM instance for a replay test: sets the dumped clock sysvar,
/// installs every dumped program/account, loads the autobahn executor binary
/// from the fixture search path and raises the compute budget.
///
/// NOTE(review): residue of the removed ProgramTest version was interleaved
/// here (`set_compute_max_units`, `start_with_context().await` — an `.await`
/// in a non-async fn — and a second clock setup); only the LiteSVM version is
/// kept. The useless `format!` of a literal and the `.unwrap()` on
/// `find_file` are replaced with a literal and a descriptive error.
fn setup_test_chain(clock: &Clock, dump: &ExecutionDump) -> anyhow::Result<LiteSVM> {
    let mut program_test = LiteSVM::new();
    program_test.set_sysvar(clock);
    initialize_accounts(&mut program_test, dump)?;

    let path = find_file("autobahn_executor.so")
        .ok_or_else(|| anyhow::anyhow!("autobahn_executor.so not found in search path"))?;
    log::debug!("Adding program: {:?} at {path:?}", autobahn_executor::ID);
    program_test.add_program_from_file(autobahn_executor::ID, path)?;

    // TODO: make this dynamic based on routes
    let mut cb = solana_program_runtime::compute_budget::ComputeBudget::default();
    cb.compute_unit_limit = 1_400_000;
    program_test.set_compute_budget(cb);

    Ok(program_test)
}

View File

@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Re-dump every fixture program's mainnet binary into $SOURCE.
# Variables are quoted so file names / paths containing spaces don't
# word-split; $SOURCE must be set to the destination directory.
for filename in programs/simulator/tests/fixtures/*.so; do
    filename=$(basename "$filename")
    filename="${filename%.*}"
    echo "$filename"
    solana program dump -um "$filename" "$SOURCE/$filename.so"
done

View File

@ -8,7 +8,9 @@ export DUMP_MAINNET_DATA=1 RUST_LOG=info
# define in addition
# RPC_HTTP_URL="http://fcs-ams1._peer.internal:18899"
# for eclipse
# export ECLIPSE=true
# export DISABLE_COMRPESSED_GPA=true
# saber
DUMP_SABER_START=$(date)