refactor: create structs folder

dboures 2023-03-13 00:03:58 -05:00
parent 352bc32210
commit 6e8ddd1ef5
20 changed files with 342 additions and 207 deletions

Cargo.lock (generated)

@@ -3498,6 +3498,7 @@ dependencies = [
"async-trait",
"borsh 0.9.3",
"chrono",
"derive_more",
"dotenv",
"env_logger 0.10.0",
"jsonrpc-core-client",

Cargo.toml

@@ -44,6 +44,7 @@ serde_json = "1.0"
serde_derive = "1.0"
strum = { version = "0.24", features = ["derive"] }
num-traits = "0.2"
derive_more = "0.99.17"
serum_dex = { version = "0.5.10", git = "https://github.com/openbook-dex/program.git", default-features=false, features = ["no-entrypoint", "program"] }
anchor-lang = ">=0.25.0"
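The only manifest change is the new derive_more dependency, which is not referenced anywhere else in this diff. As a rough illustration (the newtype below is made up, not from the commit), derive_more 0.99 supplies derive macros such as Display and From:

use derive_more::{Display, From};

// Hypothetical newtype showing the kind of boilerplate derive_more removes.
#[derive(Debug, Display, From)]
#[display(fmt = "{}", _0)]
struct MarketName(String);

fn main() {
    let m: MarketName = "SOL/USDC".to_string().into(); // via derived From<String>
    println!("{}", m); // prints "SOL/USDC" via derived Display
}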


@@ -4,11 +4,10 @@ use sqlx::{types::Decimal, Pool, Postgres};
use std::cmp::{max, min};
use crate::{
candle_creation::candle_batching::day,
database::{
fetch::{fetch_candles_from, fetch_earliest_candle, fetch_latest_finished_candle},
Candle, Resolution,
},
Candle,
}, structs::resolution::{day, Resolution},
};
pub async fn batch_higher_order_candles(
@@ -16,8 +15,7 @@ pub async fn batch_higher_order_candles(
market_name: &str,
resolution: Resolution,
) -> anyhow::Result<Vec<Candle>> {
let latest_candle =
fetch_latest_finished_candle(pool, market_name, resolution).await?;
let latest_candle = fetch_latest_finished_candle(pool, market_name, resolution).await?;
match latest_candle {
Some(candle) => {
@@ -45,12 +43,9 @@ pub async fn batch_higher_order_candles(
Ok(combined_candles)
}
None => {
let constituent_candle = fetch_earliest_candle(
pool,
market_name,
resolution.get_constituent_resolution(),
)
.await?;
let constituent_candle =
fetch_earliest_candle(pool, market_name, resolution.get_constituent_resolution())
.await?;
if constituent_candle.is_none() {
println!(
"Batching {}, but no candles found for: {:?}, {}",
@@ -103,8 +98,10 @@ fn combine_into_higher_order_candles(
let candles_len = constituent_candles.len();
let empty_candle =
Candle::create_empty_candle(constituent_candles[0].market_name.clone(), target_resolution);
let empty_candle = Candle::create_empty_candle(
constituent_candles[0].market_name.clone(),
target_resolution,
);
let mut combined_candles =
vec![empty_candle; (day().num_minutes() / duration.num_minutes()) as usize];
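The batch buffer is sized to one day of candles at the target resolution, i.e. day().num_minutes() / duration.num_minutes() slots. A standalone sketch of that arithmetic (not part of the commit):

use chrono::Duration;

// One day's worth of candle slots at a given resolution.
fn slots_per_day(duration: Duration) -> usize {
    (Duration::days(1).num_minutes() / duration.num_minutes()) as usize
}

fn main() {
    assert_eq!(slots_per_day(Duration::hours(1)), 24);    // 1440 / 60
    assert_eq!(slots_per_day(Duration::minutes(15)), 96); // 1440 / 15
}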


@@ -4,12 +4,10 @@ use chrono::{DateTime, Duration, DurationRound, Utc};
use num_traits::{FromPrimitive, Zero};
use sqlx::{types::Decimal, Pool, Postgres};
use crate::database::{
use crate::{database::{
fetch::{fetch_earliest_fill, fetch_fills_from, fetch_latest_finished_candle},
Candle, MarketInfo, PgOpenBookFill, Resolution,
};
use super::day;
Candle, PgOpenBookFill,
}, structs::{markets::MarketInfo, resolution::{Resolution, day}}};
pub async fn batch_1m_candles(
pool: &Pool<Postgres>,
@@ -17,8 +15,7 @@ pub async fn batch_1m_candles(
) -> anyhow::Result<Vec<Candle>> {
let market_name = &market.name;
let market_address = &market.address;
let latest_candle =
fetch_latest_finished_candle(pool, market_name, Resolution::R1m).await?;
let latest_candle = fetch_latest_finished_candle(pool, market_name, Resolution::R1m).await?;
match latest_candle {
Some(candle) => {
@@ -27,8 +24,7 @@ pub async fn batch_1m_candles(
start_time + day(),
Utc::now().duration_trunc(Duration::minutes(1))?,
);
let mut fills =
fetch_fills_from(pool, market_address, start_time, end_time).await?;
let mut fills = fetch_fills_from(pool, market_address, start_time, end_time).await?;
let candles = combine_fills_into_1m_candles(
&mut fills,
market,
@@ -54,8 +50,7 @@ pub async fn batch_1m_candles(
start_time + day(),
Utc::now().duration_trunc(Duration::minutes(1))?,
);
let mut fills =
fetch_fills_from(pool, market_address, start_time, end_time).await?;
let mut fills = fetch_fills_from(pool, market_address, start_time, end_time).await?;
let candles =
combine_fills_into_1m_candles(&mut fills, market, start_time, end_time, None);
Ok(candles)
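Both branches cap the window at min(start_time + day(), now truncated to the minute), so the fetch range always ends on a whole-minute boundary and never in the future. A small sketch of the duration_trunc call (assumes a chrono version that provides with_ymd_and_hms):

use chrono::{Duration, DurationRound, TimeZone, Utc};

fn main() {
    // 2023-03-13 00:03:58 truncated to the containing minute boundary.
    let now = Utc.with_ymd_and_hms(2023, 3, 13, 0, 3, 58).unwrap();
    let end = now.duration_trunc(Duration::minutes(1)).unwrap();
    assert_eq!(end, Utc.with_ymd_and_hms(2023, 3, 13, 0, 3, 0).unwrap());
}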


@@ -8,14 +8,12 @@ use tokio::{sync::mpsc::Sender, time::sleep};
use crate::{
candle_creation::candle_batching::minute_candles::batch_1m_candles,
database::{Candle, MarketInfo, Resolution},
database::{Candle}, structs::{markets::MarketInfo, resolution::Resolution},
};
use self::higher_order_candles::batch_higher_order_candles;
pub fn day() -> Duration {
Duration::days(1)
}
pub async fn batch_candles(
pool: Pool<Postgres>,


@@ -1,18 +1,19 @@
use std::{collections::HashMap, str::FromStr};
use dotenv;
use openbook_candles::candle_creation::candle_batching::batch_candles;
use openbook_candles::database::{
insert::{persist_candles, persist_fill_events},
initialize::{connect_to_database, setup_database},
Candle,
};
use openbook_candles::candle_creation::trade_fetching::{
backfill::backfill,
parsing::OpenBookFillEventLog,
scrape::{fetch_market_infos, scrape},
};
use openbook_candles::utils::{Config, load_markets};
use dotenv;
use openbook_candles::database::{
initialize::{connect_to_database, setup_database},
insert::{persist_candles, persist_fill_events},
Candle,
};
use openbook_candles::structs::markets::load_markets;
use openbook_candles::structs::openbook::OpenBookFillEventLog;
use openbook_candles::utils::{Config};
use solana_sdk::pubkey::Pubkey;
use std::{collections::HashMap, str::FromStr};
use tokio::sync::mpsc;
#[tokio::main]
@@ -21,11 +22,12 @@ async fn main() -> anyhow::Result<()> {
let rpc_url: String = dotenv::var("RPC_URL").unwrap();
let database_url: String = dotenv::var("DATABASE_URL").unwrap();
let max_pg_pool_connections: u32 = dotenv::var("MAX_PG_POOL_CONNS_WORKER").unwrap().parse::<u32>().unwrap();
let config = Config {
rpc_url: rpc_url.clone(),
database_url,
max_pg_pool_connections: 5,
max_pg_pool_connections,
};
let markets = load_markets("/Users/dboures/dev/openbook-candles/markets.json");


@@ -1,2 +1,2 @@
pub mod candle_batching;
pub mod trade_fetching;
pub mod trade_fetching;


@@ -5,9 +5,7 @@ use solana_transaction_status::UiTransactionEncoding;
use std::collections::HashMap;
use tokio::sync::mpsc::Sender;
use crate::candle_creation::trade_fetching::scrape::scrape_transactions;
use super::parsing::OpenBookFillEventLog;
use crate::{candle_creation::trade_fetching::scrape::scrape_transactions, structs::openbook::OpenBookFillEventLog};
pub async fn backfill(
rpc_url: &String,


@@ -7,81 +7,10 @@ use std::{collections::HashMap, io::Error};
use anchor_lang::{event, AnchorDeserialize, AnchorSerialize};
use solana_sdk::pubkey::Pubkey;
use crate::structs::openbook::OpenBookFillEventLog;
const PROGRAM_DATA: &str = "Program data: ";
#[event]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct OpenBookFillEventLog {
pub market: Pubkey,
pub open_orders: Pubkey,
pub open_orders_owner: Pubkey,
pub bid: bool,
pub maker: bool,
pub native_qty_paid: u64,
pub native_qty_received: u64,
pub native_fee_or_rebate: u64,
pub order_id: u128,
pub owner_slot: u8,
pub fee_tier: u8,
pub client_order_id: Option<u64>,
pub referrer_rebate: Option<u64>,
}
#[derive(Copy, Clone, AnchorDeserialize)]
#[cfg_attr(target_endian = "little", derive(Debug))]
#[repr(packed)]
pub struct MarketState {
// 0
pub account_flags: u64, // Initialized, Market
// 1
pub own_address: [u64; 4],
// 5
pub vault_signer_nonce: u64,
// 6
pub coin_mint: [u64; 4],
// 10
pub pc_mint: [u64; 4],
// 14
pub coin_vault: [u64; 4],
// 18
pub coin_deposits_total: u64,
// 19
pub coin_fees_accrued: u64,
// 20
pub pc_vault: [u64; 4],
// 24
pub pc_deposits_total: u64,
// 25
pub pc_fees_accrued: u64,
// 26
pub pc_dust_threshold: u64,
// 27
pub req_q: [u64; 4],
// 31
pub event_q: [u64; 4],
// 35
pub bids: [u64; 4],
// 39
pub asks: [u64; 4],
// 43
pub coin_lot_size: u64,
// 44
pub pc_lot_size: u64,
// 45
pub fee_rate_bps: u64,
// 46
pub referrer_rebates_accrued: u64,
}
pub fn parse_trades_from_openbook_txns(
txns: &mut Vec<ClientResult<EncodedConfirmedTransactionWithStatusMeta>>,
target_markets: &HashMap<Pubkey, u8>,


@@ -14,12 +14,10 @@ use std::{collections::HashMap, str::FromStr, time::Duration as WaitDuration};
use tokio::sync::mpsc::Sender;
use crate::{
database::MarketInfo,
candle_creation::trade_fetching::parsing::MarketState,
utils::{Config, MarketConfig},
utils::{Config}, structs::{openbook::{OpenBookFillEventLog, MarketState}, markets::{MarketInfo, MarketConfig}},
};
use super::parsing::{parse_trades_from_openbook_txns, OpenBookFillEventLog};
use super::parsing::{parse_trades_from_openbook_txns};
pub async fn scrape(
config: &Config,


@@ -122,7 +122,7 @@ pub async fn fetch_earliest_candle(
pub async fn fetch_candles_from(
pool: &Pool<Postgres>,
market_address_string: &str,
market_name: &str,
resolution: Resolution,
start_time: DateTime<Utc>,
end_time: DateTime<Utc>,
@@ -147,7 +147,43 @@ pub async fn fetch_candles_from(
and end_time <= $4
and complete = true
ORDER BY start_time asc"#,
market_address_string,
market_name,
resolution.to_string(),
start_time,
end_time
)
.fetch_all(pool)
.await
.map_err_anyhow()
}
pub async fn fetch_tradingview_candles(
pool: &Pool<Postgres>,
market_name: &str,
resolution: Resolution,
start_time: DateTime<Utc>,
end_time: DateTime<Utc>,
) -> anyhow::Result<Vec<Candle>> {
sqlx::query_as!(
Candle,
r#"SELECT
start_time as "start_time!",
end_time as "end_time!",
resolution as "resolution!",
market_name as "market_name!",
open as "open!",
close as "close!",
high as "high!",
low as "low!",
volume as "volume!",
complete as "complete!"
from candles
where market_name = $1
and resolution = $2
and start_time >= $3
and end_time <= $4
ORDER BY start_time asc"#, // TODO: order?
market_name,
resolution.to_string(),
start_time,
end_time


@@ -7,7 +7,7 @@ use std::{
};
use tokio::sync::mpsc::{error::TryRecvError, Receiver};
use crate::{candle_creation::trade_fetching::parsing::OpenBookFillEventLog, utils::AnyhowWrap};
use crate::{utils::AnyhowWrap, structs::openbook::OpenBookFillEventLog};
use super::Candle;


@@ -1,11 +1,8 @@
use std::fmt;
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use chrono::{DateTime, NaiveDateTime, Utc};
use num_traits::Zero;
use sqlx::types::Decimal;
use strum::EnumIter;
use crate::candle_creation::candle_batching::day;
use crate::structs::resolution::Resolution;
pub mod fetch;
pub mod initialize;
@@ -15,65 +12,6 @@ pub trait Summary {
fn summarize(&self) -> String;
}
#[derive(EnumIter, Copy, Clone, Eq, PartialEq)]
pub enum Resolution {
R1m,
R3m,
R5m,
R15m,
R30m,
R1h,
R2h,
R4h,
R1d,
}
impl fmt::Display for Resolution {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Resolution::R1m => write!(f, "1M"),
Resolution::R3m => write!(f, "3M"),
Resolution::R5m => write!(f, "5M"),
Resolution::R15m => write!(f, "15M"),
Resolution::R30m => write!(f, "30M"),
Resolution::R1h => write!(f, "1H"),
Resolution::R2h => write!(f, "2H"),
Resolution::R4h => write!(f, "4H"),
Resolution::R1d => write!(f, "1D"),
}
}
}
impl Resolution {
pub fn get_constituent_resolution(self) -> Resolution {
match self {
Resolution::R1m => panic!("have to use fills to make 1M candles"),
Resolution::R3m => Resolution::R1m,
Resolution::R5m => Resolution::R1m,
Resolution::R15m => Resolution::R5m,
Resolution::R30m => Resolution::R15m,
Resolution::R1h => Resolution::R30m,
Resolution::R2h => Resolution::R1h,
Resolution::R4h => Resolution::R2h,
Resolution::R1d => Resolution::R4h,
}
}
pub fn get_duration(self) -> Duration {
match self {
Resolution::R1m => Duration::minutes(1),
Resolution::R3m => Duration::minutes(3),
Resolution::R5m => Duration::minutes(5),
Resolution::R15m => Duration::minutes(15),
Resolution::R30m => Duration::minutes(30),
Resolution::R1h => Duration::hours(1),
Resolution::R2h => Duration::hours(2),
Resolution::R4h => Duration::hours(4),
Resolution::R1d => day(),
}
}
}
#[derive(Clone, Debug)]
pub struct Candle {
pub market_name: String,
@@ -115,14 +53,4 @@ pub struct PgOpenBookFill {
pub native_fee_or_rebate: Decimal,
}
#[derive(Debug, Clone)]
pub struct MarketInfo {
pub name: String,
pub address: String,
pub base_decimals: u8,
pub quote_decimals: u8,
pub base_mint_key: String,
pub quote_mint_key: String,
pub base_lot_size: u64,
pub quote_lot_size: u64,
}


@@ -1,3 +1,4 @@
pub mod database;
pub mod candle_creation;
pub mod utils;
pub mod database;
pub mod utils;
pub mod structs;

src/structs/markets.rs (new file, 25 lines)

@@ -0,0 +1,25 @@
use std::fs::File;
use serde::Deserialize;
#[derive(Debug, Clone)]
pub struct MarketInfo {
pub name: String,
pub address: String,
pub base_decimals: u8,
pub quote_decimals: u8,
pub base_mint_key: String,
pub quote_mint_key: String,
pub base_lot_size: u64,
pub quote_lot_size: u64,
}
#[derive(Clone, Debug, Deserialize)]
pub struct MarketConfig {
pub name: String,
pub address: String,
}
pub fn load_markets(path: &str) -> Vec<MarketConfig> {
let reader = File::open(path).unwrap();
serde_json::from_reader(reader).unwrap()
}
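load_markets simply deserializes a JSON array of MarketConfig entries. A sketch of the expected shape (the entry below is made up; from_str is used instead of from_reader purely to keep the example self-contained):

use openbook_candles::structs::markets::MarketConfig;

fn main() {
    // Same shape as markets.json: an array of { name, address } objects.
    let json = r#"[{ "name": "SOL/USDC", "address": "<base58 market address>" }]"#;
    let markets: Vec<MarketConfig> = serde_json::from_str(json).unwrap();
    assert_eq!(markets[0].name, "SOL/USDC");
}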

src/structs/mod.rs (new file, 4 lines)

@@ -0,0 +1,4 @@
pub mod markets;
pub mod resolution;
pub mod tradingview;
pub mod openbook;

src/structs/openbook.rs (new file, 75 lines)

@@ -0,0 +1,75 @@
use anchor_lang::{event, AnchorDeserialize, AnchorSerialize};
use solana_sdk::pubkey::Pubkey;
#[event]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct OpenBookFillEventLog {
pub market: Pubkey,
pub open_orders: Pubkey,
pub open_orders_owner: Pubkey,
pub bid: bool,
pub maker: bool,
pub native_qty_paid: u64,
pub native_qty_received: u64,
pub native_fee_or_rebate: u64,
pub order_id: u128,
pub owner_slot: u8,
pub fee_tier: u8,
pub client_order_id: Option<u64>,
pub referrer_rebate: Option<u64>,
}
#[derive(Copy, Clone, AnchorDeserialize)]
#[cfg_attr(target_endian = "little", derive(Debug))]
#[repr(packed)]
pub struct MarketState {
// 0
pub account_flags: u64, // Initialized, Market
// 1
pub own_address: [u64; 4],
// 5
pub vault_signer_nonce: u64,
// 6
pub coin_mint: [u64; 4],
// 10
pub pc_mint: [u64; 4],
// 14
pub coin_vault: [u64; 4],
// 18
pub coin_deposits_total: u64,
// 19
pub coin_fees_accrued: u64,
// 20
pub pc_vault: [u64; 4],
// 24
pub pc_deposits_total: u64,
// 25
pub pc_fees_accrued: u64,
// 26
pub pc_dust_threshold: u64,
// 27
pub req_q: [u64; 4],
// 31
pub event_q: [u64; 4],
// 35
pub bids: [u64; 4],
// 39
pub asks: [u64; 4],
// 43
pub coin_lot_size: u64,
// 44
pub pc_lot_size: u64,
// 45
pub fee_rate_bps: u64,
// 46
pub referrer_rebates_accrued: u64,
}
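MarketState mirrors the serum/OpenBook account layout, with pubkeys stored as [u64; 4] word arrays. A sketch (an assumption for illustration, not code from this commit) of turning one of those fields back into a solana_sdk Pubkey on a little-endian target:

use solana_sdk::pubkey::Pubkey;

// Concatenate the little-endian bytes of the four words back into 32 bytes.
fn words_to_pubkey(words: [u64; 4]) -> Pubkey {
    let mut bytes = [0u8; 32];
    for (i, w) in words.iter().enumerate() {
        bytes[i * 8..(i + 1) * 8].copy_from_slice(&w.to_le_bytes());
    }
    Pubkey::new_from_array(bytes)
}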

src/structs/resolution.rs (new file, 81 lines)

@@ -0,0 +1,81 @@
use std::fmt;
use chrono::Duration;
use strum::EnumIter;
#[derive(EnumIter, Copy, Clone, Eq, PartialEq)]
pub enum Resolution {
R1m,
R3m,
R5m,
R15m,
R30m,
R1h,
R2h,
R4h,
R1d,
}
pub fn day() -> Duration {
Duration::days(1)
}
impl fmt::Display for Resolution {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Resolution::R1m => write!(f, "1M"),
Resolution::R3m => write!(f, "3M"),
Resolution::R5m => write!(f, "5M"),
Resolution::R15m => write!(f, "15M"),
Resolution::R30m => write!(f, "30M"),
Resolution::R1h => write!(f, "1H"),
Resolution::R2h => write!(f, "2H"),
Resolution::R4h => write!(f, "4H"),
Resolution::R1d => write!(f, "1D"),
}
}
}
impl Resolution {
pub fn get_constituent_resolution(self) -> Resolution {
match self {
Resolution::R1m => panic!("have to use fills to make 1M candles"),
Resolution::R3m => Resolution::R1m,
Resolution::R5m => Resolution::R1m,
Resolution::R15m => Resolution::R5m,
Resolution::R30m => Resolution::R15m,
Resolution::R1h => Resolution::R30m,
Resolution::R2h => Resolution::R1h,
Resolution::R4h => Resolution::R2h,
Resolution::R1d => Resolution::R4h,
}
}
pub fn get_duration(self) -> Duration {
match self {
Resolution::R1m => Duration::minutes(1),
Resolution::R3m => Duration::minutes(3),
Resolution::R5m => Duration::minutes(5),
Resolution::R15m => Duration::minutes(15),
Resolution::R30m => Duration::minutes(30),
Resolution::R1h => Duration::hours(1),
Resolution::R2h => Duration::hours(2),
Resolution::R4h => Duration::hours(4),
Resolution::R1d => day(),
}
}
pub fn from_str(v: &str) -> Result<Self, ()> {
match v {
"1" => Ok(Resolution::R1m),
"3" => Ok(Resolution::R3m),
"5" => Ok(Resolution::R5m),
"15" => Ok(Resolution::R15m),
"30" => Ok(Resolution::R30m),
"60" => Ok(Resolution::R1h),
"120" => Ok(Resolution::R2h),
"240" => Ok(Resolution::R4h),
"D" => Ok(Resolution::R1d),
_ => Err(()),
}
}
}
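from_str accepts TradingView-style resolution codes ("1", "60", "D", ...), while Display produces the strings stored in the candles table. A usage sketch (not part of the commit):

use chrono::Duration;
use openbook_candles::structs::resolution::Resolution;

fn main() {
    let res = Resolution::from_str("60").unwrap(); // TradingView code for 1 hour
    assert_eq!(res.to_string(), "1H");
    assert_eq!(res.get_duration(), Duration::hours(1));
    // 1H candles are batched from 30M candles:
    assert_eq!(res.get_constituent_resolution().to_string(), "30M");
}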


@@ -0,0 +1,67 @@
use chrono::Utc;
use num_traits::ToPrimitive;
use serde::Serialize;
use crate::database::Candle;
#[derive(Serialize)]
pub struct TvResponse {
/// ok, error, no_data
#[serde(rename(serialize = "s"))]
pub status: String,
#[serde(rename(serialize = "errmsg"), skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
pub time: Vec<u64>,
pub close: Vec<f64>,
pub open: Vec<f64>,
pub high: Vec<f64>,
pub low: Vec<f64>,
pub volume: Vec<u64>,
/// Only Some if s == no_data
#[serde(
rename(serialize = "nextTime"),
skip_serializing_if = "Option::is_none"
)]
pub next_time: Option<u64>,
}
impl TvResponse {
pub fn candles_to_tv(candles: Vec<Candle>) -> Self {
let mut time: Vec<u64> = Vec::new();
let mut close: Vec<f64> = Vec::new();
let mut open: Vec<f64> = Vec::new();
let mut low: Vec<f64> = Vec::new();
let mut high: Vec<f64> = Vec::new();
let mut volume: Vec<u64> = Vec::new();
for c in candles.into_iter() {
time.push(chrono::DateTime::<Utc>::timestamp(&c.start_time) as u64);
close.push(c.close.to_f64().unwrap());
open.push(c.open.to_f64().unwrap());
high.push(c.high.to_f64().unwrap());
low.push(c.low.to_f64().unwrap());
volume.push(c.volume.to_u64().unwrap());
}
// Debug checks
assert_eq!(time.len(), close.len());
assert_eq!(close.len(), open.len());
assert_eq!(open.len(), low.len());
assert_eq!(low.len(), high.len());
assert_eq!(volume.len(), time.len());
let len = time.len();
TvResponse {
status: "ok".to_owned(),
error_message: None,
time,
close,
open,
low,
high,
volume,
next_time: None,
}
}
}
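The serde attributes rename status/error_message/next_time to the s/errmsg/nextTime keys of TradingView's UDF history response. A serialization sketch (assuming the module path structs::tradingview for this new file; the values are made up):

use openbook_candles::structs::tradingview::TvResponse;

fn main() {
    let resp = TvResponse {
        status: "no_data".to_owned(),
        error_message: None,
        time: vec![],
        close: vec![],
        open: vec![],
        high: vec![],
        low: vec![],
        volume: vec![],
        next_time: Some(1_678_060_800),
    };
    // {"s":"no_data","time":[],"close":[],"open":[],"high":[],"low":[],"volume":[],"nextTime":1678060800}
    println!("{}", serde_json::to_string(&resp).unwrap());
}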


@@ -1,6 +1,9 @@
use serde_derive::Deserialize;
use sqlx::{Pool, Postgres};
use std::fs::File;
use crate::structs::markets::MarketInfo;
pub trait AnyhowWrap {
type Value;
fn map_err_anyhow(self) -> anyhow::Result<Self::Value>;
@@ -20,13 +23,9 @@ pub struct Config {
pub max_pg_pool_connections: u32,
}
#[derive(Clone, Debug, Deserialize)]
pub struct MarketConfig {
pub name: String,
pub address: String,
pub struct WebContext {
pub markets: Vec<MarketInfo>,
pub pool: Pool<Postgres>
}
pub fn load_markets(path: &str) -> Vec<MarketConfig> {
let reader = File::open(path).unwrap();
serde_json::from_reader(reader).unwrap()
}