Initial commit.
This commit is contained in:
commit
e0a8e971e9
|
@ -0,0 +1,7 @@
|
||||||
|
dist
|
||||||
|
node_modules
|
||||||
|
.vscode
|
||||||
|
logs.*
|
||||||
|
yarn.lock
|
||||||
|
*.env
|
||||||
|
*.log
|
|
@ -0,0 +1,18 @@
|
||||||
|
# Mango Transaction Scraper V3
|
||||||
|
|
||||||
|
### Run
|
||||||
|
```
|
||||||
|
yarn install
|
||||||
|
yarn start
|
||||||
|
```
|
||||||
|
|
||||||
|
### Environment variables:
|
||||||
|
|
||||||
|
```
|
||||||
|
REQUEST_WAIT_TIME
|
||||||
|
MANGO_PROGRAM_ID
|
||||||
|
CONNECTION_STRING_RAW
|
||||||
|
CONNECTION_STRING_PARSED
|
||||||
|
CLUSTER_URL
|
||||||
|
WEBHOOK_URL
|
||||||
|
```
|
|
@ -0,0 +1,28 @@
|
||||||
|
{
|
||||||
|
"name": "mango-instruction-stats",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "ts-node src/index.ts",
|
||||||
|
"test": "echo \"Error: no test specified\" && exit 1"
|
||||||
|
},
|
||||||
|
"repository": "https://github.com/blockworks-foundation/mango-transaction-scraper-v3",
|
||||||
|
"author": "Nicholas Clarke",
|
||||||
|
"license": "MIT",
|
||||||
|
"homepage": "https://github.com/blockworks-foundation/mango-transaction-scraper-v3#readme",
|
||||||
|
"dependencies": {
|
||||||
|
"@blockworks-foundation/mango-client": "^3.0.24",
|
||||||
|
"@solana/web3.js": "^1.2.7",
|
||||||
|
"axios": "^0.21.1",
|
||||||
|
"bs58": "^4.0.1",
|
||||||
|
"package.json": "^2.0.1",
|
||||||
|
"pg": "^8.6.0",
|
||||||
|
"pg-format": "^1.0.4",
|
||||||
|
"pg-promise": "^10.10.2"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"ts-node": "^10.0.0",
|
||||||
|
"typescript": "^4.3.2"
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,151 @@
|
||||||
|
import {Connection, PublicKey} from '@solana/web3.js';
|
||||||
|
import {sleep} from '@blockworks-foundation/mango-client';
|
||||||
|
import {insertNewSignatures} from './signatures';
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
import { notify } from './utils';
|
||||||
|
import {populateTransactions} from './insertTransactions';
|
||||||
|
import {parseTransactions} from './parseTransactions';
|
||||||
|
|
||||||
|
// pg-promise instance used for SQL generation helpers (insert/update/ColumnSet).
// capSQL: true makes the generated SQL keywords uppercase (cosmetic only).
const pgp = require('pg-promise')({
  capSQL: true
});

// Mango v3 program id; overridable via env for testing against other deployments.
const mangoProgramId = process.env.MANGO_PROGRAM_ID || '5fNfvyp5czQVX77yoACa3JJVEhdRaWjPuazuWgjhTqEH'
|
||||||
|
|
||||||
|
/**
 * Persist parsed rows and then mark the source transactions as processed.
 *
 * Phase 1: insert parsed rows into their destination tables (one table per key
 * of parsedTransactions) on the parsed-transactions database, in one SQL
 * transaction, batched 1000 rows at a time.
 * Phase 2: update process_state on <schema>.transactions in the RAW database.
 * The two phases cannot share a DB transaction because they run against
 * different databases, so a crash between them can leave rows inserted but
 * not yet marked — phase 2 only runs after phase 1 commits.
 *
 * @param rawTransactionsPool    pg Pool for the raw-transactions database
 * @param parsedTransactionsPool pg Pool for the parsed-transactions database
 * @param schema                 schema name used on both databases
 * @param processStates          array of {signature, process_state} updates
 * @param parsedTransactions     map of tableName -> array of row objects
 */
async function insertMangoTransactions(rawTransactionsPool, parsedTransactionsPool, schema, processStates, parsedTransactions) {
  // Insert parsed transactions to appropriate tables on timescaledb
  // Update process states on transactions table - only once parsed transactions are successfully completed (can't use the same db transaction as they are on different databases)

  // Build one ColumnSet per destination table up front; columns are taken from
  // the keys of the first row, so all rows for a table must share a shape.
  let columnSets = {}
  let tableName
  let inserts
  for ([tableName, inserts] of Object.entries(parsedTransactions)) {
    if (inserts.length > 0) {
      let table = new pgp.helpers.TableName({table: tableName, schema: schema})
      columnSets[tableName] = new pgp.helpers.ColumnSet(Object.keys(inserts[0]), {table: table});
    }
  }

  let batchSize = 1000;
  let client = await parsedTransactionsPool.connect()
  try {
    await client.query('BEGIN')

    for ([tableName, inserts] of Object.entries(parsedTransactions)) {
      if (inserts.length > 0) {
        console.log(tableName + ' insert started')
        // Batch inserts to keep individual statements a manageable size.
        for (let i = 0, j = inserts.length; i < j; i += batchSize) {
          let insertsBatch = inserts.slice(i, i + batchSize);
          let insertsSql = pgp.helpers.insert(insertsBatch, columnSets[tableName]);
          await client.query(insertsSql)
        }
        console.log(tableName + ' inserted')
      }
    }
    await client.query('COMMIT')
  } catch (e) {
    await client.query('ROLLBACK')
    throw e
  } finally {
    client.release()
  }

  // Phase 2: flip process_state on the raw database, also batched and in a
  // single transaction. '?signature' marks signature as a condition-only
  // column (used in the WHERE mapping, never updated).
  tableName = 'transactions'
  let table = new pgp.helpers.TableName({table: tableName, schema: schema})
  const processStateCs = new pgp.helpers.ColumnSet(['?signature', 'process_state'], {table: table});

  client = await rawTransactionsPool.connect()
  try {
    await client.query('BEGIN')

    for (let i = 0, j = processStates.length; i < j; i += batchSize) {
      let updatesBatch = processStates.slice(i, i + batchSize);
      let updatedSql = pgp.helpers.update(updatesBatch, processStateCs) + ' WHERE v.signature = t.signature';
      await client.query(updatedSql)
    }

    console.log('process states updated')
    await client.query('COMMIT')
  } catch (e) {
    await client.query('ROLLBACK')
    throw e
  } finally {
    client.release()
  }
}
|
||||||
|
|
||||||
|
|
||||||
|
async function processMangoTransactions(address, rawTransactionsPool, parsedTransactionsPool, schema, limit) {
|
||||||
|
const client = await rawTransactionsPool.connect();
|
||||||
|
let res;
|
||||||
|
try {
|
||||||
|
res = await client.query("select transaction, signature from " + schema + ".transactions where process_state = 'ready for parsing' and program_pk = $1 order by id asc limit $2", [address, limit])
|
||||||
|
} finally {
|
||||||
|
client.release()
|
||||||
|
}
|
||||||
|
|
||||||
|
let transactions = res.rows.map(e => [e.transaction, e.signature]);
|
||||||
|
let [processStates, parsedTransactions] = parseTransactions(transactions, mangoProgramId);
|
||||||
|
await insertMangoTransactions(rawTransactionsPool, parsedTransactionsPool, schema, processStates, parsedTransactions)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function consumeTransactions() {
|
||||||
|
const clusterUrl = process.env.CLUSTER_URL || "https://api.mainnet-beta.solana.com";
|
||||||
|
let requestWaitTime = parseInt(process.env.REQUEST_WAIT_TIME!) || 500;
|
||||||
|
const rawConnectionString = process.env.CONNECTION_STRING_RAW
|
||||||
|
const parsedConnectionString = process.env.CONNECTION_STRING_PARSED
|
||||||
|
|
||||||
|
let schema = 'transactions_v3';
|
||||||
|
|
||||||
|
console.log(clusterUrl);
|
||||||
|
|
||||||
|
let connection = new Connection(clusterUrl, 'finalized');
|
||||||
|
const rawTransactionsPool = new Pool(
|
||||||
|
{
|
||||||
|
connectionString: rawConnectionString,
|
||||||
|
ssl: {
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
const parsedTransactionsPool = new Pool(
|
||||||
|
{
|
||||||
|
connectionString: parsedConnectionString,
|
||||||
|
ssl: {
|
||||||
|
rejectUnauthorized: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
console.log('Initialized')
|
||||||
|
notify('v3: Initialized')
|
||||||
|
while (true) {
|
||||||
|
console.log('Refreshing transactions ' + Date())
|
||||||
|
|
||||||
|
await insertNewSignatures(mangoProgramId, connection, rawTransactionsPool, requestWaitTime, schema)
|
||||||
|
await populateTransactions(connection, mangoProgramId, rawTransactionsPool, requestWaitTime, schema);
|
||||||
|
|
||||||
|
let transactionsParsingLimit = 50000;
|
||||||
|
await processMangoTransactions(mangoProgramId, rawTransactionsPool, parsedTransactionsPool,schema, transactionsParsingLimit);
|
||||||
|
|
||||||
|
console.log('Refresh complete')
|
||||||
|
// Probably unnecessary but let's give the servers a break
|
||||||
|
await sleep(5*1000)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
while (true) {
|
||||||
|
try {
|
||||||
|
await consumeTransactions()
|
||||||
|
}
|
||||||
|
catch(e: any) {
|
||||||
|
notify('v3: ' + e.toString())
|
||||||
|
console.log(e, e.stack)
|
||||||
|
// Wait for 10 mins
|
||||||
|
await sleep(10*60*1000)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main()
|
|
@ -0,0 +1,113 @@
|
||||||
|
// pg-promise instance used only for SQL generation helpers (ColumnSet/update).
// capSQL: true uppercases generated SQL keywords (cosmetic only).
const pgp = require('pg-promise')({
  capSQL: true
});
|
||||||
|
|
||||||
|
import {sleep} from '@blockworks-foundation/mango-client';
|
||||||
|
|
||||||
|
export async function populateTransactions(connection, address, pool, requestWaitTime, schema) {
|
||||||
|
|
||||||
|
let transactions = await getNewAddressSignaturesWithoutTransactions(connection, address, requestWaitTime, pool, schema)
|
||||||
|
|
||||||
|
let [transactionInserts, transactionErrors] = getTransactionInserts(transactions)
|
||||||
|
|
||||||
|
await insertTransactions(pool, schema, transactionInserts, transactionErrors)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Fetch raw rpc transaction responses for up to 25000 signatures that do not
 * yet have a transaction body stored.
 *
 * Requests are fired asynchronously but spaced requestWaitTime ms apart to
 * avoid rpc rate limiting; results are collected via Promise.allSettled.
 * NOTE(review): rejected requests are silently dropped here - their
 * signatures simply stay 'unprocessed' and are retried on a later pass.
 *
 * @returns array of [signature, rawRpcResponse] pairs (arrival order, not
 *          necessarily request order)
 */
async function getNewAddressSignaturesWithoutTransactions(connection, address, requestWaitTime, pool, schema) {

  let limit = 25000;

  let signaturesToProcess = (await getSignaturesWithoutTransactions(pool, address, schema, limit))

  let promises: Promise<void>[] = [];
  let transactions: any[] = [];
  let counter = 1;
  for (let signature of signaturesToProcess) {
    // Want to store the raw json returned from the rpc - so have to bypass the regular client methods here (which transform the json)
    let args = [signature, {encoding: 'jsonParsed', commitment: 'finalized'}]
    let promise = connection._rpcRequest('getConfirmedTransaction', args).then(confirmedTransaction => transactions.push([signature, confirmedTransaction]));

    console.log('requested ', counter, ' of ', signaturesToProcess.length);
    counter++;

    promises.push(promise);

    // Limit request frequency to avoid request failures due to rate limiting
    await sleep(requestWaitTime);

  }
  // allSettled (not all): one failed download must not discard the rest.
  await (Promise as any).allSettled(promises);

  return transactions

}
|
||||||
|
|
||||||
|
async function getSignaturesWithoutTransactions(pool, programPk, schema, limit) {
|
||||||
|
const client = await pool.connect();
|
||||||
|
let signatures;
|
||||||
|
try {
|
||||||
|
// TODO: add back in order by id asc - but why does it make it so much slower?
|
||||||
|
const res = await client.query("select signature from " + schema + ".transactions where process_state = 'unprocessed' and program_pk = $1 limit " + limit, [programPk])
|
||||||
|
|
||||||
|
signatures = res.rows.map(e => e['signature'])
|
||||||
|
} finally {
|
||||||
|
client.release()
|
||||||
|
}
|
||||||
|
|
||||||
|
return signatures;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getTransactionInserts(transactions) {
|
||||||
|
let transactionInserts: any[] = [];
|
||||||
|
let processStates: any[] = [];
|
||||||
|
|
||||||
|
for (let transaction of transactions) {
|
||||||
|
let [signature, confirmedTransaction] = transaction;
|
||||||
|
try {
|
||||||
|
let transactionInsert = {
|
||||||
|
transaction: JSON.stringify(confirmedTransaction),
|
||||||
|
log_messages: confirmedTransaction.result!.meta!.logMessages!.join('\n'),
|
||||||
|
signature: signature
|
||||||
|
}
|
||||||
|
transactionInserts.push(transactionInsert)
|
||||||
|
processStates.push({signature: signature, process_state: 'ready for parsing'})
|
||||||
|
} catch(e: any) {
|
||||||
|
console.log(e.stack)
|
||||||
|
processStates.push({signature: signature, process_state: 'transaction download error'})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return [transactionInserts, processStates]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Write downloaded transaction bodies and process-state changes back to
 * <schema>.transactions in one SQL transaction, batched 1000 rows at a time.
 *
 * Both writes are UPDATEs keyed on signature: the rows already exist (created
 * by the signature scraper); here only transaction/log_messages and
 * process_state are filled in. The '?' prefix marks signature as a
 * condition-only column (used in the WHERE mapping, never updated).
 *
 * @param pool               pg Pool for the raw-transactions database
 * @param schema             schema name
 * @param transactionInserts rows with signature, log_messages, transaction
 * @param processStates      rows with signature, process_state
 */
async function insertTransactions(pool, schema, transactionInserts, processStates) {
  const transactionsTable = new pgp.helpers.TableName({table: 'transactions', schema: schema})

  const transactionCs = new pgp.helpers.ColumnSet(['?signature', 'log_messages', 'transaction'], {table: transactionsTable});
  const processStatesCs = new pgp.helpers.ColumnSet(['?signature', 'process_state'], {table: transactionsTable});

  let batchSize = 1000;
  let client = await pool.connect()
  try {
    await client.query('BEGIN')

    // Multi-row UPDATE: generated VALUES list (v) joined to the table (t).
    for (let i = 0, j = transactionInserts.length; i < j; i += batchSize) {
      let updatesBatch = transactionInserts.slice(i, i + batchSize);
      let updatedSql = pgp.helpers.update(updatesBatch, transactionCs) + ' WHERE v.signature = t.signature';
      await client.query(updatedSql)
    }

    for (let i = 0, j = processStates.length; i < j; i += batchSize) {
      let updatesBatch = processStates.slice(i, i + batchSize);
      let updatedSql = pgp.helpers.update(updatesBatch, processStatesCs) + ' WHERE v.signature = t.signature';
      await client.query(updatedSql)
    }

    await client.query('COMMIT')
  } catch (e) {
    await client.query('ROLLBACK')
    throw e
  } finally {
    client.release()
  }
}
|
|
@ -0,0 +1,902 @@
|
||||||
|
const bs58 = require('bs58')
|
||||||
|
import {MangoInstructionLayout, IDS, PerpMarket} from '@blockworks-foundation/mango-client';
|
||||||
|
|
||||||
|
// Unfortunately ids.json does not correspond to the token indexes in the log - so keep a map here for reference
|
||||||
|
// mango group -> token index -> mint key
|
||||||
|
// TODO: is there a better way?
|
||||||
|
// mango group pk -> token index (as logged) -> token mint pk
// NOTE(review): neither map appears to be referenced in this visible chunk -
// presumably used by parser helpers further down the file; confirm before removing.
var tokenIndexesMap = {
  "98pjRuQjK3qA6gXts96PqZT4Ze5QmnCmt3QYjhbUSPue": {
    0: 'MangoCzJ36AjZyKwVj3VnYU4GTonjfVEnJmvvWaxLac',
    1: '9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E',
    2: '2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk',
    3: 'So11111111111111111111111111111111111111112',
    4: 'Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB',
    5: 'SRMuApVNdxXokk5GT7XD5cUUgXMBCoAz2LHeuAoKWRt',
    6: '4k3Dyjzvzp8eMZWUXbBCjEvwSkkk59S5iCNLY3QrkX6R',
    7: '8HGyAAB1yoM1ttS7pXjHMa3dukTFGQggnFFH3hJZgzQh',
    15: 'EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v'
  },
  '4yJ2Vx3kZnmHTNCrHzdoj5nCwriF2kVhfKNvqC6gU8tr': {
    0:'MangoCzJ36AjZyKwVj3VnYU4GTonjfVEnJmvvWaxLac',
    1:'9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E',
    2:'2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk',
    3:'So11111111111111111111111111111111111111112',
    4:'SRMuApVNdxXokk5GT7XD5cUUgXMBCoAz2LHeuAoKWRt',
    5:'Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB',
    15:'EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v'
  }
}

// mango group pk -> token index (as logged) -> oracle pk
var oracleIndexesMap = {
  "98pjRuQjK3qA6gXts96PqZT4Ze5QmnCmt3QYjhbUSPue": {
    0: '49cnp1ejyvQi3CJw3kKXNCDGnNbWDuZd3UG3Y2zGvQkX',
    1: 'GVXRSBjFk6e6J3NbVPXohDJetcTjaeeuykUpbQF8UoMU',
    2: 'JBu1AL4obBcCMqKBBxhpWCNUt136ijcuMZLFvTP7iWdB',
    3: 'H6ARHf6YXhGYeQfUzQNGk6rDNnLBQKrenN712K4AQJEG',
    4: '3vxLXJqLqF3JG5TCbYycbKWRBbCJQLxQmBGCkyqEEefL',
    5: '3NBReDRTLKMQEKiLD5tGcx4kXbTf88b7f2xLS9UuGjym',
    6: 'AnLf8tVYCM816gmBjiy8n53eXKKEDydT5piYjjQDPgTB',
    7: '9xYBiDWYsh2fHzpsz3aaCnNHCKWBNtfEDLtU6kS4aFD9'
  },
  '4yJ2Vx3kZnmHTNCrHzdoj5nCwriF2kVhfKNvqC6gU8tr': {
    0:'49cnp1ejyvQi3CJw3kKXNCDGnNbWDuZd3UG3Y2zGvQkX',
    1:'GVXRSBjFk6e6J3NbVPXohDJetcTjaeeuykUpbQF8UoMU',
    2:'JBu1AL4obBcCMqKBBxhpWCNUt136ijcuMZLFvTP7iWdB',
    3:'H6ARHf6YXhGYeQfUzQNGk6rDNnLBQKrenN712K4AQJEG',
    4:'3NBReDRTLKMQEKiLD5tGcx4kXbTf88b7f2xLS9UuGjym',
    5:'3vxLXJqLqF3JG5TCbYycbKWRBbCJQLxQmBGCkyqEEefL'
  }
}

// Mango group/market metadata bundled with the client library.
var ids = IDS;
|
||||||
|
|
||||||
|
/**
 * Parse a batch of stored raw transactions into per-table row arrays.
 *
 * @param transactionsResult array of [transactionJson, signature] pairs where
 *        transactionJson is the raw rpc response stored by the downloader
 * @param mangoProgramId mango program pk; instructions from other programs
 *        (e.g. serum) are skipped
 * @returns [processStates, parsedTransactions]:
 *        processStates - {signature, process_state} per input signature
 *        ('processed', 'transaction error' for on-chain failures, or
 *        'parsing error' when this parser throws);
 *        parsedTransactions - destination table name -> array of row objects
 */
export function parseTransactions(transactionsResult, mangoProgramId) {

  let processStates: any = []

  // Keys are destination table names; initialized empty so downstream insert
  // code can iterate every table uniformly.
  let parsedTransactions: any = {

    'deposits_withdraws': [],

    'cache_prices': [],
    'cache_indexes': [],

    'settle_pnl': [],
    'settle_fees': [],
    // 'force_settle_quote_positions': [], // Deprecated?

    'liquidate_token_and_token': [],
    'liquidate_token_and_perp': [],
    'liquidate_perp_market': [],

    'token_bankruptcy': [],
    'perp_bankruptcy': [],

    'fill_events': [],

    'net_balances': [],
    'redeem_mngo': []

    // Not yet parsed:
    // Mango: PlacePerpOrder
    // Mango: CancelPerpOrderByClientI
    // Mango: CancelPerpOrder
  }

  for (let transactionResult of transactionsResult) {
    let [transactionJson, signature] = transactionResult;
    try {
      let transaction = JSON.parse(transactionJson);
      let result = transaction.result;

      // Transactions that failed on-chain are recorded but not parsed.
      if (result.meta.err !== null) {
        processStates.push({signature: signature, process_state: 'transaction error'});
      } else {
        let slot = result.slot;
        let blockTime = result.blockTime;
        let blockDatetime = (new Date(blockTime * 1000)).toISOString();

        let instructions = result.transaction.message.instructions;
        // Can have multiple inserts per signature so add instructionNum column to allow a primary key
        let instructionNum = 1;
        for (let instruction of instructions) {

          // Old mango group - not in ids.json so have to hardcode here to ignore
          if ('accounts' in instruction) {
            if (instruction.accounts[0] === '2WNLfEMzhgwBPn6QptT43SdZy9cXTUbVJCMdCfimg4oi') {
              // NOTE(review): this continue also skips the instructionNum
              // increment at the bottom of the loop - confirm intended.
              continue
            }
          }

          // Skip attempting to parse serum instructions, etc
          if (instruction.programId == mangoProgramId) {
            // Instruction data is base58; the layout decode yields an object
            // whose single key names the instruction variant.
            let decodeData = bs58.decode(instruction.data);
            let decodedInstruction = MangoInstructionLayout.decode(decodeData);
            let instructionName = Object.keys(decodedInstruction)[0];

            if (instructionName === 'CachePrices') {
              parsedTransactions.cache_prices.push(...
                parseCachePrices(instructionNum, result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'CacheRootBanks') {
              parsedTransactions.cache_indexes.push(...
                parseCacheRootBanks(instructionNum, result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'Deposit') {
              parsedTransactions.deposits_withdraws.push(...
                parseDepositWithDraw(instruction, instructionNum, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'Withdraw') {
              parsedTransactions.deposits_withdraws.push(...
                parseDepositWithDraw(instruction, instructionNum, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'LiquidateTokenAndToken') {
              parsedTransactions.liquidate_token_and_token.push(...
                parseLiquidateTokenAndToken(result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'LiquidateTokenAndPerp') {
              parsedTransactions.liquidate_token_and_perp.push(...
                parseLiquidateTokenAndPerp(result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'LiquidatePerpMarket') {
              parsedTransactions.liquidate_perp_market.push(...
                parseLiquidatePerpMarket(result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'ResolveTokenBankruptcy') {
              parsedTransactions.token_bankruptcy.push(...
                parseResolveTokenBankruptcy(result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'ResolvePerpBankruptcy') {
              parsedTransactions.perp_bankruptcy.push(...
                parseResolvePerpBankruptcy(result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'SettlePnl') {
              parsedTransactions.settle_pnl.push(...
                parseSettlePnl(instructionNum, result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'SettleFees') {
              parsedTransactions.settle_fees.push(...
                parseSettleFees(instructionNum, result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'ConsumeEvents') {
              parsedTransactions.fill_events.push(...
                parseConsumeEvents(result.meta.logMessages, instruction.accounts, signature, blockTime, slot, blockDatetime)
              )
            } else if (instructionName === 'RedeemMngo') {
              parsedTransactions.redeem_mngo.push(...
                parseRedeemMngo(instruction, result.meta.innerInstructions, instructionNum, signature, blockTime, slot, blockDatetime)
              )
            }
          }

          instructionNum ++;
        }

        // Need to know the mango group pk of the transaction for information not tied to instructions
        // Transaction will only have one mango group - so check which one it is by iterating over mango group pks in IDS
        // TODO: add mango group to appropriate log messages and remove this workaround
        let ids = IDS;
        // NOTE(review): stray console.log() - looks like leftover debug output.
        console.log()
        let mangoGroupPk
        let accountKeys = result.transaction.message.accountKeys.map(e => e.pubkey)
        for (let pk of ids.groups.map(e => e.publicKey)) {
          if (accountKeys.includes(pk)) {
            mangoGroupPk = pk;
            break;
          }
        }

        let allNetBalances: any = []
        // Some information is not tied to instructions specifically
        for (let logMessage of result.meta.logMessages) {
          if (logMessage.startsWith('Program log: checked_sub_net details: ') || logMessage.startsWith('Program log: checked_add_net details: ')) {
            let parsedNetAmounts = parseNetAmounts(logMessage, mangoGroupPk, signature, blockTime, slot, blockDatetime)
            allNetBalances.push(...parsedNetAmounts)
          }
        }
        // Only want to store the latest deposit/borrow amounts per marginAccount/symbol pair for each instruction
        parsedTransactions.net_balances.push(...getLatestObjPerCombination(allNetBalances, ['mango_account', 'symbol']))

        processStates.push({signature: signature, process_state: 'processed'});
      }
    } catch(e: any) {
      // One bad transaction must not abort the batch; mark it and continue.
      console.log(e.stack)
      processStates.push({signature: signature, process_state: 'parsing error'});
    }
  }

  return [processStates, parsedTransactions]
}
|
||||||
|
|
||||||
|
function getLatestObjPerCombination(arr, combinationFields) {
|
||||||
|
// Utility function - iterates over arr and return the element with the highest index per set of combinationFields
|
||||||
|
|
||||||
|
let latestCombinations = {}
|
||||||
|
for (let values of arr) {
|
||||||
|
let combination = combinationFields.map(e => values[e])
|
||||||
|
latestCombinations[combination] = values
|
||||||
|
}
|
||||||
|
|
||||||
|
return Object.values(latestCombinations)
|
||||||
|
}
|
||||||
|
|
||||||
|
function insertQuotesAroundField(jsonString, field) {
|
||||||
|
// Utility function to fix malformed json (json with quotes around strings)
|
||||||
|
// Assumes fields have a single space before the key
|
||||||
|
|
||||||
|
let firstQuotePosition = jsonString.search('"' + field + '": ') + ('"' + field + '": ').length
|
||||||
|
let secondQuotePosition = firstQuotePosition + jsonString.slice(firstQuotePosition).search(',')
|
||||||
|
|
||||||
|
return [
|
||||||
|
jsonString.slice(0, firstQuotePosition),
|
||||||
|
'"',
|
||||||
|
jsonString.slice(firstQuotePosition, secondQuotePosition),
|
||||||
|
'"',
|
||||||
|
jsonString.slice(secondQuotePosition)
|
||||||
|
].join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
function getJsonStringsFromArray(logMessages, jsonStartStr) {
|
||||||
|
|
||||||
|
let jsonStrings: any = []
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(jsonStartStr)) {
|
||||||
|
jsonStrings.push(logMessage.slice(jsonStartStr.length))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return jsonStrings
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Extract fill events from a ConsumeEvents instruction's log messages.
 *
 * Each 'FillEvent details:' log line becomes one row. eventNum distinguishes
 * multiple fills within one signature (primary key with signature on the db
 * table). Some log lines are malformed JSON (unquoted string values), so a
 * repair pass quotes the known string fields before re-parsing.
 *
 * @returns array of fill_events row objects (possibly empty)
 */
function parseConsumeEvents(logMessages, accounts, signature, blockTime, slot, blockDatetime) {
  // eventNum is used here to form a primary key on the db table (with signature)

  let mangoGroupPk = accounts[0];

  let events: any = []
  let startDetailsStr = 'Program log: FillEvent details: ';
  let eventNum = 1
  for (let logMessage of logMessages) {
    if (logMessage.startsWith(startDetailsStr)) {
      let eventDetails
      try {
        eventDetails = JSON.parse(logMessage.slice(startDetailsStr.length))
      } catch {
        // Malformed log JSON: quote the string-valued fields, then re-parse.
        let jsonString = logMessage.slice(startDetailsStr.length);

        jsonString = insertQuotesAroundField(jsonString, 'maker')
        jsonString = insertQuotesAroundField(jsonString, 'taker')
        jsonString = insertQuotesAroundField(jsonString, 'taker_side')
        jsonString = insertQuotesAroundField(jsonString, 'maker_order_id')
        jsonString = insertQuotesAroundField(jsonString, 'taker_order_id')

        eventDetails = JSON.parse(jsonString);
      }

      events.push({
        event_num: eventNum,
        maker: eventDetails['maker'],
        maker_fee: eventDetails['maker_fee'],
        maker_order_id: eventDetails['maker_order_id'],

        // TODO: Ask Daffy about the source of these multipliers
        price: eventDetails['price'] / 10,
        quantity: eventDetails['quantity'] / 10000,

        seq_num: eventDetails['seq_num'],
        taker: eventDetails['taker'],
        taker_fee: eventDetails['taker_fee'],
        taker_order_id: eventDetails['taker_order_id'],
        taker_side: eventDetails['taker_side'],

        mango_group: mangoGroupPk, block_datetime: blockDatetime,
        slot: slot, signature: signature, blocktime: blockTime
      })

      eventNum++
    }
  }

  return events;
}
|
||||||
|
|
||||||
|
/**
 * Extract a settle_fees row from a SettleFees instruction's log messages.
 *
 * Returns a single-element array (or [] when the instruction ran but settled
 * nothing and emitted no details line). Settlement is scaled from native
 * units by the perp market's quoteDecimals.
 *
 * NOTE(review): if multiple details lines ever appeared in one instruction,
 * only the last one's values would be returned - presumed impossible; confirm.
 */
function parseSettleFees(instructionNum, logMessages, accounts, signature, blockTime, slot, blockDatetime) {

  let mangoGroupPk = accounts[0];

  // Look up the group's perp market metadata from ids.json.
  let perpMarkets = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['perpMarkets'];

  let mangoAccount
  let perpMarketName
  let settlement
  let startDetailsStr = 'Program log: settle_fees details: ';
  // Sometimes SettleFees is called but nothing is settled - see mxK5eEiEUeCcQtHwmUKziyYUZJ3NXdjmZigAR6npWgcpQoJtqvikt5A7osD4y6oiLZJhzYFvqAqDznFjHm77K8V
  let detailsFound = false
  for (let logMessage of logMessages) {
    if (logMessage.startsWith(startDetailsStr)) {
      detailsFound = true

      // Log JSON is missing quotes around mango accounts
      // Also trailing comma at end of json
      // TODO: fix this
      let settlePnlDetails
      try {
        settlePnlDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
      } catch {
        let jsonString = logMessage.slice(startDetailsStr.length);
        jsonString = insertQuotesAroundField(jsonString, 'mango_account')

        // Drop the trailing comma before the closing brace.
        jsonString = jsonString.replace(', }', ' }')

        settlePnlDetails = JSON.parse(jsonString);
      }

      mangoAccount = settlePnlDetails['mango_account']

      let marketIndex = settlePnlDetails['market_index']
      let perpMarket = perpMarkets.find(e => e['marketIndex'] === marketIndex)
      perpMarketName = perpMarket.name

      // Convert from native quote units to ui units.
      settlement = settlePnlDetails['settlement'] / Math.pow(10, perpMarket.quoteDecimals)
    }
  }

  if (detailsFound) {
    return [{
      margin_account: mangoAccount,
      settlement: settlement,
      perp_market_name: perpMarketName,
      instruction_num: instructionNum,
      mango_group: mangoGroupPk, block_datetime: blockDatetime,
      slot: slot, signature: signature, blocktime: blockTime
    }]
  } else {
    return []
  }

}
|
||||||
|
|
||||||
|
/**
 * Extract settle_pnl rows from a SettlePnl instruction's log messages.
 *
 * Returns two rows (one per counterparty) or [] when the instruction ran but
 * settled nothing. Account A's quote position is reduced by the settlement
 * amount and account B's increased, so A's row carries -settlement and B's
 * +settlement, both scaled from native units by the market's quoteDecimals.
 */
function parseSettlePnl(instructionNum, logMessages, accounts, signature, blockTime, slot, blockDatetime) {

  let mangoGroupPk = accounts[0];

  // Look up the group's perp market metadata from ids.json.
  let perpMarkets = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['perpMarkets'];

  let mangoAccountA
  let mangoAccountB
  let perpMarketName
  let settlementA
  let settlementB
  let startDetailsStr = 'Program log: settle_pnl details: ';
  // Sometimes SettlePnl is called but nothing is settled - see 5fWGMQECxDgvvffBuzVCsig7WcREP7bqcFN7Y5ndAC5tdxhFHJ8oSQkHcLzuPoVmMMs3o1V8pr7T7sAHTnRzMoan
  let detailsFound = false
  for (let logMessage of logMessages) {
    if (logMessage.startsWith(startDetailsStr)) {
      detailsFound = true

      // Log JSON is missing quotes around mango accounts
      // TODO: fix this
      let settlePnlDetails
      try {
        settlePnlDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
      } catch {
        let jsonString = logMessage.slice(startDetailsStr.length);
        jsonString = insertQuotesAroundField(jsonString, 'mango_account_a')
        jsonString = insertQuotesAroundField(jsonString, 'mango_account_b')

        settlePnlDetails = JSON.parse(jsonString);
      }

      mangoAccountA = settlePnlDetails['mango_account_a']
      mangoAccountB = settlePnlDetails['mango_account_b']

      let marketIndex = settlePnlDetails['market_index']
      let perpMarket = perpMarkets.find(e => e['marketIndex'] === marketIndex)
      perpMarketName = perpMarket.name

      let settlement = settlePnlDetails['settlement']

      // A's quote position is reduced by settlement and B's quote position is increased by settlement
      settlementA = -1 * settlement / Math.pow(10, perpMarket.quoteDecimals);
      settlementB = settlement / Math.pow(10, perpMarket.quoteDecimals);
    }
  }

  if (detailsFound) {
    return [
      {
        margin_account: mangoAccountA,
        settlement: settlementA,
        perp_market_name: perpMarketName,
        counterparty: mangoAccountB,
        instruction_num: instructionNum,

        mango_group: mangoGroupPk, block_datetime: blockDatetime,
        slot: slot, signature: signature, blocktime: blockTime
      },
      {
        margin_account: mangoAccountB,
        settlement: settlementB,
        perp_market_name: perpMarketName,
        counterparty: mangoAccountA,
        instruction_num: instructionNum,

        mango_group: mangoGroupPk, block_datetime: blockDatetime,
        slot: slot, signature: signature, blocktime: blockTime
      },
    ]
  } else {
    return []
  }

}
|
||||||
|
|
||||||
|
function parseResolvePerpBankruptcy(logMessages, accounts, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
let mangoGroupPk = accounts[0];
|
||||||
|
let liqee = accounts[2];
|
||||||
|
let liqor = accounts[3];
|
||||||
|
|
||||||
|
let perpMarkets = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['perpMarkets'];
|
||||||
|
let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['quoteSymbol'];
|
||||||
|
let quoteDecimals = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'].find(e => e.symbol === quoteSymbol).decimals;
|
||||||
|
|
||||||
|
// Either bankruptcy or socialized loss (or both) will be logged
|
||||||
|
// So initialize variables as null - nulls can be outputted to json if variables are not set
|
||||||
|
let perpMarketName;
|
||||||
|
let insuranceFundTransfer : number|null = null;
|
||||||
|
let loss : number|null = null
|
||||||
|
|
||||||
|
// TODO: validate this when an example comes along
|
||||||
|
let startDetailsStr = 'Program log: perp_bankruptcy details: ';
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let bankruptcyDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
let perpMarket = perpMarkets.find(e => e['marketIndex'] === bankruptcyDetails['liab_index'])
|
||||||
|
perpMarketName = perpMarket.name
|
||||||
|
|
||||||
|
insuranceFundTransfer = bankruptcyDetails['insurance_transfer'] / Math.pow(10, quoteDecimals);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
startDetailsStr = 'Program log: perp_socialized_loss details: ';
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let socializedLossDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
let perpMarket = perpMarkets.find(e => e['marketIndex'] === socializedLossDetails['liab_index'])
|
||||||
|
perpMarketName = perpMarket.name
|
||||||
|
|
||||||
|
// loss is on quote position
|
||||||
|
loss = socializedLossDetails['socialized_loss'] / Math.pow(10, quoteDecimals)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [{
|
||||||
|
liqor: liqor, liqee: liqee,
|
||||||
|
perp_market_name: perpMarketName, insurance_fund_transfer: insuranceFundTransfer, loss: loss,
|
||||||
|
mango_group: mangoGroupPk, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseResolveTokenBankruptcy(logMessages, accounts, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
let mangoGroupPk = accounts[0];
|
||||||
|
let liqee = accounts[2];
|
||||||
|
let liqor = accounts[3];
|
||||||
|
|
||||||
|
let tokens = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'];
|
||||||
|
let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['quoteSymbol'];
|
||||||
|
let quoteDecimals = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'].find(e => e.symbol === quoteSymbol).decimals;
|
||||||
|
|
||||||
|
// Either bankruptcy or socialized loss (or both) will be logged
|
||||||
|
// So initialize variables as null - nulls can be outputted to json if variables are not set
|
||||||
|
let symbol;
|
||||||
|
let insuranceFundTransfer : number|null = null;
|
||||||
|
let loss : number|null = null
|
||||||
|
let percentageLoss : number|null = null
|
||||||
|
let depositIndex : number|null = null
|
||||||
|
|
||||||
|
// TODO: validate this when an example comes along
|
||||||
|
let startDetailsStr = 'Program log: token_bankruptcy details: ';
|
||||||
|
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let bankruptcyDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
let tokenIndex = bankruptcyDetails['liab_index']
|
||||||
|
let tokenPk = tokenIndexesMap[mangoGroupPk][tokenIndex];
|
||||||
|
let token = tokens.find(e => e['mintKey'] === tokenPk);
|
||||||
|
symbol = token.symbol
|
||||||
|
|
||||||
|
insuranceFundTransfer = bankruptcyDetails['insurance_transfer'] / Math.pow(10, quoteDecimals);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
startDetailsStr = 'Program log: token_socialized_loss details: ';
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let socializedLossDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
let tokenIndex = socializedLossDetails['liab_index']
|
||||||
|
let tokenPk = tokenIndexesMap[mangoGroupPk][tokenIndex];
|
||||||
|
let token = tokens.find(e => e['mintKey'] === tokenPk);
|
||||||
|
symbol = token.symbol
|
||||||
|
|
||||||
|
loss = socializedLossDetails['native_loss'] / Math.pow(10, token.decimals)
|
||||||
|
percentageLoss = socializedLossDetails['percentage_loss']
|
||||||
|
|
||||||
|
// Deposit index was added to the logging after launch
|
||||||
|
// TODO: remove when we've parsed the logs without deposit_index
|
||||||
|
// TODO: does this need to be parsed from native units?
|
||||||
|
try {
|
||||||
|
depositIndex = socializedLossDetails['deposit_index']
|
||||||
|
} catch {
|
||||||
|
// pass
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [{
|
||||||
|
liqor: liqor, liqee: liqee,
|
||||||
|
symbol: symbol, insurance_fund_transfer: insuranceFundTransfer, loss: loss,
|
||||||
|
percentage_loss: percentageLoss, deposit_index: depositIndex,
|
||||||
|
mango_group: mangoGroupPk, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseLiquidatePerpMarket(logMessages, accounts, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
if (logMessages.includes('Program log: Account init_health above zero.')) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
let mangoGroupPk = accounts[0]
|
||||||
|
let liqee = accounts[4]
|
||||||
|
let liqor = accounts[5]
|
||||||
|
|
||||||
|
let perpMarkets = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['perpMarkets'];
|
||||||
|
let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['quoteSymbol'];
|
||||||
|
|
||||||
|
let perpMarketName
|
||||||
|
let liabSymbol
|
||||||
|
let assetSymbol
|
||||||
|
let baseTransfer
|
||||||
|
let quoteTransfer
|
||||||
|
let bankruptcy
|
||||||
|
let startDetailsStr = 'Program log: liquidate_perp_market details: ';
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let liquidationDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
let perpMarket = perpMarkets.find(e => e['marketIndex'] === liquidationDetails['market_index'])
|
||||||
|
perpMarketName = perpMarket.name
|
||||||
|
let liabDecimals = perpMarket.baseDecimals;
|
||||||
|
let assetDecimals = perpMarket.quoteDecimals;
|
||||||
|
|
||||||
|
liabSymbol = perpMarket.baseSymbol;
|
||||||
|
assetSymbol = quoteSymbol;
|
||||||
|
baseTransfer = liquidationDetails['base_transfer'] / Math.pow(10, liabDecimals);
|
||||||
|
// TODO: quoteTransfer is -base_transfer * pmi.base_lot_size - but I don't really know what this means
|
||||||
|
quoteTransfer = liquidationDetails['quote_transfer'] / Math.pow(10, assetDecimals);
|
||||||
|
bankruptcy = liquidationDetails['bankruptcy'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [{
|
||||||
|
liqor: liqor, liqee: liqee, perp_market: perpMarketName,
|
||||||
|
liab_symbol: liabSymbol, liab_amount: baseTransfer,
|
||||||
|
asset_symbol: assetSymbol, asset_amount: quoteTransfer,
|
||||||
|
bankruptcy: bankruptcy,
|
||||||
|
mango_group: mangoGroupPk, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime
|
||||||
|
}]
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function parseLiquidateTokenAndPerp(logMessages, accounts, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
if (logMessages.includes('Program log: Account init_health above zero.')) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
let mangoGroupPk = accounts[0]
|
||||||
|
let liqee = accounts[2]
|
||||||
|
let liqor = accounts[3]
|
||||||
|
|
||||||
|
let tokens = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'];
|
||||||
|
let perpMarkets = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['perpMarkets'];
|
||||||
|
|
||||||
|
let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['quoteSymbol'];
|
||||||
|
let quoteDecimals = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'].find(e => e.symbol === quoteSymbol).decimals;
|
||||||
|
|
||||||
|
let perpMarket;
|
||||||
|
let assetSymbol;
|
||||||
|
let liabSymbol;
|
||||||
|
let assetType
|
||||||
|
let liabType
|
||||||
|
let assetTransfer
|
||||||
|
let assetPrice
|
||||||
|
let liabTransfer
|
||||||
|
let liabPrice
|
||||||
|
let startDetailsStr = 'Program log: liquidate_token_and_perp details: ';
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let liquidationDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
assetType = liquidationDetails['asset_type'];
|
||||||
|
let assetIndex = liquidationDetails['asset_index'];
|
||||||
|
|
||||||
|
liabType = liquidationDetails['liab_type'];
|
||||||
|
let liabIndex = liquidationDetails['liab_index'];
|
||||||
|
|
||||||
|
let assetDecimals;
|
||||||
|
let liabDecimals;
|
||||||
|
if (assetType === 'Token') {
|
||||||
|
// asset is token and liab is perp
|
||||||
|
let assetTokenPk = tokenIndexesMap[mangoGroupPk][assetIndex];
|
||||||
|
let assetToken = tokens.find(e => e['mintKey'] === assetTokenPk);
|
||||||
|
assetSymbol = assetToken.symbol;
|
||||||
|
assetDecimals = assetToken.decimals;
|
||||||
|
|
||||||
|
let liabPerpMarket = perpMarkets.find(e => e['marketIndex'] === liabIndex)
|
||||||
|
// Liquidation can only occur on quote position on perp side
|
||||||
|
// So I'll set the asset symbol to the quote symbol (as that is what is transferred)
|
||||||
|
liabSymbol = quoteSymbol;
|
||||||
|
liabDecimals = liabPerpMarket.quoteDecimals;
|
||||||
|
perpMarket = liabPerpMarket.name;
|
||||||
|
} else {
|
||||||
|
// asset is perp and liab is token
|
||||||
|
let assetPerpMarket = perpMarkets.find(e => e['marketIndex'] === assetIndex)
|
||||||
|
// Liquidation can only occur on quote position on perp side
|
||||||
|
// So I'll set the asset symbol to the quote symbol (as that is what is transferred)
|
||||||
|
assetSymbol = quoteSymbol;
|
||||||
|
assetDecimals = assetPerpMarket.quoteDecimals;
|
||||||
|
perpMarket = assetPerpMarket.name;
|
||||||
|
|
||||||
|
let liabTokenPk = tokenIndexesMap[mangoGroupPk][liabIndex];
|
||||||
|
let liabToken = tokens.find(e => e['mintKey'] === liabTokenPk);
|
||||||
|
liabSymbol = liabToken.symbol;
|
||||||
|
liabDecimals = liabToken.decimals;
|
||||||
|
}
|
||||||
|
|
||||||
|
assetTransfer = liquidationDetails['asset_transfer'] / Math.pow(10, assetDecimals);
|
||||||
|
assetPrice = liquidationDetails['asset_price'] * Math.pow(10, assetDecimals - quoteDecimals);
|
||||||
|
|
||||||
|
liabTransfer = liquidationDetails['actual_liab_transfer'] / Math.pow(10, liabDecimals);;
|
||||||
|
liabPrice = liquidationDetails['liab_price'] * Math.pow(10, liabDecimals - quoteDecimals);
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [{
|
||||||
|
liqor: liqor, liqee: liqee, perp_market: perpMarket,
|
||||||
|
liab_symbol: liabSymbol, liab_amount: liabTransfer, liab_price: liabPrice, liab_type: liabType,
|
||||||
|
asset_symbol: assetSymbol, asset_amount: assetTransfer, asset_price: assetPrice, asset_type: assetType,
|
||||||
|
mango_group: mangoGroupPk, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime
|
||||||
|
}]
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseLiquidateTokenAndToken(logMessages, accounts, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
if (logMessages.includes('Program log: Account init_health above zero.')) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
let mangoGroup = accounts[0]
|
||||||
|
let liqee = accounts[2]
|
||||||
|
let liqor = accounts[3]
|
||||||
|
let assetRootPk = accounts[5]
|
||||||
|
let liabRootPk = accounts[7]
|
||||||
|
|
||||||
|
let assetToken = ids['groups'].find(e => e['publicKey'] === mangoGroup)['tokens'].find(e => e.rootKey === assetRootPk);
|
||||||
|
let liabToken = ids['groups'].find(e => e['publicKey'] === mangoGroup)['tokens'].find(e => e.rootKey === liabRootPk);
|
||||||
|
|
||||||
|
let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroup)['quoteSymbol'];
|
||||||
|
let quoteDecimals = ids['groups'].find(e => e['publicKey'] === mangoGroup)['tokens'].find(e => e.symbol === quoteSymbol).decimals;
|
||||||
|
|
||||||
|
let assetPrice;
|
||||||
|
let liabPrice;
|
||||||
|
let assetTransfer;
|
||||||
|
let liabTransfer;
|
||||||
|
let bankruptcy;
|
||||||
|
let startDetailsStr = 'Program log: liquidate_token_and_token details: ';
|
||||||
|
for (let logMessage of logMessages) {
|
||||||
|
if (logMessage.startsWith(startDetailsStr)) {
|
||||||
|
let liquidationDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
|
||||||
|
|
||||||
|
assetPrice = liquidationDetails['asset_price'] * Math.pow(10, assetToken.decimals - quoteDecimals);
|
||||||
|
liabPrice = liquidationDetails['liab_price'] * Math.pow(10, liabToken.decimals - quoteDecimals);
|
||||||
|
|
||||||
|
assetTransfer = liquidationDetails['asset_transfer'] / Math.pow(10, assetToken.decimals);
|
||||||
|
liabTransfer = liquidationDetails['liab_transfer'] / Math.pow(10, liabToken.decimals);
|
||||||
|
|
||||||
|
bankruptcy = liquidationDetails['bankruptcy'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [{
|
||||||
|
liqor: liqor, liqee: liqee,
|
||||||
|
liab_symbol: liabToken.symbol, liab_amount: liabTransfer, liab_price: liabPrice,
|
||||||
|
asset_symbol: assetToken.symbol, asset_amount: assetTransfer, asset_price: assetPrice,
|
||||||
|
bankruptcy: bankruptcy,
|
||||||
|
mango_group: mangoGroup, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseRedeemMngo(instruction, innerInstructions, instructionNum, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
let mangoGroup = instruction.accounts[0]
|
||||||
|
let marginAccount = instruction.accounts[2]
|
||||||
|
|
||||||
|
let decimals = ids['groups'].find(e => e['publicKey'] === mangoGroup)['tokens'].find(e => e.symbol === 'MNGO').decimals;
|
||||||
|
|
||||||
|
// TODO: This would be simpler to just parse logs
|
||||||
|
let transferInstruction = innerInstructions.find(e => e.index === instructionNum-1).instructions[0]
|
||||||
|
|
||||||
|
let quantity = parseInt(transferInstruction.parsed.info.amount) / Math.pow(10, decimals)
|
||||||
|
|
||||||
|
return [{margin_account: marginAccount, quantity: quantity, instruction_num: instructionNum,
|
||||||
|
mango_group: mangoGroup, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime}]
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseDepositWithDraw(instruction, instructionNum, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
let decodedInstruction = MangoInstructionLayout.decode(bs58.decode(instruction.data));
|
||||||
|
let instructionName = Object.keys(decodedInstruction)[0];
|
||||||
|
|
||||||
|
let mangoGroup = instruction.accounts[0]
|
||||||
|
let marginAccount = instruction.accounts[1]
|
||||||
|
let owner = instruction.accounts[2]
|
||||||
|
let rootPk = instruction.accounts[4]
|
||||||
|
|
||||||
|
let token = ids['groups'].find(e => e['publicKey'] === mangoGroup)['tokens'].find(e => e.rootKey === rootPk);
|
||||||
|
let mintDecimals = token.decimals;
|
||||||
|
let symbol = token.symbol;
|
||||||
|
|
||||||
|
let quantity = decodedInstruction[instructionName].quantity.toNumber() / Math.pow(10, mintDecimals)
|
||||||
|
|
||||||
|
return [{margin_account: marginAccount, owner: owner, symbol: symbol, side: instructionName, quantity: quantity, instruction_num: instructionNum,
|
||||||
|
mango_group: mangoGroup, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime}]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parses a single checked_add_net / checked_sub_net log line into a db row
// recording one mango account's resulting deposit and borrow for one token.
function parseNetAmounts(logMessage, mangoGroupPk, signature, blockTime, slot, blockDatetime) {

  let tokens = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'];
  let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['quoteSymbol'];
  let quoteDecimals = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'].find(e => e.symbol === quoteSymbol).decimals;

  // Work out which of the two log prefixes this line carries
  let startDetailsStr;
  if (logMessage.startsWith('Program log: checked_sub_net details: ')) {
    startDetailsStr = 'Program log: checked_sub_net details: '
  } else if (logMessage.startsWith('Program log: checked_add_net details: ')) {
    startDetailsStr = 'Program log: checked_add_net details: '
  } else {
    throw 'Unexpected startDetailsStr'
  }

  // TODO: fix this in the rust code and remove this workaround
  // The json is missing quotes around the mango_account_pk
  let netAmountDetails
  try {
    netAmountDetails = JSON.parse(logMessage.slice(startDetailsStr.length));
  } catch {

    let jsonString = logMessage.slice(startDetailsStr.length);
    jsonString = insertQuotesAroundField(jsonString, 'mango_account_pk')
    netAmountDetails = JSON.parse(jsonString)
  }

  let mangoAccountPk = netAmountDetails['mango_account_pk']
  let tokenIndex = netAmountDetails['token_index']

  let tokenPk = tokenIndexesMap[mangoGroupPk][tokenIndex];
  let token = tokens.find(e => e['mintKey'] === tokenPk);
  let symbol = token.symbol;

  // NOTE(review): dividing by 10^(token.decimals - quoteDecimals) looks
  // suspicious - converting a native token amount to ui units normally divides
  // by 10^token.decimals. Confirm the units the on-chain log emits before
  // changing.
  let deposit = netAmountDetails['deposit'] / Math.pow(10, token.decimals - quoteDecimals);
  let borrow = netAmountDetails['borrow'] / Math.pow(10, token.decimals - quoteDecimals);

  return [{mango_account: mangoAccountPk, symbol: symbol, deposit: deposit, borrow: borrow,
    mango_group: mangoGroupPk, block_datetime: blockDatetime,
    slot: slot, signature: signature, blocktime: blockTime}]

}
|
||||||
|
|
||||||
|
// Parses cache_root_banks log output into one db row per cached token,
// carrying that token's deposit and borrow indexes. Returns [] when the
// instruction cached nothing.
function parseCacheRootBanks(instructionNum, logMessages, accounts, signature, blockTime, slot, blockDatetime) {
  let mangoGroupPk = accounts[0];

  let tokens = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'];

  let cacheIndexes: any = [];
  let startDetailsStr = 'Program log: cache_root_banks details: ';
  let jsons = getJsonStringsFromArray(logMessages, startDetailsStr)

  // Nothing cached
  if (jsons.length === 0) {
    return []
  }

  // TODO: fix this in the rust code and remove this workaround
  // The json is missing a comma before "borrow_indexes"
  let cacheIndexDetails
  try {
    cacheIndexDetails = JSON.parse(jsons[0]);
  } catch {
    // Splice a comma in just before the "borrow_indexes" key and re-parse.
    // (search() treats its argument as a regex, but this pattern has no
    // special characters so it behaves like indexOf here.)
    let jsonString = jsons[0];
    let insertPosition = jsonString.search('"borrow_indexes"')
    let fixedJsonString = [jsonString.slice(0, insertPosition), ',', jsonString.slice(insertPosition)].join('');

    cacheIndexDetails = JSON.parse(fixedJsonString);
  }

  // The three arrays are parallel: entry i of each describes the same token
  let tokenIndexes = cacheIndexDetails['token_indexes'];
  let depositIndexes = cacheIndexDetails['deposit_indexes'];
  let borrowIndexes = cacheIndexDetails['borrow_indexes'];

  for (let i = 0; i < tokenIndexes.length; i++){
    let tokenIndex = tokenIndexes[i];

    let tokenPk = tokenIndexesMap[mangoGroupPk][tokenIndex];
    let token = tokens.find(e => e['mintKey'] === tokenPk);
    let symbol = token.symbol;

    let depositIndex = depositIndexes[i];
    let borrowIndex = borrowIndexes[i];

    cacheIndexes.push({symbol: symbol, deposit_index: depositIndex, borrow_index: borrowIndex,
      instruction_num: instructionNum,
      mango_group: mangoGroupPk, block_datetime: blockDatetime,
      slot: slot, signature: signature, blocktime: blockTime})
  }

  return cacheIndexes
}
|
||||||
|
|
||||||
|
function parseCachePrices(instructionNum, logMessages, accounts, signature, blockTime, slot, blockDatetime) {
|
||||||
|
|
||||||
|
let mangoGroupPk = accounts[0];
|
||||||
|
|
||||||
|
let tokens = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'];
|
||||||
|
let oracles = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['oracles'];
|
||||||
|
let quoteSymbol = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['quoteSymbol'];
|
||||||
|
let quoteDecimals = ids['groups'].find(e => e['publicKey'] === mangoGroupPk)['tokens'].find(e => e.symbol === quoteSymbol).decimals;
|
||||||
|
|
||||||
|
let cachePrices: any = [];
|
||||||
|
let startDetailsStr = 'Program log: cache_prices details: ';
|
||||||
|
let jsons = getJsonStringsFromArray(logMessages, startDetailsStr)
|
||||||
|
let cachePriceDetails = JSON.parse(jsons[0])
|
||||||
|
|
||||||
|
let oracleIndexes = cachePriceDetails['oracle_indexes']
|
||||||
|
let oraclePrices = cachePriceDetails['oracle_prices']
|
||||||
|
|
||||||
|
for (let [i, oracleIndex] of oracleIndexes.entries()) {
|
||||||
|
|
||||||
|
let oraclePk = oracleIndexesMap[mangoGroupPk][oracleIndex];
|
||||||
|
let oracle = oracles.find(e => e['publicKey'] === oraclePk);
|
||||||
|
let symbol = oracle.symbol;
|
||||||
|
|
||||||
|
let token = tokens.find(e => e.symbol === symbol);
|
||||||
|
let baseDecimals = token.decimals;
|
||||||
|
|
||||||
|
let rawPrice = oraclePrices[i];
|
||||||
|
let price = rawPrice * Math.pow(10, baseDecimals) / Math.pow(10, quoteDecimals);
|
||||||
|
|
||||||
|
cachePrices.push({symbol: symbol, price: price, oracle_pk: oraclePk,
|
||||||
|
instruction_num: instructionNum,
|
||||||
|
mango_group: mangoGroupPk, block_datetime: blockDatetime,
|
||||||
|
slot: slot, signature: signature, blocktime: blockTime})
|
||||||
|
}
|
||||||
|
|
||||||
|
return cachePrices
|
||||||
|
}
|
|
@ -0,0 +1,96 @@
|
||||||
|
import { Connection, PublicKey, ConfirmedSignatureInfo } from '@solana/web3.js';
|
||||||
|
import { sleep} from '@blockworks-foundation/mango-client';
|
||||||
|
import { bulkBatchInsert } from './utils';
|
||||||
|
|
||||||
|
|
||||||
|
// Fetches all signatures associated with the account - working backwards in time until it encounters the "afterSlot" slot
export async function getNewSignatures(afterSlot: number, connection: Connection, addressPk: PublicKey, requestWaitTime: number) {

  let signatures;
  let slots;
  const limit = 1000;  // page size requested from the rpc per round trip
  let before = null;   // pagination cursor: oldest signature fetched so far
  let options;
  let allSignaturesInfo: ConfirmedSignatureInfo[] = [];
  while (true) {

    // First page has no "before" cursor; subsequent pages continue backwards
    if (before === null) {
      options = {limit: limit};
    } else {
      options = {limit: limit, before: before};
    }

    let signaturesInfo = (await connection.getConfirmedSignaturesForAddress2(addressPk, options));
    signatures = signaturesInfo.map(x => x['signature'])
    slots = signaturesInfo.map(x => x['slot']);

    // Stop when we reach a slot we have already stored in the database
    // Use slot instead of signature here as can have multiple signatures per slot and signatures are
    // stored in an arbitrary order per slot - leading to attempting to insert a duplicate signature
    // If a slot is already seen - will have all signatures in that slot in the db
    let afterSlotIndex = slots.indexOf(afterSlot);
    if (afterSlotIndex !== -1) {
      allSignaturesInfo = allSignaturesInfo.concat(signaturesInfo.slice(0, afterSlotIndex));
      break
    } else {
      // if afterSlot is not found then we should have gotten signaturesInfo of length limit
      // otherwise we have an issue where the rpc endpoint does not have enough history
      if (signaturesInfo.length !== limit) {
        throw 'rpc endpoint does not have sufficient signature history to reach afterSignature ' + afterSlot
      }
      allSignaturesInfo = allSignaturesInfo.concat(signaturesInfo);
    }
    before = signatures[signatures.length-1];

    // Progress logging: blockTime of the oldest signature fetched so far
    console.log(new Date(signaturesInfo[signaturesInfo.length-1].blockTime! * 1000).toISOString());

    // Throttle so we do not hammer the rpc endpoint
    await sleep(requestWaitTime);
  }

  return allSignaturesInfo
}
|
||||||
|
|
||||||
|
// Fetches all signatures for the program newer than the latest slot already in
// schema.transactions and bulk-inserts them oldest -> newest.
export async function insertNewSignatures(address: string, connection: Connection, pool, requestWaitTime: number, schema: string) {
  let client = await pool.connect()
  // NOTE(review): release() is not in a finally block, so a failed query would
  // leak this pooled client - confirm and consider try/finally.
  let latestSlotRows = await client.query('select max(slot) as max_slot from ' + schema + '.transactions where program_pk = $1', [address])

  client.release();

  let latestSlot = latestSlotRows.rows[0]['max_slot'];

  let newSignatures = await getNewSignatures(latestSlot, connection, new PublicKey(address), requestWaitTime);

  // By default the signatures returned by getConfirmedSignaturesForAddress2 will be ordered newest -> oldest
  // We reverse the order to oldest -> newest here
  // This is useful for our purposes as by inserting oldest -> newest if inserts are interrupted for some reason the process can pick up where it left off seamlessly (with no gaps)
  // Also ensures that the auto increment id in our table is incremented oldest -> newest
  newSignatures = newSignatures.reverse();

  const inserts = newSignatures.map(signatureInfo => ({
    signature: signatureInfo.signature,
    program_pk: address,
    block_time: signatureInfo.blockTime,
    block_datetime: (new Date(signatureInfo.blockTime! * 1000)).toISOString(),
    slot: signatureInfo.slot,
    err: signatureInfo.err === null ? 0 : 1,  // store the error as a 0/1 flag
    process_state: 'unprocessed'
  }))
  // Seems to be a bug in getConfirmedSignaturesForAddress2 where very rarely I can get duplicate signatures (separated by a few signatures in between)
  // So dedup here
  let uniqueInserts = uniqueOnSignature(inserts);

  let columns = ['signature', 'program_pk', 'block_time', 'block_datetime', 'slot', 'err', 'process_state'];
  let table = 'transactions'
  let batchSize = 10000

  await bulkBatchInsert(pool, table, columns, uniqueInserts, batchSize, schema);

  console.log('inserted ' + newSignatures.length + ' signatures')
}
|
||||||
|
|
||||||
|
// Returns inserts filtered to the first occurrence of each signature,
// preserving order. Needed because getConfirmedSignaturesForAddress2 can very
// rarely return duplicate signatures.
// Uses a Set rather than a plain object as the seen-dictionary: object keys
// collide with prototype properties (e.g. a key of "__proto__" is never stored
// as an own property, so its duplicates would slip through hasOwnProperty).
function uniqueOnSignature(inserts) {
  const seen = new Set();
  return inserts.filter((e) => {
    if (seen.has(e.signature)) {
      return false;
    }
    seen.add(e.signature);
    return true;
  });
}
|
|
@ -0,0 +1,77 @@
|
||||||
|
import { Pool } from 'pg'
|
||||||
|
import {parseTransactions} from '../parseTransactions';
|
||||||
|
|
||||||
|
const mangoProgramId = process.env.MANGO_PROGRAM_ID || '5fNfvyp5czQVX77yoACa3JJVEhdRaWjPuazuWgjhTqEH'
|
||||||
|
|
||||||
|
// Debug helper: pulls a fixed set of example transactions (one per instruction
// type of interest) from the raw transactions table and runs them through
// parseTransactions so the parsed output can be inspected under a debugger.
async function processMangoTransactions(rawTransactionsPool, schema, limit) {

  const client = await rawTransactionsPool.connect();
  let res;
  try {

    // Below are examples of different types of transactions
    let signatures = [
      "4cYaxHEMAycyjRBfD2Va1cnaNcKSCxsLaYF47EnpUH4aqCEsZR9c9BJMeMr9NiPR7grN6puBYqWL6FdeX3ZgHrjj", // Deposit
      "49ajp59Gtpr5Q4kD4y2rMZrQSgkFNrferoC5nUq6mAZsveFf3e12DXtCR1GMFSyhR6Aypd6tJG7SKveNwGHGBDYQ", // Withdraw

      "5E9Jqz3nPtTx5rHTHmWyvw5RGTodPp4jRBx158dpe24b4QtpyAEVcEPuwvL6cwQ5MAcKHDiukU45L5xJ8fLR9Y1J", // CachePrices and CacheRootBanks
      "2o3hH59r6Pggg8oTTEjtXyJ9xHmx2z1pPXG4SWoPeogW95sjQoxps5fu4LbaTWfTyMvK1epsvz1nLiZ9CTPAgyD1", // LiquidateTokenAndToken
      "1F47ZsgeLNpDsHCqnNRbG8hi6C2rEeyT8YME61g84HSTRHrheXX69Zya2Dz3fW14SC1y84cAbLVg7jDae38Vp3a", // LiquidateTokenAndPerp
      "4RBxzncCHW8XSHR1mFJrxtUMitYyYiSc9jvHQ4CPA8mqCukgw3dQNoYpgm5GRozyxMJP1j4ew9gNPkzCnrnkmaud", // LiquidatePerpMarket

      "56y2iUGUyQ8BcqLpL5N1QwR3QQF37NtaWRnd5XjQ25BNWKfNxvpnhwwqtD7B88dAvJEykTHNCLvUKdxY4jhQ2uhQ", //LiquidateTokenAndPerp example 2
      "4ErqDTV11imwDnoA6AAj3VMZC9FQvyq2GA4emfpiSxVBifAQE9EDwPnHxoTMxULHfyyCf8GBBzoczGSqYJMFEV5D", //LiquidateTokenAndPerp example 3
      "59DFSbsN1DbnqUiMsStS2xQn4tBncG8w3gg8aikBvQZKnWhf1yRt87EU4WMvXtnnVe18zWaRDFVfnVYp3ASgF7tV", // ResolveTokenBankruptcy
      "4Xx7gVesMQQZqprJYLu5gNEYRLA5GTXKURrkc8jG3CLKynRwhEM93MownyAMhxpdFTvfXQ9kkxkRcemZ8Fn5WHyk", // ResolvePerpBankruptcy
      "3bzj3KkA3FSZHJuCmRgHhSgqeaEzD32sCHkYdRLcZm1vcuB4ra5NbU5EZqBhW6QjeKRV9QRWC4SHxK2hS54s79Zx", // settle_pnl
      "5TmhvKQJmjUD9dZgCszBF8gNKUohpxwjrYu1RngZVh1hEToGMtjPtXJF89QLHXzANMWWQRfMomsgCg8353CpYgBb", // settle_fees
      "4qV6PTD1nGj5qq89FQK8QKwN231pGgtayD7uX4B6y83b19gcVXB5ByLCvApSJjCRrboiCg7RVT2p2e1CtP3zuXDb", // force_settle_quote_positions
      "5qDPBrFjCcaZthjRCqisHRw1mFEkHFHRFWi5jbKCmpAgpAXNdEkSv8L472D12VB5AukYaGsWhAy5bcvvUGJ1Sgtv", // FillEvent
      "3YXaEG95w5eG7jBBjz8hW9auXVAv9z2MH8yw51tL8nqSqmKgXtrD1hgE7LCqK2hpFwcrpjeWtBeVqGsbCHLh3kSe", // redeem mango
      "2HNnZmThkFUsG1pw9bNaJoeeGUZJun3hkcpwBJt3ZU9FKe3CY17wrJgk1BZ8txm13RJ512ThbZVZxaqsxNFn4xVs", // checked_add_net details
      '4ebib6h5kQHpcpK4A4UpH7ThJVEtui2X7vVvTfCW8iuJgjHMocH7nymN3zVrrbwZL9HQYJY1tHdnGjo7ZSgrL7M6', // error example
      '59DFSbsN1DbnqUiMsStS2xQn4tBncG8w3gg8aikBvQZKnWhf1yRt87EU4WMvXtnnVe18zWaRDFVfnVYp3ASgF7tV', //token socialized loss
      '4RCvRY8BWPB6FixyfufYKojUdnE91DiqmFE2b8e4FCyuWCdT1ipSzPBaWUgaajKucFr1jsveiMvTft5iiWbctCFk' // settle_pnl_multiple
    ]

    // Signatures are embedded directly into the SQL; they are hard-coded
    // constants above, not user input
    let signaturesSql = signatures.map(e => "'" + e + "'").join(',')

    res = await client.query("select transaction, signature from " + schema + ".transactions where signature in (" + signaturesSql + ") order by id desc limit $1", [limit])


  } finally {
    client.release()
  }

  let transactions = res.rows.map(e => [e.transaction, e.signature]);
  let [processStates, parsedTransactions] = parseTransactions(transactions, mangoProgramId);

  // Set a breakpoint here to examine parsed transactions
  console.log(parsedTransactions)

}
|
||||||
|
|
||||||
|
/**
 * Entry point: opens a connection pool to the raw-transactions Postgres
 * database and repeatedly re-parses recent Mango transactions.
 *
 * Reads CONNECTION_STRING_RAW from the environment. The pool accepts
 * self-signed server certificates (rejectUnauthorized: false) — matches
 * the original behaviour; NOTE(review): confirm this is intended for prod.
 *
 * Runs forever; each iteration delegates to processMangoTransactions.
 */
async function consumeTransactions() {
    const rawConnectionString = process.env.CONNECTION_STRING_RAW;
    const schema = 'transactions_v3';

    const rawTransactionsPool = new Pool({
        connectionString: rawConnectionString,
        ssl: {
            rejectUnauthorized: false,
        },
    });

    console.log('Initialized');

    // Loop-invariant; hoisted out of the loop (was re-declared each iteration).
    const transactionsParsingLimit = 50000;

    while (true) {
        console.log('Refreshing transactions ' + Date());
        await processMangoTransactions(rawTransactionsPool, schema, transactionsParsingLimit);
    }
}
|
||||||
|
|
||||||
|
// Entry point. The .catch is required: without it a rejection from the async
// function is a floating promise — silently lost, or a process-killing
// unhandled rejection on newer Node versions.
consumeTransactions().catch((err) => {
    console.error(err);
    process.exit(1);
});
|
|
@ -0,0 +1,50 @@
|
||||||
|
import axios from 'axios';
|
||||||
|
|
||||||
|
// pg-promise is used only for its SQL-generation helpers
// (pgp.helpers.TableName / ColumnSet / insert) in bulkBatchInsert below;
// actual query execution goes through the node-postgres Pool.
// capSQL: true capitalizes generated SQL keywords for readability.
const pgp = require('pg-promise')({
    capSQL: true
});
|
||||||
|
|
||||||
|
/**
 * Bulk-inserts rows using multi-row INSERT statements, avoiding the
 * round-trip cost of one INSERT per row. Inserts are chunked into batches
 * of `batchSize` so individual statements don't grow too large, and the
 * whole operation runs in a single transaction — if any batch fails,
 * everything is rolled back.
 *
 * @param pool      node-postgres Pool to draw a client from.
 * @param tablename Target table name (unqualified).
 * @param columns   Column definitions for pgp.helpers.ColumnSet.
 * @param inserts   Array of row objects; no-op when empty.
 * @param batchSize Rows per INSERT statement; must be >= 1.
 * @param schema    Schema qualifying the target table.
 * @throws {Error}  If batchSize < 1, or rethrows any query error after rollback.
 */
export async function bulkBatchInsert(pool, tablename, columns, inserts, batchSize, schema) {
    const table = new pgp.helpers.TableName({table: tablename, schema: schema});

    if (inserts.length === 0) {
        return;
    } else if (batchSize < 1) {
        // Throw a real Error (was a bare string): preserves the stack trace
        // and works with `instanceof Error` checks in callers.
        throw new Error('batchSize must be at least 1');
    }

    const client = await pool.connect();
    const cs = new pgp.helpers.ColumnSet(columns, {table: table});
    try {
        await client.query('BEGIN');

        for (let i = 0, j = inserts.length; i < j; i += batchSize) {
            const insertsBatch = inserts.slice(i, i + batchSize);
            // pg-promise generates one multi-row INSERT for the whole batch.
            const insertsSql = pgp.helpers.insert(insertsBatch, cs);
            await client.query(insertsSql);

            console.log('inserted ' + (i + insertsBatch.length) + ' of ' + inserts.length);
        }

        await client.query('COMMIT');
    } catch (e) {
        await client.query('ROLLBACK');
        throw e;
    } finally {
        // Always return the client to the pool, even on failure.
        client.release();
    }
}
|
||||||
|
|
||||||
|
/**
 * Best-effort webhook notification. No-op when WEBHOOK_URL is unset.
 * Delivery failures are logged and deliberately swallowed — a monitoring
 * hiccup must not take down the scraper, and the un-caught promise in the
 * original was an unhandled-rejection hazard on newer Node versions.
 *
 * @param {string} content - Message body, posted as {content} (Discord-style payload).
 */
export function notify(content) {
    if (process.env.WEBHOOK_URL) {
        axios.post(process.env.WEBHOOK_URL, {content}).catch((err) => {
            console.error('notify failed:', err.message ?? err);
        });
    }
}
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,72 @@
|
||||||
|
{
|
||||||
|
"compileOnSave": true,
|
||||||
|
"compilerOptions": {
|
||||||
|
/* Visit https://aka.ms/tsconfig.json to read more about this file */
|
||||||
|
|
||||||
|
/* Basic Options */
|
||||||
|
// "incremental": true, /* Enable incremental compilation */
|
||||||
|
"target": "es2019", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */
|
||||||
|
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
|
||||||
|
"lib": ["es2019"], /* Specify library files to be included in the compilation. */
|
||||||
|
"allowJs": true, /* Allow javascript files to be compiled. */
|
||||||
|
// "checkJs": true, /* Report errors in .js files. */
|
||||||
|
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||||
|
// "declaration": true, /* Generates corresponding '.d.ts' file. */
|
||||||
|
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||||
|
"sourceMap": true, /* Generates corresponding '.map' file. */
|
||||||
|
// "outFile": "./", /* Concatenate and emit output to single file. */
|
||||||
|
"outDir": "dist", /* Redirect output structure to the directory. */
|
||||||
|
"rootDir": "src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||||
|
// "composite": true, /* Enable project compilation */
|
||||||
|
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
|
||||||
|
// "removeComments": true, /* Do not emit comments to output. */
|
||||||
|
// "noEmit": true, /* Do not emit outputs. */
|
||||||
|
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||||
|
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||||
|
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||||
|
|
||||||
|
/* Strict Type-Checking Options */
|
||||||
|
"strict": true, /* Enable all strict type-checking options. */
|
||||||
|
"noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||||
|
// "strictNullChecks": true, /* Enable strict null checks. */
|
||||||
|
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||||
|
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
|
||||||
|
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||||
|
"noImplicitThis": false, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||||
|
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
|
||||||
|
|
||||||
|
/* Additional Checks */
|
||||||
|
// "noUnusedLocals": true, /* Report errors on unused locals. */
|
||||||
|
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||||
|
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||||
|
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||||
|
// "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
|
||||||
|
|
||||||
|
/* Module Resolution Options */
|
||||||
|
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||||
|
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||||
|
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||||
|
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||||
|
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||||
|
// "types": [], /* Type declaration files to be included in compilation. */
|
||||||
|
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||||
|
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||||
|
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||||
|
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||||
|
|
||||||
|
/* Source Map Options */
|
||||||
|
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||||
|
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||||
|
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||||
|
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||||
|
|
||||||
|
/* Experimental Options */
|
||||||
|
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||||
|
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||||
|
|
||||||
|
/* Advanced Options */
|
||||||
|
"skipLibCheck": true, /* Skip type checking of declaration files. */
|
||||||
|
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
Loading…
Reference in New Issue