From 83f192fd97f220414ae9165c096ee0444a7a5eae Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Sun, 10 Dec 2023 18:15:55 +0100 Subject: [PATCH 01/20] dump tx details UI structure --- transaction_details_database.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/transaction_details_database.py b/transaction_details_database.py index 78c5ada..4bbe177 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -34,6 +34,7 @@ def find_transaction_details_by_sig(tx_sig: str): assert len(maprows) <= 1, "Tx Sig is primary key - find zero or one" + # FIXME: maprows for row in maprows: transaction_database.map_jsons_in_row(row) accounts = json.loads(row['accounts_used']) @@ -105,6 +106,21 @@ def find_transaction_details_by_sig(tx_sig: str): row['write_lock_info'] = invert_by_slot(wai) row['read_lock_info'] = invert_by_slot(rai) + print("- transaction details for sig: " + tx_sig) + print("- relevant slots: " + str(relevant_slots)) + for relevant_slot in relevant_slots: + print("- slot: ", relevant_slot) + print(" - errors in slot:") + for error in row['errors_array']: + if error['slot'] == relevant_slot: + print(" - " + error['error']) + print(" - write-locked accounts: ") + for account in row['write_lock_info'][relevant_slot]: + print(" - " + account['key']) + print(" - read-locked accounts:") + for account in row['read_lock_info'][relevant_slot]: + print(" - " + account['key']) + return maprows From 0c5c9564096421d938ce8ecef955a2df51c8960c Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Sun, 10 Dec 2023 23:05:20 +0100 Subject: [PATCH 02/20] map timestamp+errors --- transaction_database.py | 61 +++++++++++++++++++++++------------------ 1 file changed, 35 insertions(+), 26 deletions(-) diff --git a/transaction_database.py b/transaction_database.py index 26328a9..6718f37 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -5,30 +5,30 @@ import json def run_query(): maprows = postgres_connection.query( """ - WITH tx_aggregated AS ( - SELECT - signature as sig, - min(first_notification_slot) as min_slot, - ARRAY_AGG(errors) as all_errors - FROM banking_stage_results.transaction_infos - WHERE true - GROUP BY signature - ORDER BY min(utc_timestamp) DESC - LIMIT 50 - ) SELECT signature, - tx_aggregated.all_errors, - is_executed, - is_confirmed, - first_notification_slot, + ( + SELECT ARRAY_AGG(json_object('error' VALUE err.error,'count':count)::text) + FROM banking_stage_results_2.transaction_slot txslot + INNER JOIN banking_stage_results_2.errors err ON err.error_code=txslot.error + WHERE txslot.transaction_id=txi.transaction_id + ) AS all_errors, + --is_executed, + --is_confirmed, + processed_slot, + --first_notification_slot, cu_requested, prioritization_fees, - utc_timestamp, - -- e.g. 
"OCT 17 12:29:17.5127" - to_char(utc_timestamp, 'MON DD HH24:MI:SS.MS') as timestamp_formatted - FROM banking_stage_results.transaction_infos txi - INNER JOIN tx_aggregated ON tx_aggregated.sig=txi.signature AND tx_aggregated.min_slot=txi.first_notification_slot + ( + SELECT min(utc_timestamp) + FROM banking_stage_results_2.transaction_slot txslot + WHERE txslot.transaction_id=txi.transaction_id + ) AS utc_timestamp + FROM banking_stage_results_2.transaction_infos txi + INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id + WHERE signature='5sCSTNuqvnFdgvryusZQPyTyz85JUYC37iL3cb88X6vTrwEPQJm9D1TsJetcgAyZEgFWrpza77Uvji3CrupGw1SU' + ORDER BY processed_slot DESC + LIMIT 50 """) # print some samples @@ -39,6 +39,7 @@ def run_query(): for index, row in enumerate(maprows): row['pos'] = index + 1 map_jsons_in_row(row) + map_timestamp(row) return maprows @@ -63,9 +64,7 @@ def find_transaction_by_sig(tx_sig: str): first_notification_slot, cu_requested, prioritization_fees, - utc_timestamp, - -- e.g. "OCT 17 12:29:17.5127" - to_char(utc_timestamp, 'MON DD HH24:MI:SS.MS') as timestamp_formatted + utc_timestamp FROM banking_stage_results.transaction_infos txi INNER JOIN tx_aggregated ON tx_aggregated.sig=txi.signature AND tx_aggregated.min_slot=txi.first_notification_slot """, args=[tx_sig]) @@ -74,15 +73,25 @@ def find_transaction_by_sig(tx_sig: str): for row in maprows: map_jsons_in_row(row) + map_timestamp(row) + return maprows +# TODO format to MON DD HH24:MI:SS.MS +def map_timestamp(row): + row['timestamp_formatted'] = row['utc_timestamp'] + return row + + def map_jsons_in_row(row): errors = [] - # flatmap postgres array of json strings which contain array (of errors, usually one) + if row["all_errors"] is None: + row["all_errors"] = [] + return for errors_json in row["all_errors"]: - for error in json.loads(errors_json): - errors.append(error) + # {"{\"error\" : \"TransactionError::AccountInUse\", \"count\" : 1}"} + errors.append(json.loads(errors_json)) row["errors_array"] = errors def main(): From cd6c91e1f4836f4c065e14dbaa5fbdce679b48ec Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Sun, 10 Dec 2023 23:17:16 +0100 Subject: [PATCH 03/20] workaround for sort --- templates/_txlist.html | 11 ++++++++-- transaction_database.py | 47 ++++++++++++++++++++++------------------- 2 files changed, 34 insertions(+), 24 deletions(-) diff --git a/templates/_txlist.html b/templates/_txlist.html index 03fd87f..dd2fe6e 100644 --- a/templates/_txlist.html +++ b/templates/_txlist.html @@ -15,9 +15,9 @@ {{ tx.pos }} - {% if tx.is_executed %} + {% if tx.is_successful %} Included - {% elif not tx.is_executed %} + {% else %} Excluded {% endif %} @@ -46,6 +46,13 @@ {% endfor %} + {% if not tx.errors_array %} + + + -- + + + {% endif %} diff --git a/transaction_database.py b/transaction_database.py index 6718f37..1a2aa16 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -6,28 +6,31 @@ def run_query(): maprows = postgres_connection.query( """ SELECT - signature, - ( - SELECT ARRAY_AGG(json_object('error' VALUE err.error,'count':count)::text) - FROM banking_stage_results_2.transaction_slot txslot - INNER JOIN banking_stage_results_2.errors err ON err.error_code=txslot.error - WHERE txslot.transaction_id=txi.transaction_id - ) AS all_errors, - --is_executed, - --is_confirmed, - processed_slot, - --first_notification_slot, - cu_requested, - prioritization_fees, - ( - SELECT min(utc_timestamp) - FROM banking_stage_results_2.transaction_slot txslot - WHERE 
txslot.transaction_id=txi.transaction_id - ) AS utc_timestamp - FROM banking_stage_results_2.transaction_infos txi - INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id - WHERE signature='5sCSTNuqvnFdgvryusZQPyTyz85JUYC37iL3cb88X6vTrwEPQJm9D1TsJetcgAyZEgFWrpza77Uvji3CrupGw1SU' - ORDER BY processed_slot DESC + * + FROM ( + SELECT + signature, + ( + SELECT ARRAY_AGG(json_object('error' VALUE err.error,'count':count)::text) + FROM banking_stage_results_2.transaction_slot txslot + INNER JOIN banking_stage_results_2.errors err ON err.error_code=txslot.error + WHERE txslot.transaction_id=txi.transaction_id + ) AS all_errors, + is_successful, + processed_slot, + --first_notification_slot, + cu_requested, + prioritization_fees, + ( + SELECT min(utc_timestamp) + FROM banking_stage_results_2.transaction_slot txslot + WHERE txslot.transaction_id=txi.transaction_id + ) AS utc_timestamp + FROM banking_stage_results_2.transaction_infos txi + INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id + WHERE true + ) as data + ORDER BY processed_slot, utc_timestamp, signature DESC LIMIT 50 """) From 55eea4488b494dac79959306d47194eaa92da7d9 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 07:04:38 +0100 Subject: [PATCH 04/20] details query-wip --- transaction_details_database.py | 119 ++++---------------------------- 1 file changed, 15 insertions(+), 104 deletions(-) diff --git a/transaction_details_database.py b/transaction_details_database.py index 4bbe177..73350cc 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -8,118 +8,29 @@ def find_transaction_details_by_sig(tx_sig: str): # transaction table primary key is used maprows = postgres_connection.query( """ - WITH tx_aggregated AS ( - SELECT - signature as sig, - min(first_notification_slot) as min_slot, - ARRAY_AGG(errors) as all_errors - FROM banking_stage_results.transaction_infos - WHERE signature = %s - GROUP BY signature - ) SELECT signature, - tx_aggregated.all_errors, - is_executed, - is_confirmed, - first_notification_slot, + '{}'::text[] all_errors,-- FIXME + is_successful, + processed_slot, + --first_notification_slot, cu_requested, prioritization_fees, - processed_slot, - to_char(utc_timestamp, 'MON DD HH24:MI:SS.MS') as timestamp_formatted, - accounts_used - FROM banking_stage_results.transaction_infos txi - INNER JOIN tx_aggregated ON tx_aggregated.sig=txi.signature AND tx_aggregated.min_slot=txi.first_notification_slot + ( + SELECT min(utc_timestamp) + FROM banking_stage_results_2.transaction_slot txslot + WHERE txslot.transaction_id=txi.transaction_id + ) AS utc_timestamp + FROM banking_stage_results_2.transaction_infos txi + INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id + WHERE signature=%s """, args=[tx_sig]) assert len(maprows) <= 1, "Tx Sig is primary key - find zero or one" - # FIXME: maprows - for row in maprows: - transaction_database.map_jsons_in_row(row) - accounts = json.loads(row['accounts_used']) - row['writelocked_accounts'] = list(filter(lambda acc : acc['writable'], accounts)) - row['readlocked_accounts'] = list(filter(lambda acc : not acc['writable'], accounts)) - relevent_slots_dict = {row['first_notification_slot']} - for error in row['errors_array']: - relevent_slots_dict.add(error['slot']) - relevant_slots = list(relevent_slots_dict) - row['relevant_slots'] = relevant_slots - - blockrows = postgres_connection.query( - """ - SELECT * FROM ( 
- SELECT - slot, - heavily_writelocked_accounts, - heavily_readlocked_accounts - FROM banking_stage_results.blocks - -- see pg8000 docs for unnest hack - WHERE slot IN (SELECT unnest(CAST(%s as bigint[]))) - ) AS data - """, args=[relevant_slots]) - - wai = [] - rai = [] - for block_data in blockrows: - hwl = json.loads(block_data['heavily_writelocked_accounts']) - hrl = json.loads(block_data['heavily_readlocked_accounts']) - for writed in row['writelocked_accounts']: - info = {'slot' : block_data['slot'], 'key' : writed['key'] } - acc = list(filter(lambda acc_: acc_['key'] == writed['key'], hwl)) - if len(acc) > 1: - print("WARNING: multiple accounts with same key in same block") - if len(acc) > 0: - acc = defaultdict(lambda: 0, acc[0]) - info['cu_requested'] = acc['cu_requested'] - info['cu_consumed'] = acc['cu_consumed'] - info['min_pf'] = acc['min_pf'] - info['median_pf'] = acc['median_pf'] - info['max_pf'] = acc['max_pf'] - else: - info['cu_requested'] = 0 - info['cu_consumed'] = 0 - info['min_pf'] = 0 - info['median_pf'] = 0 - info['max_pf'] = 0 - wai.append(info) - - for readed in row['readlocked_accounts']: - info = {'slot' : block_data['slot'], 'key' : readed['key'] } - acc = list(filter(lambda x: x['key'] == readed['key'],hrl)) - if len(acc) > 1: - print("WARNING: multiple accounts with same key in same block") - if len(acc) > 0: - acc = defaultdict(lambda: 0, acc[0]) - info['cu_requested'] = acc['cu_requested'] - info['cu_consumed'] = acc['cu_consumed'] - info['min_pf'] = acc['min_pf'] - info['median_pf'] = acc['median_pf'] - info['max_pf'] = acc['max_pf'] - else: - info['cu_requested'] = 0 - info['cu_consumed'] = 0 - info['min_pf'] = 0 - info['median_pf'] = 0 - info['max_pf'] = 0 - rai.append(info) - row['write_lock_info'] = invert_by_slot(wai) - row['read_lock_info'] = invert_by_slot(rai) - - print("- transaction details for sig: " + tx_sig) - print("- relevant slots: " + str(relevant_slots)) - for relevant_slot in relevant_slots: - print("- slot: ", relevant_slot) - print(" - errors in slot:") - for error in row['errors_array']: - if error['slot'] == relevant_slot: - print(" - " + error['error']) - print(" - write-locked accounts: ") - for account in row['write_lock_info'][relevant_slot]: - print(" - " + account['key']) - print(" - read-locked accounts:") - for account in row['read_lock_info'][relevant_slot]: - print(" - " + account['key']) + # TODO this is only one row + if maprows: + row = maprows[0] return maprows From 34d93c811c0a76343ab64d52d76a1c5d3d58dac9 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 08:07:07 +0100 Subject: [PATCH 05/20] read strcuture --- transaction_details_database.py | 45 +++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/transaction_details_database.py b/transaction_details_database.py index 73350cc..50f53b3 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -9,6 +9,7 @@ def find_transaction_details_by_sig(tx_sig: str): maprows = postgres_connection.query( """ SELECT + txi.transaction_id, signature, '{}'::text[] all_errors,-- FIXME is_successful, @@ -32,6 +33,50 @@ def find_transaction_details_by_sig(tx_sig: str): if maprows: row = maprows[0] + # {'transaction_id': 1039639, 'slot': 234765028, 'error': 34, 'count': 1, 'utc_timestamp': datetime.datetime(2023, 12, 8, 18, 29, 23, 861619)} + tx_slots = postgres_connection.query( + """ + SELECT + txslot.slot, + txslot.error, + err.error + FROM banking_stage_results_2.transaction_slot txslot + INNER JOIN 
banking_stage_results_2.errors err ON err.error_code=txslot.error + WHERE transaction_id=%s + """, args=[row["transaction_id"]]) + # ordered by slots ascending + relevant_slots = set([txslot["slot"] for txslot in tx_slots]) + + accountinfos_per_slot =( + invert_by_slot( + postgres_connection.query( + """ + SELECT + amb.*, + acc.account_key + FROM banking_stage_results_2.accounts_map_blocks amb + INNER JOIN banking_stage_results_2.accounts acc ON acc.acc_id=amb.acc_id + WHERE slot IN (SELECT unnest(CAST(%s as bigint[]))) + """, args=[relevant_slots])) + ) + + print("- transaction details for sig: " + tx_sig) + print("- relevant slots: " + str(relevant_slots)) + for relevant_slot in relevant_slots: + accountinfos = accountinfos_per_slot.get(relevant_slot, []) + print(" - slot: ", relevant_slot) + print(" - errors in slot:") + for tx_slots_row in tx_slots: + if tx_slots_row['slot'] == relevant_slot: + print(" - " + tx_slots_row['error']) + print(" - write-locked accounts: ") + for account in accountinfos: + if account['is_write_locked']: + print(" - ", account['account_key']) + print(" - read-locked accounts:") + for account in accountinfos: + if not account['is_write_locked']: + print(" - ", account['account_key']) return maprows From c66e4677d62f93157795212e5d28621de92a32e4 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 08:17:21 +0100 Subject: [PATCH 06/20] map json --- transaction_details_database.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/transaction_details_database.py b/transaction_details_database.py index 50f53b3..fdee6d0 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -62,6 +62,8 @@ def find_transaction_details_by_sig(tx_sig: str): print("- transaction details for sig: " + tx_sig) print("- relevant slots: " + str(relevant_slots)) + heavy_write_accounts = [] + heavy_read_accounts = [] for relevant_slot in relevant_slots: accountinfos = accountinfos_per_slot.get(relevant_slot, []) print(" - slot: ", relevant_slot) @@ -70,13 +72,25 @@ def find_transaction_details_by_sig(tx_sig: str): if tx_slots_row['slot'] == relevant_slot: print(" - " + tx_slots_row['error']) print(" - write-locked accounts: ") - for account in accountinfos: - if account['is_write_locked']: - print(" - ", account['account_key']) + for writed in accountinfos: + if writed['is_write_locked']: + prio_fee_data = json.loads(writed['prioritization_fees_info']) + info = { + 'slot': writed['slot'], + 'key': writed['account_key'], + 'cu_requested': writed['total_cu_requested'], + 'cu_consumed': writed['total_cu_consumed'], + 'min_pf': prio_fee_data['min'], + 'median_pf': prio_fee_data['med'], + 'max_pf': prio_fee_data['max'] + } + heavy_write_accounts.append(info) print(" - read-locked accounts:") - for account in accountinfos: - if not account['is_write_locked']: - print(" - ", account['account_key']) + for readed in accountinfos: + if not readed['is_write_locked']: + print(" - ", readed['account_key']) + row["write_lock_info"] = heavy_write_accounts + row["read_lock_info"] = heavy_read_accounts return maprows From 3bfea1ab215799389c888e397621d25052a2f784 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 08:35:02 +0100 Subject: [PATCH 07/20] tx details - some are missing --- templates/transaction_details.html | 2 +- transaction_details_database.py | 65 +++++++++++++++++------------- 2 files changed, 37 insertions(+), 30 deletions(-) diff --git a/templates/transaction_details.html 
b/templates/transaction_details.html index 1b45c96..f36bdfd 100644 --- a/templates/transaction_details.html +++ b/templates/transaction_details.html @@ -55,7 +55,7 @@ First notification slot - {{ transaction.first_notification_slot | slotnumber }} + ?????? TODO CU Requested diff --git a/transaction_details_database.py b/transaction_details_database.py index fdee6d0..eada028 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -47,6 +47,8 @@ def find_transaction_details_by_sig(tx_sig: str): # ordered by slots ascending relevant_slots = set([txslot["slot"] for txslot in tx_slots]) + row["relevant_slots"] = relevant_slots + accountinfos_per_slot =( invert_by_slot( postgres_connection.query( @@ -60,37 +62,42 @@ def find_transaction_details_by_sig(tx_sig: str): """, args=[relevant_slots])) ) - print("- transaction details for sig: " + tx_sig) - print("- relevant slots: " + str(relevant_slots)) - heavy_write_accounts = [] - heavy_read_accounts = [] + # print("- transaction details for sig: " + tx_sig) + # print("- relevant slots: " + str(relevant_slots)) + + write_lock_info = dict() + read_lock_info = dict() for relevant_slot in relevant_slots: accountinfos = accountinfos_per_slot.get(relevant_slot, []) - print(" - slot: ", relevant_slot) - print(" - errors in slot:") - for tx_slots_row in tx_slots: - if tx_slots_row['slot'] == relevant_slot: - print(" - " + tx_slots_row['error']) - print(" - write-locked accounts: ") - for writed in accountinfos: - if writed['is_write_locked']: - prio_fee_data = json.loads(writed['prioritization_fees_info']) - info = { - 'slot': writed['slot'], - 'key': writed['account_key'], - 'cu_requested': writed['total_cu_requested'], - 'cu_consumed': writed['total_cu_consumed'], - 'min_pf': prio_fee_data['min'], - 'median_pf': prio_fee_data['med'], - 'max_pf': prio_fee_data['max'] - } - heavy_write_accounts.append(info) - print(" - read-locked accounts:") - for readed in accountinfos: - if not readed['is_write_locked']: - print(" - ", readed['account_key']) - row["write_lock_info"] = heavy_write_accounts - row["read_lock_info"] = heavy_read_accounts + # print(" - slot: ", relevant_slot) + # print(" - errors in slot:") + # for tx_slots_row in tx_slots: + # if tx_slots_row['slot'] == relevant_slot: + # print(" - " + tx_slots_row['error']) + + account_info_expanded = [] + for account_info in accountinfos: + prio_fee_data = json.loads(account_info['prioritization_fees_info']) + info = { + 'slot': account_info['slot'], + 'key': account_info['account_key'], + 'is_write_locked': account_info['is_write_locked'], + 'cu_requested': account_info['total_cu_requested'], + 'cu_consumed': account_info['total_cu_consumed'], + 'min_pf': prio_fee_data['min'], + 'median_pf': prio_fee_data['med'], + 'max_pf': prio_fee_data['max'] + } + account_info_expanded.append(info) + write_lock_info[relevant_slot] = [acc for acc in account_info_expanded if acc['is_write_locked'] is True] + read_lock_info[relevant_slot] = [acc for acc in account_info_expanded if acc['is_write_locked'] is False] + # for wli in write_lock_info[relevant_slot]: + # print("- write lock info: " + wli["key"]) + # for rli in read_lock_info[relevant_slot]: + # print("- read lock info: " + rli["key"]) + + row["write_lock_info"] = write_lock_info + row["read_lock_info"] = read_lock_info return maprows From ec982d44e4cd093a9e9ebadaa37321c8899c3e25 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 09:56:29 +0100 Subject: [PATCH 08/20] recent block list note: where are the 
banking_stage --- recent_blocks_database.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/recent_blocks_database.py b/recent_blocks_database.py index eb436d5..08e008c 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -60,12 +60,11 @@ def run_query(to_slot=None): slot, processed_transactions, successful_transactions, - banking_stage_errors, + 999 as banking_stage_errors, total_cu_used, total_cu_requested, supp_infos - FROM banking_stage_results.blocks - -- this critera uses index idx_blocks_slot_errors + FROM banking_stage_results_2.blocks WHERE -- short circuit if true (%s or slot <= %s) From 313c782bd932effd66c42f8130a6b93b5e8118b3 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:20:09 +0100 Subject: [PATCH 09/20] add table alias convention to readme --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index 390a382..d6f2a99 100644 --- a/README.md +++ b/README.md @@ -64,3 +64,15 @@ Open Firefox Browser and navigate to ... | Hard Limit of HTTP Requests | fly.io | hard_limit | fly.toml | | Python HTTP Server | gunicorn | --workers, --threads | Dockerfile | +### Data Model +Conventions: + +| Table Name | Alias | +|------------------|---------| +| transaction_slot | tx_slot | +| accounts_map_blocks | amb | +| transaction_infos | txi | +| blocks | blocks | +| accounts_map_transaction | amt | +| transaction_slot | tx_slot | +| | | \ No newline at end of file From 750f4362ef11bbe70a02e10a613e3aa082e69770 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:20:23 +0100 Subject: [PATCH 10/20] add banking errors count --- block_details_database.py | 65 ++++++++++++++++++++++++++------- recent_blocks_database.py | 12 +++--- transaction_details_database.py | 1 + 3 files changed, 58 insertions(+), 20 deletions(-) diff --git a/block_details_database.py b/block_details_database.py index d26aae2..9128b59 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -3,6 +3,7 @@ import json def find_block_by_slotnumber(slot_number: int): + # TODO where to get banking_stage_errors from? 
maprows = postgres_connection.query( """ SELECT * FROM ( @@ -12,33 +13,69 @@ def find_block_by_slotnumber(slot_number: int): leader_identity, processed_transactions, successful_transactions, - banking_stage_errors, + ( + SELECT + count(tx_slot.error) + FROM banking_stage_results_2.transaction_slot tx_slot + WHERE tx_slot.slot=blocks.slot + ) AS banking_stage_errors, total_cu_used, total_cu_requested, - heavily_writelocked_accounts, - heavily_readlocked_accounts, supp_infos - FROM banking_stage_results.blocks + FROM banking_stage_results_2.blocks -- this critera uses index idx_blocks_slot WHERE slot = %s ) AS data """, args=[slot_number]) assert len(maprows) <= 1, "Slot is primary key - find zero or one" + print("maprows", maprows) for row in maprows: - # format see BankingStageErrorsTrackingSidecar -> block_info.rs - # parse (k:GubTBrbgk9JwkwX1FkXvsrF1UC2AP7iTgg8SGtgH14QE, cu_req:600000, cu_con:2243126) + slot = row["slot"] - parsed_accounts = json.loads(row["heavily_writelocked_accounts"]) - row['supp_infos'] =json.loads(row['supp_infos']) - parsed_accounts.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) - row["heavily_writelocked_accounts_parsed"] = parsed_accounts - # TODO need new parser + row['supp_infos'] = json.loads(row['supp_infos']) - parsed_accounts = json.loads(row["heavily_readlocked_accounts"]) - parsed_accounts.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) - row["heavily_readlocked_accounts_parsed"] = parsed_accounts + # note: sort order is undefined + accountinfos =( + postgres_connection.query( + """ + SELECT + amb.*, + acc.account_key + FROM banking_stage_results_2.accounts_map_blocks amb + INNER JOIN banking_stage_results_2.accounts acc ON acc.acc_id=amb.acc_id + WHERE slot = %s + """, args=[slot]) + ) + # TODO sort by cu_consumed ref + # parsed_accounts.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) + account_info_expanded = [] + for account_info in accountinfos: + prio_fee_data = json.loads(account_info['prioritization_fees_info']) + info = { + 'slot': account_info['slot'], + 'key': account_info['account_key'], + 'is_write_locked': account_info['is_write_locked'], + 'cu_requested': account_info['total_cu_requested'], + 'cu_consumed': account_info['total_cu_consumed'], + 'min_pf': prio_fee_data['min'], + 'median_pf': prio_fee_data['med'], + 'max_pf': prio_fee_data['max'] + } + account_info_expanded.append(info) + + row["heavily_writelocked_accounts_parsed"] = [acc for acc in account_info_expanded if acc['is_write_locked'] is True] + row["heavily_readlocked_accounts_parsed"] = [acc for acc in account_info_expanded if acc['is_write_locked'] is False] + + # parsed_accounts = json.loads(row["heavily_writelocked_accounts"]) + # row['supp_infos'] = json.loads(row['supp_infos']) + # row["heavily_writelocked_accounts_parsed"] = parsed_accounts + # # TODO need new parser + # + # parsed_accounts = json.loads(row["heavily_readlocked_accounts"]) + # parsed_accounts.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) + # row["heavily_readlocked_accounts_parsed"] = parsed_accounts return maprows diff --git a/recent_blocks_database.py b/recent_blocks_database.py index 08e008c..1419d7c 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -60,7 +60,12 @@ def run_query(to_slot=None): slot, processed_transactions, successful_transactions, - 999 as banking_stage_errors, + ( + SELECT + count(tx_slot.error) + FROM banking_stage_results_2.transaction_slot tx_slot + WHERE tx_slot.slot=blocks.slot + ) AS banking_stage_errors, 
total_cu_used, total_cu_requested, supp_infos @@ -74,11 +79,6 @@ def run_query(to_slot=None): """, [to_slot is None, to_slot]) - # print some samples - # for row in maprows[:3]: - # print(row) - # print("...") - for row in maprows: fixup_row(row) calc_bars(row) diff --git a/transaction_details_database.py b/transaction_details_database.py index eada028..803ef66 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -49,6 +49,7 @@ def find_transaction_details_by_sig(tx_sig: str): row["relevant_slots"] = relevant_slots + # note: sort order is undefined accountinfos_per_slot =( invert_by_slot( postgres_connection.query( From 438e781b97e2d484aab2211845bf9e5d1c65698f Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:22:21 +0100 Subject: [PATCH 11/20] order by cu_consumed --- block_details_database.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/block_details_database.py b/block_details_database.py index 9128b59..5b624dd 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -3,7 +3,6 @@ import json def find_block_by_slotnumber(slot_number: int): - # TODO where to get banking_stage_errors from? maprows = postgres_connection.query( """ SELECT * FROM ( @@ -48,8 +47,6 @@ def find_block_by_slotnumber(slot_number: int): WHERE slot = %s """, args=[slot]) ) - # TODO sort by cu_consumed ref - # parsed_accounts.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) account_info_expanded = [] for account_info in accountinfos: prio_fee_data = json.loads(account_info['prioritization_fees_info']) @@ -64,6 +61,7 @@ def find_block_by_slotnumber(slot_number: int): 'max_pf': prio_fee_data['max'] } account_info_expanded.append(info) + account_info_expanded.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) row["heavily_writelocked_accounts_parsed"] = [acc for acc in account_info_expanded if acc['is_write_locked'] is True] row["heavily_readlocked_accounts_parsed"] = [acc for acc in account_info_expanded if acc['is_write_locked'] is False] From 73e2eccee886a785ff80ded3b1afbe592d05380f Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:32:26 +0100 Subject: [PATCH 12/20] fail hard on query errors --- postgres_connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/postgres_connection.py b/postgres_connection.py index 19e5545..4cb9c23 100644 --- a/postgres_connection.py +++ b/postgres_connection.py @@ -62,8 +62,8 @@ def query(statement, args=[]): keys = [k[0] for k in cursor.description] maprows = [dict(zip(keys, row)) for row in cursor] except Exception as ex: - print("Exception executing query:", ex) - return [] + print("Exception executing statement:", ex, statement) + raise ex if elapsed_total > .2: print("Database Query took", elapsed_total, "secs", "(", elapsed_connect, ")") From ff3f82fa3939d0e02b91d2322ae347c05e529d99 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:36:52 +0100 Subject: [PATCH 13/20] calc first notification slot --- block_details_database.py | 9 -------- recent_blocks_database.py | 1 - templates/transaction_details.html | 2 +- transaction_database.py | 16 ++++++++----- transaction_details_database.py | 36 +++++++++++++++--------------- 5 files changed, 29 insertions(+), 35 deletions(-) diff --git a/block_details_database.py b/block_details_database.py index 5b624dd..e843ba1 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -66,15 +66,6 @@ def find_block_by_slotnumber(slot_number: 
int): row["heavily_writelocked_accounts_parsed"] = [acc for acc in account_info_expanded if acc['is_write_locked'] is True] row["heavily_readlocked_accounts_parsed"] = [acc for acc in account_info_expanded if acc['is_write_locked'] is False] - # parsed_accounts = json.loads(row["heavily_writelocked_accounts"]) - # row['supp_infos'] = json.loads(row['supp_infos']) - # row["heavily_writelocked_accounts_parsed"] = parsed_accounts - # # TODO need new parser - # - # parsed_accounts = json.loads(row["heavily_readlocked_accounts"]) - # parsed_accounts.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) - # row["heavily_readlocked_accounts_parsed"] = parsed_accounts - return maprows diff --git a/recent_blocks_database.py b/recent_blocks_database.py index 1419d7c..e06c6a3 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -112,7 +112,6 @@ def find_block_by_slotnumber(slot_number: int): calc_bars(row) calc_figures(row) - return maprows diff --git a/templates/transaction_details.html b/templates/transaction_details.html index f36bdfd..1b45c96 100644 --- a/templates/transaction_details.html +++ b/templates/transaction_details.html @@ -55,7 +55,7 @@ First notification slot - ?????? TODO + {{ transaction.first_notification_slot | slotnumber }} CU Requested diff --git a/transaction_database.py b/transaction_database.py index 1a2aa16..07e86dd 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -12,19 +12,23 @@ def run_query(): signature, ( SELECT ARRAY_AGG(json_object('error' VALUE err.error,'count':count)::text) - FROM banking_stage_results_2.transaction_slot txslot - INNER JOIN banking_stage_results_2.errors err ON err.error_code=txslot.error - WHERE txslot.transaction_id=txi.transaction_id + FROM banking_stage_results_2.transaction_slot tx_slot + INNER JOIN banking_stage_results_2.errors err ON err.error_code=tx_slot.error + WHERE tx_slot.transaction_id=txi.transaction_id ) AS all_errors, is_successful, processed_slot, - --first_notification_slot, + ( + SELECT min(slot) + FROM banking_stage_results_2.transaction_slot tx_slot + WHERE tx_slot.transaction_id=txi.transaction_id + ) AS first_notification_slot, cu_requested, prioritization_fees, ( SELECT min(utc_timestamp) - FROM banking_stage_results_2.transaction_slot txslot - WHERE txslot.transaction_id=txi.transaction_id + FROM banking_stage_results_2.transaction_slot tx_slot + WHERE tx_slot.transaction_id=txi.transaction_id ) AS utc_timestamp FROM banking_stage_results_2.transaction_infos txi INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id diff --git a/transaction_details_database.py b/transaction_details_database.py index 803ef66..52c67b6 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -4,6 +4,10 @@ import transaction_database from collections import defaultdict +def format_timestamps(row): + return print(row["utc_timestamp"]) + + def find_transaction_details_by_sig(tx_sig: str): # transaction table primary key is used maprows = postgres_connection.query( @@ -11,16 +15,19 @@ def find_transaction_details_by_sig(tx_sig: str): SELECT txi.transaction_id, signature, - '{}'::text[] all_errors,-- FIXME is_successful, processed_slot, - --first_notification_slot, + ( + SELECT min(slot) + FROM banking_stage_results_2.transaction_slot tx_slot + WHERE tx_slot.transaction_id=txi.transaction_id + ) AS first_notification_slot, cu_requested, prioritization_fees, ( SELECT min(utc_timestamp) - FROM banking_stage_results_2.transaction_slot 
txslot - WHERE txslot.transaction_id=txi.transaction_id + FROM banking_stage_results_2.transaction_slot tx_slot + WHERE tx_slot.transaction_id=txi.transaction_id ) AS utc_timestamp FROM banking_stage_results_2.transaction_infos txi INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id @@ -29,19 +36,20 @@ def find_transaction_details_by_sig(tx_sig: str): assert len(maprows) <= 1, "Tx Sig is primary key - find zero or one" - # TODO this is only one row if maprows: row = maprows[0] + + format_timestamps(row) # {'transaction_id': 1039639, 'slot': 234765028, 'error': 34, 'count': 1, 'utc_timestamp': datetime.datetime(2023, 12, 8, 18, 29, 23, 861619)} tx_slots = postgres_connection.query( """ SELECT - txslot.slot, - txslot.error, + tx_slot.slot, + tx_slot.error, err.error - FROM banking_stage_results_2.transaction_slot txslot - INNER JOIN banking_stage_results_2.errors err ON err.error_code=txslot.error + FROM banking_stage_results_2.transaction_slot tx_slot + INNER JOIN banking_stage_results_2.errors err ON err.error_code=tx_slot.error WHERE transaction_id=%s """, args=[row["transaction_id"]]) # ordered by slots ascending @@ -70,11 +78,6 @@ def find_transaction_details_by_sig(tx_sig: str): read_lock_info = dict() for relevant_slot in relevant_slots: accountinfos = accountinfos_per_slot.get(relevant_slot, []) - # print(" - slot: ", relevant_slot) - # print(" - errors in slot:") - # for tx_slots_row in tx_slots: - # if tx_slots_row['slot'] == relevant_slot: - # print(" - " + tx_slots_row['error']) account_info_expanded = [] for account_info in accountinfos: @@ -90,12 +93,9 @@ def find_transaction_details_by_sig(tx_sig: str): 'max_pf': prio_fee_data['max'] } account_info_expanded.append(info) + account_info_expanded.sort(key=lambda acc: int(acc['cu_consumed']), reverse=True) write_lock_info[relevant_slot] = [acc for acc in account_info_expanded if acc['is_write_locked'] is True] read_lock_info[relevant_slot] = [acc for acc in account_info_expanded if acc['is_write_locked'] is False] - # for wli in write_lock_info[relevant_slot]: - # print("- write lock info: " + wli["key"]) - # for rli in read_lock_info[relevant_slot]: - # print("- read lock info: " + rli["key"]) row["write_lock_info"] = write_lock_info row["read_lock_info"] = read_lock_info From bd4992114397c196be1259db99bd85ac52a92a26 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:39:33 +0100 Subject: [PATCH 14/20] remove unused mappings --- block_details_database.py | 6 ------ recent_blocks_database.py | 9 +-------- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/block_details_database.py b/block_details_database.py index e843ba1..d72ab1d 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -69,12 +69,6 @@ def find_block_by_slotnumber(slot_number: int): return maprows - -# parse (k:GubTBrbgk9JwkwX1FkXvsrF1UC2AP7iTgg8SGtgH14QE, cu_req:600000, cu_con:2243126) -# def parse_accounts(acc): -# groups = re.match(r"\((k:)(?P[a-zA-Z0-9]+)(, cu_req:)(?P[0-9]+)(, cu_con:)(?P[0-9]+)\)", acc) -# return (groups.group('k'), groups.group('cu_req'), groups.group('cu_con')) - def main(): find_block_by_slotnumber(226352855) diff --git a/recent_blocks_database.py b/recent_blocks_database.py index e06c6a3..1d05103 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -80,9 +80,9 @@ def run_query(to_slot=None): [to_slot is None, to_slot]) for row in maprows: - fixup_row(row) calc_bars(row) calc_figures(row) + row["prioritization_fees"] = 
json.loads(row['supp_infos']) return maprows @@ -108,18 +108,12 @@ def find_block_by_slotnumber(slot_number: int): assert len(maprows) <= 1, "Slot is primary key - find zero or one" for row in maprows: - fixup_row(row) calc_bars(row) calc_figures(row) return maprows -def fixup_row(row): - row['banking_stage_errors'] = row['banking_stage_errors'] or 0 - row['prioritization_fees'] = json.loads(row['supp_infos']) - - def find_block_by_blockhash(block_hash: str): maprows = postgres_connection.query( """ @@ -141,7 +135,6 @@ def find_block_by_blockhash(block_hash: str): assert len(maprows) <= 1, "Block hash is unique - find zero or one" for row in maprows: - fixup_row(row) calc_bars(row) calc_figures(row) From 531bfcb4d4bf2585f6584d7080ba784845a754a6 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:54:23 +0100 Subject: [PATCH 15/20] format timestamp --- block_details_database.py | 3 +-- recent_blocks_database.py | 1 - templates/_txlist.html | 2 +- transaction_database.py | 22 ++++++++-------------- transaction_details_database.py | 16 ++++++++++------ 5 files changed, 20 insertions(+), 24 deletions(-) diff --git a/block_details_database.py b/block_details_database.py index d72ab1d..4c7f053 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -28,7 +28,6 @@ def find_block_by_slotnumber(slot_number: int): """, args=[slot_number]) assert len(maprows) <= 1, "Slot is primary key - find zero or one" - print("maprows", maprows) for row in maprows: slot = row["slot"] @@ -36,7 +35,7 @@ def find_block_by_slotnumber(slot_number: int): row['supp_infos'] = json.loads(row['supp_infos']) # note: sort order is undefined - accountinfos =( + accountinfos = ( postgres_connection.query( """ SELECT diff --git a/recent_blocks_database.py b/recent_blocks_database.py index 1d05103..1cfe837 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -13,7 +13,6 @@ def format_width_percentage(x): def calc_figures(row): successful_transactions = row['successful_transactions'] processed_transactions = row['processed_transactions'] - banking_stage_errors = row['banking_stage_errors'] or 0 txerrors = processed_transactions - successful_transactions row['txerrors'] = txerrors diff --git a/templates/_txlist.html b/templates/_txlist.html index dd2fe6e..53ca85a 100644 --- a/templates/_txlist.html +++ b/templates/_txlist.html @@ -23,7 +23,7 @@
- {{ tx.timestamp_formatted }}
+ {{ tx.timestamp_formatted or '--' }}
diff --git a/transaction_database.py b/transaction_database.py index 07e86dd..1290cb8 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -5,9 +5,7 @@ import json def run_query(): maprows = postgres_connection.query( """ - SELECT - * - FROM ( + SELECT * FROM ( SELECT signature, ( @@ -38,15 +36,10 @@ def run_query(): LIMIT 50 """) - # print some samples - # for row in maprows[:3]: - # print(row) - # print("...") - for index, row in enumerate(maprows): row['pos'] = index + 1 map_jsons_in_row(row) - map_timestamp(row) + map_timestamps(row) return maprows @@ -80,15 +73,16 @@ def find_transaction_by_sig(tx_sig: str): for row in maprows: map_jsons_in_row(row) - map_timestamp(row) + map_timestamps(row) return maprows -# TODO format to MON DD HH24:MI:SS.MS -def map_timestamp(row): - row['timestamp_formatted'] = row['utc_timestamp'] - return row +def map_timestamps(row): + # type datetime.datetime + dt = row['utc_timestamp'] + if dt is not None: + row['timestamp_formatted'] = dt.strftime('%a %d %H:%M:%S.%f') def map_jsons_in_row(row): diff --git a/transaction_details_database.py b/transaction_details_database.py index 52c67b6..f4dfe68 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -4,10 +4,6 @@ import transaction_database from collections import defaultdict -def format_timestamps(row): - return print(row["utc_timestamp"]) - - def find_transaction_details_by_sig(tx_sig: str): # transaction table primary key is used maprows = postgres_connection.query( @@ -38,8 +34,8 @@ def find_transaction_details_by_sig(tx_sig: str): if maprows: row = maprows[0] - - format_timestamps(row) + + map_timestamps(row) # {'transaction_id': 1039639, 'slot': 234765028, 'error': 34, 'count': 1, 'utc_timestamp': datetime.datetime(2023, 12, 8, 18, 29, 23, 861619)} tx_slots = postgres_connection.query( @@ -110,3 +106,11 @@ def invert_by_slot(rows): for row in rows: inv_indx[row["slot"]].append(row) return inv_indx + +# TODO format to MON DD HH24:MI:SS.MS +def map_timestamps(row): + # type datetime.datetime + dt = row['utc_timestamp'] + if dt is not None: + row['timestamp_formatted'] = dt.strftime('%a %d %H:%M:%S.%f') + From 2d075af21b568cde54a3ac46d042eac41a130fb0 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 10:57:42 +0100 Subject: [PATCH 16/20] move timestamp formating to template filter --- app.py | 12 ++++++++++++ templates/_txlist.html | 2 +- templates/transaction_details.html | 2 +- transaction_database.py | 9 --------- transaction_details_database.py | 8 -------- 5 files changed, 14 insertions(+), 19 deletions(-) diff --git a/app.py b/app.py index d8ad71c..a1ad169 100644 --- a/app.py +++ b/app.py @@ -8,6 +8,7 @@ import recent_blocks_database import block_details_database import config import locale +from datetime import datetime # # MAIN @@ -204,3 +205,14 @@ def mapcount_filter(number: int): print("FIELD_ERROR in template filter") return "FIELD_ERROR" + +@webapp.template_filter('timestamp') +def timestamp_filter(dt: datetime): + if dt is None: + return None + else: + try: + return dt.strftime('%a %d %H:%M:%S.%f') + except TypeError: + print("FIELD_ERROR in template filter") + return "FIELD_ERROR" diff --git a/templates/_txlist.html b/templates/_txlist.html index 53ca85a..7ab5bd0 100644 --- a/templates/_txlist.html +++ b/templates/_txlist.html @@ -23,7 +23,7 @@
- {{ tx.timestamp_formatted or '--' }}
+ {{ tx.utc_timestamp or '--' }}
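Side note on the new formatting: the strftime('%a %d %H:%M:%S.%f') pattern used by the timestamp_filter added to app.py above (and by map_timestamps in the previous patch) is not a one-to-one replacement for the old Postgres to_char mask 'MON DD HH24:MI:SS.MS' — it prints the abbreviated weekday instead of the month, and full microseconds instead of milliseconds. A small standalone check, using the sample utc_timestamp quoted in the transaction_slot comment in an earlier patch:

from datetime import datetime

# sample value taken from the transaction_slot comment in transaction_details_database.py
dt = datetime(2023, 12, 8, 18, 29, 23, 861619)

# pattern used by timestamp_filter / map_timestamps in these patches
print(dt.strftime('%a %d %H:%M:%S.%f'))                                  # Fri 08 18:29:23.861619

# rough equivalent of the old Postgres to_char mask 'MON DD HH24:MI:SS.MS'
print(dt.strftime('%b %d %H:%M:%S.') + f"{dt.microsecond // 1000:03d}")  # Dec 08 18:29:23.861
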
diff --git a/templates/transaction_details.html b/templates/transaction_details.html index 1b45c96..a87ed5c 100644 --- a/templates/transaction_details.html +++ b/templates/transaction_details.html @@ -67,7 +67,7 @@ UTC timestamp - {{ transaction.timestamp_formatted }} + {{ transaction.utc_timestamp | timestamp }} diff --git a/transaction_database.py b/transaction_database.py index 1290cb8..02e5f30 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -39,7 +39,6 @@ def run_query(): for index, row in enumerate(maprows): row['pos'] = index + 1 map_jsons_in_row(row) - map_timestamps(row) return maprows @@ -73,18 +72,10 @@ def find_transaction_by_sig(tx_sig: str): for row in maprows: map_jsons_in_row(row) - map_timestamps(row) return maprows -def map_timestamps(row): - # type datetime.datetime - dt = row['utc_timestamp'] - if dt is not None: - row['timestamp_formatted'] = dt.strftime('%a %d %H:%M:%S.%f') - - def map_jsons_in_row(row): errors = [] if row["all_errors"] is None: diff --git a/transaction_details_database.py b/transaction_details_database.py index f4dfe68..e165798 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -35,8 +35,6 @@ def find_transaction_details_by_sig(tx_sig: str): if maprows: row = maprows[0] - map_timestamps(row) - # {'transaction_id': 1039639, 'slot': 234765028, 'error': 34, 'count': 1, 'utc_timestamp': datetime.datetime(2023, 12, 8, 18, 29, 23, 861619)} tx_slots = postgres_connection.query( """ @@ -107,10 +105,4 @@ def invert_by_slot(rows): inv_indx[row["slot"]].append(row) return inv_indx -# TODO format to MON DD HH24:MI:SS.MS -def map_timestamps(row): - # type datetime.datetime - dt = row['utc_timestamp'] - if dt is not None: - row['timestamp_formatted'] = dt.strftime('%a %d %H:%M:%S.%f') From 3bdfef900fb11b39ad9206ffc6f2905da3ab899f Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 11:15:43 +0100 Subject: [PATCH 17/20] fix query error on set --- transaction_details_database.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/transaction_details_database.py b/transaction_details_database.py index e165798..7e2e71c 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -47,12 +47,12 @@ def find_transaction_details_by_sig(tx_sig: str): WHERE transaction_id=%s """, args=[row["transaction_id"]]) # ordered by slots ascending - relevant_slots = set([txslot["slot"] for txslot in tx_slots]) + relevant_slots = [txslot["slot"] for txslot in tx_slots] row["relevant_slots"] = relevant_slots # note: sort order is undefined - accountinfos_per_slot =( + accountinfos_per_slot = ( invert_by_slot( postgres_connection.query( """ @@ -65,12 +65,9 @@ def find_transaction_details_by_sig(tx_sig: str): """, args=[relevant_slots])) ) - # print("- transaction details for sig: " + tx_sig) - # print("- relevant slots: " + str(relevant_slots)) - write_lock_info = dict() read_lock_info = dict() - for relevant_slot in relevant_slots: + for relevant_slot in set(relevant_slots): accountinfos = accountinfos_per_slot.get(relevant_slot, []) account_info_expanded = [] From fb0e11c80326939c1cc6810fb9f36d8dc6f31afb Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 21:33:57 +0100 Subject: [PATCH 18/20] consolidated queries --- recent_blocks_database.py | 59 ++++++++------------------------------- transaction_database.py | 37 ++++++------------------ 2 files changed, 20 insertions(+), 76 deletions(-) diff --git a/recent_blocks_database.py 
b/recent_blocks_database.py index 1cfe837..0078748 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -51,7 +51,7 @@ def calc_bars(row): row['hide_bar'] = True -def run_query(to_slot=None): +def run_query(to_slot=None, filter_slot=None, filter_blockhash=None): maprows = postgres_connection.query( """ SELECT * FROM ( @@ -69,14 +69,19 @@ def run_query(to_slot=None): total_cu_requested, supp_infos FROM banking_stage_results_2.blocks - WHERE - -- short circuit if true - (%s or slot <= %s) + WHERE true + AND (%s or slot <= %s) + AND (%s or slot = %s) + AND (%s or block_hash = %s) ORDER BY slot DESC LIMIT 100 ) AS data """, - [to_slot is None, to_slot]) + [ + to_slot is None, to_slot, + filter_slot is None, filter_slot, + filter_blockhash is None, filter_blockhash, + ]) for row in maprows: calc_bars(row) @@ -87,58 +92,18 @@ def run_query(to_slot=None): def find_block_by_slotnumber(slot_number: int): - maprows = postgres_connection.query( - """ - SELECT * FROM ( - SELECT - slot, - processed_transactions, - successful_transactions, - banking_stage_errors, - total_cu_used, - total_cu_requested, - supp_infos - FROM banking_stage_results.blocks - -- this critera uses index idx_blocks_slot - WHERE slot = %s - ) AS data - """, args=[slot_number]) + maprows = run_query(filter_slot=slot_number) assert len(maprows) <= 1, "Slot is primary key - find zero or one" - for row in maprows: - calc_bars(row) - calc_figures(row) - return maprows def find_block_by_blockhash(block_hash: str): - maprows = postgres_connection.query( - """ - SELECT * FROM ( - SELECT - slot, - processed_transactions, - successful_transactions, - banking_stage_errors, - total_cu_used, - total_cu_requested, - supp_infos - FROM banking_stage_results.blocks - -- uses index on primary key - WHERE block_hash = %s - ) AS data - """, args=[block_hash]) + maprows = run_query(filter_blockhash=block_hash) assert len(maprows) <= 1, "Block hash is unique - find zero or one" - for row in maprows: - calc_bars(row) - calc_figures(row) - - print("found ", maprows, block_hash) - return maprows diff --git a/transaction_database.py b/transaction_database.py index 02e5f30..16b2da4 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -1,8 +1,9 @@ import postgres_connection import json +TXLIST_ROW_LIMIT = 50 -def run_query(): +def run_query(filter_txsig=None): maprows = postgres_connection.query( """ SELECT * FROM ( @@ -31,10 +32,13 @@ def run_query(): FROM banking_stage_results_2.transaction_infos txi INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id WHERE true + AND (%s or signature = %s) ) as data ORDER BY processed_slot, utc_timestamp, signature DESC LIMIT 50 - """) + """, [ + filter_txsig is None, filter_txsig, + ]) for index, row in enumerate(maprows): row['pos'] = index + 1 @@ -44,34 +48,9 @@ def run_query(): def find_transaction_by_sig(tx_sig: str): - maprows = postgres_connection.query( - """ - WITH tx_aggregated AS ( - SELECT - signature as sig, - min(first_notification_slot) as min_slot, - ARRAY_AGG(errors) as all_errors - FROM banking_stage_results.transaction_infos - WHERE signature = %s - GROUP BY signature - ) - SELECT - signature, - tx_aggregated.all_errors, - is_executed, - is_confirmed, - first_notification_slot, - cu_requested, - prioritization_fees, - utc_timestamp - FROM banking_stage_results.transaction_infos txi - INNER JOIN tx_aggregated ON tx_aggregated.sig=txi.signature AND tx_aggregated.min_slot=txi.first_notification_slot - """, args=[tx_sig]) + 
maprows = run_query(filter_txsig=tx_sig) - assert len(maprows) <= 1, "Tx Sig is primary key - find zero or one" - - for row in maprows: - map_jsons_in_row(row) + assert len(maprows) <= 1, "Signature is primary key - find zero or one" return maprows From 42e7d5916ba09de1f4ed22e10cc9759d04c33f3c Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Mon, 11 Dec 2023 22:03:21 +0100 Subject: [PATCH 19/20] use error_text+error_code --- block_details_database.py | 2 +- recent_blocks_database.py | 2 +- transaction_database.py | 6 +++--- transaction_details_database.py | 5 ++--- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/block_details_database.py b/block_details_database.py index 4c7f053..025fcdf 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -14,7 +14,7 @@ def find_block_by_slotnumber(slot_number: int): successful_transactions, ( SELECT - count(tx_slot.error) + count(tx_slot.error_code) FROM banking_stage_results_2.transaction_slot tx_slot WHERE tx_slot.slot=blocks.slot ) AS banking_stage_errors, diff --git a/recent_blocks_database.py b/recent_blocks_database.py index 0078748..661564b 100644 --- a/recent_blocks_database.py +++ b/recent_blocks_database.py @@ -61,7 +61,7 @@ def run_query(to_slot=None, filter_slot=None, filter_blockhash=None): successful_transactions, ( SELECT - count(tx_slot.error) + count(tx_slot.error_code) FROM banking_stage_results_2.transaction_slot tx_slot WHERE tx_slot.slot=blocks.slot ) AS banking_stage_errors, diff --git a/transaction_database.py b/transaction_database.py index 16b2da4..ec73e5d 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -10,9 +10,9 @@ def run_query(filter_txsig=None): SELECT signature, ( - SELECT ARRAY_AGG(json_object('error' VALUE err.error,'count':count)::text) + SELECT ARRAY_AGG(json_build_object('error', err.error_text, 'count', count)::text) FROM banking_stage_results_2.transaction_slot tx_slot - INNER JOIN banking_stage_results_2.errors err ON err.error_code=tx_slot.error + INNER JOIN banking_stage_results_2.errors err ON err.error_code=tx_slot.error_code WHERE tx_slot.transaction_id=txi.transaction_id ) AS all_errors, is_successful, @@ -61,7 +61,7 @@ def map_jsons_in_row(row): row["all_errors"] = [] return for errors_json in row["all_errors"]: - # {"{\"error\" : \"TransactionError::AccountInUse\", \"count\" : 1}"} + # {"{\"error_text\" : \"TransactionError::AccountInUse\", \"count\" : 1}"} errors.append(json.loads(errors_json)) row["errors_array"] = errors diff --git a/transaction_details_database.py b/transaction_details_database.py index 7e2e71c..63a76b2 100644 --- a/transaction_details_database.py +++ b/transaction_details_database.py @@ -40,10 +40,9 @@ def find_transaction_details_by_sig(tx_sig: str): """ SELECT tx_slot.slot, - tx_slot.error, - err.error + err.error_text FROM banking_stage_results_2.transaction_slot tx_slot - INNER JOIN banking_stage_results_2.errors err ON err.error_code=tx_slot.error + INNER JOIN banking_stage_results_2.errors err ON err.error_code=tx_slot.error_code WHERE transaction_id=%s """, args=[row["transaction_id"]]) # ordered by slots ascending From 66b3c8c3e09ce137d22caf34eb8092717d3c7381 Mon Sep 17 00:00:00 2001 From: GroovieGermanikus Date: Tue, 12 Dec 2023 08:57:38 +0100 Subject: [PATCH 20/20] add search txs by address --- app.py | 32 +++++++++++++++++++++++++------- block_details_database.py | 10 ++++++++++ templates/_txlist.html | 5 +++++ transaction_database.py | 29 +++++++++++++++++++++++------ 4 files changed, 63 insertions(+), 13 
deletions(-) diff --git a/app.py b/app.py index a1ad169..2853e94 100644 --- a/app.py +++ b/app.py @@ -47,7 +47,7 @@ def dashboard(): def tx_errors(): this_config = config.get_config() start = time.time() - maprows = list(transaction_database.run_query()) + maprows = list(transaction_database.run_query(transaction_row_limit=50)) elapsed = time.time() - start if elapsed > .5: print("transaction_database.RunQuery() took", elapsed, "seconds") @@ -90,16 +90,24 @@ def is_slot_number(raw_string): return re.fullmatch("[0-9,]+", raw_string) is not None -def is_block_hash(raw_string): - # regex is not perfect - feel free to improve +# used for blockhash AND account pubkey +def is_b58_44(raw_string): return re.fullmatch("[0-9a-zA-Z]{43,44}", raw_string) is not None def is_tx_sig(raw_string): # regex is not perfect - feel free to improve - if is_block_hash(raw_string): + if is_b58_44(raw_string): return False - return re.fullmatch("[0-9a-zA-Z]{64,100}", raw_string) is not None + return re.fullmatch("[0-9a-zA-Z]{86,88}", raw_string) is not None + + +# account address +# if NOT blockhash +def is_account_key(raw_string): + return re.fullmatch("[0-9a-zA-Z]{32,44}", raw_string) is not None + + @webapp.route('/search', methods=["GET", "POST"]) @@ -122,18 +130,28 @@ def search(): return render_template('_blockslist.html', config=this_config, blocks=maprows) else: return render_template('_search_noresult.html', config=this_config) - elif is_block_hash(search_string): + + is_blockhash = block_details_database.is_matching_blockhash(search_string) + + if is_blockhash: print("blockhash search=", search_string) maprows = list(recent_blocks_database.find_block_by_blockhash(search_string)) if len(maprows): return render_template('_blockslist.html', config=this_config, blocks=maprows) else: return render_template('_search_noresult.html', config=this_config) + elif not is_blockhash and is_b58_44(search_string): + print("account address search=", search_string) + (maprows, is_limit_exceeded) = list(transaction_database.query_transactions_by_address(search_string)) + if len(maprows): + return render_template('_txlist.html', config=this_config, transactions=maprows, limit_exceeded=is_limit_exceeded) + else: + return render_template('_search_noresult.html', config=this_config) elif is_tx_sig(search_string): print("txsig search=", search_string) maprows = list(transaction_database.find_transaction_by_sig(search_string)) if len(maprows): - return render_template('_txlist.html', config=this_config, transactions=maprows) + return render_template('_txlist.html', config=this_config, transactions=maprows, limit_exceeded=False) else: return render_template('_search_noresult.html', config=this_config) else: diff --git a/block_details_database.py b/block_details_database.py index 025fcdf..b759b66 100644 --- a/block_details_database.py +++ b/block_details_database.py @@ -68,6 +68,16 @@ def find_block_by_slotnumber(slot_number: int): return maprows +def is_matching_blockhash(block_hash): + maprows = postgres_connection.query( + """ + SELECT 1 FROM banking_stage_results_2.blocks + WHERE block_hash = %s + """, [block_hash]) + + return len(maprows) > 0 + + def main(): find_block_by_slotnumber(226352855) diff --git a/templates/_txlist.html b/templates/_txlist.html index 7ab5bd0..77507bf 100644 --- a/templates/_txlist.html +++ b/templates/_txlist.html @@ -11,6 +11,11 @@ + {% if limit_exceeded %} + + Result was truncated + + {% endif %} {% for tx in transactions %} {{ tx.pos }} diff --git a/transaction_database.py b/transaction_database.py 
index ec73e5d..3270420 100644 --- a/transaction_database.py +++ b/transaction_database.py @@ -1,9 +1,7 @@ import postgres_connection import json -TXLIST_ROW_LIMIT = 50 - -def run_query(filter_txsig=None): +def run_query(transaction_row_limit=None, filter_txsig=None, filter_account_address=None): maprows = postgres_connection.query( """ SELECT * FROM ( @@ -33,11 +31,19 @@ def run_query(filter_txsig=None): INNER JOIN banking_stage_results_2.transactions txs ON txs.transaction_id=txi.transaction_id WHERE true AND (%s or signature = %s) - ) as data + AND (%s or txi.transaction_id in ( + SELECT transaction_id + FROM banking_stage_results_2.accounts_map_transaction amt + INNER JOIN banking_stage_results_2.accounts acc ON acc.acc_id=amt.acc_id + WHERE account_key = %s + )) + ) AS data ORDER BY processed_slot, utc_timestamp, signature DESC - LIMIT 50 + LIMIT %s """, [ filter_txsig is None, filter_txsig, + filter_account_address is None, filter_account_address, + transaction_row_limit or 50, ]) for index, row in enumerate(maprows): @@ -48,13 +54,24 @@ def run_query(filter_txsig=None): def find_transaction_by_sig(tx_sig: str): - maprows = run_query(filter_txsig=tx_sig) + maprows = run_query(transaction_row_limit=10, filter_txsig=tx_sig) assert len(maprows) <= 1, "Signature is primary key - find zero or one" return maprows +# return (rows, is_limit_exceeded) +def query_transactions_by_address(account_key: str) -> (list, bool): + maprows = run_query(transaction_row_limit=501, filter_account_address=account_key) + + if len(maprows) == 501: + print("limit exceeded while searching for transactions by address") + return maprows, True + + return maprows, False + + def map_jsons_in_row(row): errors = [] if row["all_errors"] is None:
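
The search routing added to app.py in this last patch can be exercised in isolation. The sketch below copies the regex helpers from the diff (is_slot_number, is_b58_44, is_tx_sig) and classifies a few values taken from earlier patches; it skips the database-backed blockhash check (is_matching_blockhash), so it is only an approximation of the real /search flow, not the application itself.

import re

# standalone copies of the helpers added to app.py (illustrative only)

def is_slot_number(raw_string: str) -> bool:
    # slot numbers are digits, optionally with thousand separators
    return re.fullmatch("[0-9,]+", raw_string) is not None

def is_b58_44(raw_string: str) -> bool:
    # 43-44 base58-ish characters: matches both blockhashes and account pubkeys
    return re.fullmatch("[0-9a-zA-Z]{43,44}", raw_string) is not None

def is_tx_sig(raw_string: str) -> bool:
    # transaction signatures are longer; must not collide with pubkeys/blockhashes
    if is_b58_44(raw_string):
        return False
    return re.fullmatch("[0-9a-zA-Z]{86,88}", raw_string) is not None

def classify(term: str) -> str:
    # mirrors the if/elif order in /search, minus the DB lookup that separates
    # blockhashes from account addresses
    if is_slot_number(term):
        return "slot"
    if is_b58_44(term):
        return "blockhash or account address"
    if is_tx_sig(term):
        return "transaction signature"
    return "unknown"

if __name__ == "__main__":
    print(classify("226352855"))
    print(classify("GubTBrbgk9JwkwX1FkXvsrF1UC2AP7iTgg8SGtgH14QE"))
    print(classify("5sCSTNuqvnFdgvryusZQPyTyz85JUYC37iL3cb88X6vTrwEPQJm9D1TsJetcgAyZEgFWrpza77Uvji3CrupGw1SU"))
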