New Python script to parse JSON mempool data

Fixes #38.
Jochen Hoenicke 2018-10-19 00:38:51 +02:00
parent 548698d6d2
commit 8fd5666a45
5 changed files with 98 additions and 78 deletions

mempool-sql.pl (deleted file)

@@ -1,60 +0,0 @@
#!/usr/bin/perl
use List::Util qw[min max];
my $SQLITE="sqlite3";
my $MYSQL="mysql";
my $MEMPOOLLOG="mempool.log";
my $MYSQLMEMPOOLDB="btc_mempool";
my @feelimit=(0.0001,1,2,3,4,5,6,7,8,10,12,14,17,20,25,30,40,50,60,70,80,100,120,140,170,200,250,300,400,500,600,700,800,1000,1200,1400,1700,2000,2500,3000,4000,5000,6000,7000,8000,10000,2100000000000000);
my @total=();
my @count=();
my @fees=();
my $time = time();
for ($i = 0; $i< @feelimit - 1; $i++) {
    $total[$i] = 0;
    $count[$i] = 0;
    $fees[$i] = 0;
}
my $found = 0;
while(<>) {
    /"size": (\d+)/ and $size = $1;
    /"ancestorsize": (\d+)/ and $asize = $1;
    /"descendantsize": (\d+)/ and $dsize = $1;
    /"fee": (\d*\.\d+)/ and $fee = int($1*1e8 + .5);
    /"ancestorfees": (\d+)/ and $afees = $1;
    /"descendantfees": (\d+)/ and $dfees = $1;
    if (/},/) {
        $afpb = $afees / $asize; # ancestor fee (includes current)
        $fpb = $fee / $size; # current fee
        $dfpb = $dfees / $dsize; # descendant fee (includes current)
        # total average fee for mining all ancestors and descendants.
        $tfpb = ($afees + $dfees - $fee) / ($asize + $dsize - $size);
        # the "min"s ensure we take the fee level of ancestors in account
        # if and only if they pay less (like a miner would do).
        # the "max" ensures we take the descendants into account for CPFP.
        $feeperbyte = max(min($dfpb, $tfpb), min($fpb, $afpb));
        for ($i = 0; $i< @feelimit-1; $i++) {
            if ($feeperbyte >= $feelimit[$i] && $feeperbyte < $feelimit[$i+1]) {
                $total[$i] += $size;
                $count[$i]++;
                $fees[$i] += $fee;
            }
        }
        $found = 1;
    }
}
if ($found) {
    my $cnt = join(",", @count);
    my $size = join(",", @total);
    my $fee = join(",", @fees);
    open(LOG, ">>$MEMPOOLLOG");
    print LOG "[$time,[$cnt],[$size],[$fee]],\n";
    close(LOG);
    open(SQL, "|$MYSQL $MYSQLMEMPOOLDB");
    $line = "INSERT INTO mempool VALUES($time,$cnt,$size,$fee);\n";
    print SQL $line;
    close SQL;
}
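The min/max expression above is what both the Perl script and its Python replacement use to estimate the fee rate a miner effectively sees for a transaction once its unconfirmed ancestors and descendants are taken into account (CPFP). A small worked example, with made-up sizes and fees, shows the intent:

# Hypothetical 250-byte transaction paying 250 sat (1 sat/B) with a single
# 500-byte descendant paying 5000 sat (child pays for parent); all numbers invented.
size, fee = 250, 250
asize, afees = 250, 250                   # no unconfirmed ancestors
dsize, dfees = 250 + 500, 250 + 5000      # descendant totals include the tx itself

fpb = fee / size                                        # 1.0 sat/B on its own
afpb = afees / asize                                    # 1.0 sat/B with ancestors
dfpb = dfees / dsize                                    # 7.0 sat/B with descendants
tfpb = (afees + dfees - fee) / (asize + dsize - size)   # 7.0 sat/B for the whole package

# The min terms only count ancestors when they pay less (as a miner would);
# the max term lets a well-paying descendant pull the transaction in (CPFP).
feeperbyte = max(min(dfpb, tfpb), min(fpb, afpb))       # 7.0 -> lands in the 7-8 sat/B bucket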


@@ -8,6 +8,6 @@ mkdir -p /dev/shm/mempool-btc
cd $MEMPOOLHOME
rm -f $TMPFILE
$BITCOINCLI getrawmempool true > $TMPFILE
perl mempool-sql.pl < $TMPFILE
python3 mempool_sql.py < $TMPFILE
./mkdata.sh

mempool_sql.py (new file, 71 lines)

@@ -0,0 +1,71 @@
#!/usr/bin/env python3
import json
import sys
import time
from subprocess import PIPE, Popen

MYSQL = "/usr/bin/mysql"
MEMPOOLLOG = "mempool.log"
MYSQLMEMPOOLDB = "btc_mempool"
FEELIMIT = [0.0001, 1, 2, 3, 4, 5, 6, 7, 8, 10,
            12, 14, 17, 20, 25, 30, 40, 50, 60, 70, 80, 100,
            120, 140, 170, 200, 250, 300, 400, 500, 600, 700, 800, 1000,
            1200, 1400, 1700, 2000, 2500, 3000, 4000, 5000, 6000, 7000, 8000, 10000]

sizes = [0] * len(FEELIMIT)
count = [0] * len(FEELIMIT)
fees = [0] * len(FEELIMIT)
found = False


def parse_txdata(obj):
    global sizes, count, fees, found
    if "size" in obj:
        size = obj["size"]
        fee = int(obj["fee"]*100000000)
        if "ancestorsize" in obj:
            asize = obj["ancestorsize"]
            afees = obj["ancestorfees"]
        else:
            asize = size
            afees = fee
        dsize = obj["descendantsize"]
        dfees = obj["descendantfees"]
        afpb = afees / asize   # ancestor fee (includes current)
        fpb = fee / size       # current fee
        dfpb = dfees / dsize   # descendant fee (includes current)
        # total average fee for mining all ancestors and descendants.
        tfpb = (afees + dfees - fee) / (asize + dsize - size)
        feeperbyte = max(min(dfpb, tfpb), min(fpb, afpb))
        found = True
        for i, limit in enumerate(FEELIMIT):
            if (feeperbyte >= limit and
                    (i == len(FEELIMIT) - 1 or feeperbyte < FEELIMIT[i+1])):
                sizes[i] += size
                count[i] += 1
                fees[i] += fee
                break
        return None
    return obj


def dump_data(timestamp, sizes, count, fees):
    sizesstr = ",".join(str(x) for x in sizes)
    countstr = ",".join(str(x) for x in count)
    feesstr = ",".join(str(x) for x in fees)
    with open(MEMPOOLLOG, "a") as logfile:
        logfile.write("[{:d},[{}],[{}],[{}]],\n"
                      .format(timestamp, countstr, sizesstr, feesstr))
    proc = Popen([MYSQL, MYSQLMEMPOOLDB], stdin=PIPE, stdout=PIPE)
    proc.communicate("INSERT INTO mempool VALUES({:d},{},{},{});\n"
                     .format(timestamp, countstr, sizesstr, feesstr)
                     .encode("ascii"))


def main():
    global sizes, count, fees, found
    timestamp = int(time.time())
    json.load(sys.stdin, object_hook=parse_txdata)
    if found:
        dump_data(timestamp, sizes, count, fees)


main()
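A note on the object_hook trick in mempool_sql.py: json.load invokes the hook for every JSON object it decodes, innermost objects first, and replaces each object with whatever the hook returns. getrawmempool true produces one outer object keyed by txid whose values are the per-transaction entries, so parse_txdata sees every transaction (and recognizes it by its "size" key) without walking the whole mempool afterwards. A minimal, self-contained sketch with an invented two-transaction mempool:

import json

# Hand-written stand-in for `bitcoin-cli getrawmempool true` output; the txids
# and numbers are invented, and real entries carry more fields than shown here.
sample = '''{
  "txid_a": {"size": 250, "fee": 0.00000250, "ancestorsize": 250,
             "ancestorfees": 250, "descendantsize": 750, "descendantfees": 5250},
  "txid_b": {"size": 400, "fee": 0.00002000, "ancestorsize": 400,
             "ancestorfees": 2000, "descendantsize": 400, "descendantfees": 2000}
}'''

def hook(obj):
    # Called once per decoded object, innermost first: twice for the
    # transaction entries, then once for the outer txid -> entry map.
    print(sorted(obj))
    return obj

json.loads(sample, object_hook=hook)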


@@ -1,14 +1,13 @@
import rpc_pb2 as ln
import rpc_pb2_grpc as lnrpc
import grpc
import re
import os
import sys
import codecs
import time
import grpc
import rpc_pb2 as ln
import rpc_pb2_grpc as lnrpc
def main():
def connect():
    # Due to updated ECDSA generated tls.cert we need to let grpc know that
    # we need to use that cipher suite otherwise there will be a handshake
    # error when we communicate with the lnd rpc server.
@@ -21,7 +20,7 @@ def main():
        macaroon_bytes = f.read()
        macaroon = codecs.encode(macaroon_bytes, 'hex')
    def metadata_callback(context, callback):
    def metadata_callback(_context, callback):
        # for more info see grpc docs
        callback([('macaroon', macaroon)], None)
@@ -36,11 +35,14 @@ def main():
    # finally pass in the combined credentials when creating a channel
    channel = grpc.secure_channel('localhost:10009', combined_creds)
    stub = lnrpc.LightningStub(channel)
    return stub
def main():
    stub = connect()
    uri = os.environ.get("REQUEST_URI")
    match = re.search("[?&]r_hash=([0-9a-f]+)(&.*)?$", uri)
    if not match:
        raise("No Index")
        raise ValueError("No Index")
    rhash = match.group(1)
    settled = False
    timeout = time.time() + 60 # 1 minute timeout
@@ -61,7 +63,7 @@ if debug:
    sys.stderr = sys.stdout
try:
    main()
except:
except Exception:
    import traceback
    print("Status: 500 Internal Error")
    print("Content-Type: text/html; charset=UTF-8")


@@ -1,14 +1,15 @@
import rpc_pb2 as ln
import rpc_pb2_grpc as lnrpc
#!/usr/bin/python3
import cgi
import grpc
import re
import os
import sys
import codecs
import binascii
import grpc
import rpc_pb2 as ln
import rpc_pb2_grpc as lnrpc
def main():
def connect():
    # Due to updated ECDSA generated tls.cert we need to let grpc know that
    # we need to use that cipher suite otherwise there will be a handshake
    # error when we communicate with the lnd rpc server.
@@ -21,7 +22,7 @@ def main():
        macaroon_bytes = f.read()
        macaroon = codecs.encode(macaroon_bytes, 'hex')
    def metadata_callback(context, callback):
    def metadata_callback(_context, callback):
        # for more info see grpc docs
        callback([('macaroon', macaroon)], None)
@@ -36,14 +37,20 @@ def main():
    # finally pass in the combined credentials when creating a channel
    channel = grpc.secure_channel('localhost:10009', combined_creds)
    stub = lnrpc.LightningStub(channel)
    return stub
def main():
    stub = connect()
    form = cgi.FieldStorage()
    value = long(form["value"].value) if "value" in form.keys() else 0
    value = int(form["value"].value) if "value" in form.keys() else 0
    memo = form["memo"].value if "memo" in form.keys() else ""
    invoice = stub.AddInvoice(ln.Invoice(memo=memo, value=value))
    print("Content-Type: application/json; charset=UTF-8")
    print("")
    print('{"r_hash":"%s","payment_request":"%s","add_index":%d}' % (binascii.hexlify(invoice.r_hash),invoice.payment_request,invoice.add_index))
    print('{"r_hash":"%s","payment_request":"%s","add_index":%d}'
          % (binascii.hexlify(invoice.r_hash),
             invoice.payment_request, invoice.add_index))
debug = False
@@ -52,7 +59,7 @@ if debug:
    sys.stderr = sys.stdout
try:
    main()
except:
except Exception:
    import traceback
    print("Status: 500 Internal Error")
    print("Content-Type: text/html; charset=UTF-8")