From 37065d2ed2869aea5a83e69a0ef7a55c5d56724c Mon Sep 17 00:00:00 2001 From: John Newbery Date: Wed, 7 Jun 2017 11:54:39 -0400 Subject: [PATCH 1/6] [tests] remove unused imports from utils.py --- test/functional/bip9-softforks.py | 7 ++++--- test/functional/test_framework/util.py | 2 -- test/functional/walletbackup.py | 3 ++- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/test/functional/bip9-softforks.py b/test/functional/bip9-softforks.py index b90b0ca62..f00232c9f 100755 --- a/test/functional/bip9-softforks.py +++ b/test/functional/bip9-softforks.py @@ -15,6 +15,10 @@ mine a further 143 blocks (LOCKED_IN) test that enforcement has not triggered (which triggers ACTIVE) test that enforcement has triggered """ +from io import BytesIO +import shutil +import time +import itertools from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * @@ -22,9 +26,6 @@ from test_framework.mininode import CTransaction, NetworkThread from test_framework.blocktools import create_coinbase, create_block from test_framework.comptool import TestInstance, TestManager from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP -from io import BytesIO -import time -import itertools class BIP9SoftForksTest(ComparisonTestFramework): diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 2a4f3104a..9f40de08c 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -5,7 +5,6 @@ """Helpful routines for regression testing.""" import os -import sys from binascii import hexlify, unhexlify from base64 import b64encode @@ -13,7 +12,6 @@ from decimal import Decimal, ROUND_DOWN import json import http.client import random -import shutil import subprocess import tempfile import time diff --git a/test/functional/walletbackup.py b/test/functional/walletbackup.py index a4507182a..ff51cba4b 100755 --- a/test/functional/walletbackup.py +++ b/test/functional/walletbackup.py @@ -30,10 +30,11 @@ confirm 1/2/3/4 balances are same as before. Shutdown again, restore using importwallet, and confirm again balances are correct. 
""" +from random import randint +import shutil from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * -from random import randint class WalletBackupTest(BitcoinTestFramework): From f1fe5368f191018a72276a93a9e74cd95f896697 Mon Sep 17 00:00:00 2001 From: John Newbery Date: Wed, 7 Jun 2017 11:44:17 -0400 Subject: [PATCH 2/6] [tests] fix flake8 warnings in test_framework.py and util.py --- .../test_framework/test_framework.py | 37 ++-- test/functional/test_framework/util.py | 168 +++++++++--------- 2 files changed, 104 insertions(+), 101 deletions(-) diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index ac0fbe61f..37e59f79d 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -14,6 +14,7 @@ import subprocess import sys import tempfile import time +import traceback from .util import ( PortSeed, @@ -77,7 +78,7 @@ class BitcoinTestFramework(object): pass def setup_chain(self): - self.log.info("Initializing test directory "+self.options.tmpdir) + self.log.info("Initializing test directory " + self.options.tmpdir) if self.setup_clean_chain: self._initialize_chain_clean(self.options.tmpdir, self.num_nodes) else: @@ -111,9 +112,9 @@ class BitcoinTestFramework(object): help="Leave bitcoinds and test.* datadir on exit or error") parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true", help="Don't stop bitcoinds after the test execution") - parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"), + parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"), help="Source directory containing bitcoind/bitcoin-cli (default: %default)") - parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"), + parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"), help="Directory for caching pregenerated datadirs") parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs") parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO", @@ -134,7 +135,7 @@ class BitcoinTestFramework(object): PortSeed.n = self.options.port_seed - os.environ['PATH'] = self.options.srcdir+":"+self.options.srcdir+"/qt:"+os.environ['PATH'] + os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH'] check_json_precision() @@ -188,7 +189,7 @@ class BitcoinTestFramework(object): for fn in filenames: try: with open(fn, 'r') as f: - print("From" , fn, ":") + print("From", fn, ":") print("".join(deque(f, MAX_LINES_TO_PRINT))) except OSError: print("Opening file %s failed." 
% fn) @@ -257,7 +258,7 @@ class BitcoinTestFramework(object): ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper() ch.setLevel(ll) # Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted) - formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S') + formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S') formatter.converter = time.gmtime fh.setFormatter(formatter) ch.setFormatter(formatter) @@ -354,18 +355,13 @@ class BitcoinTestFramework(object): for i in range(num_nodes): initialize_datadir(test_dir, i) -# Test framework for doing p2p comparison testing, which sets up some bitcoind -# binaries: -# 1 binary: test binary -# 2 binaries: 1 test binary, 1 ref binary -# n>2 binaries: 1 test binary, n-1 ref binaries - -class SkipTest(Exception): - """This exception is raised to skip a test""" - def __init__(self, message): - self.message = message - class ComparisonTestFramework(BitcoinTestFramework): + """Test framework for doing p2p comparison testing + + Sets up some bitcoind binaries: + - 1 binary: test binary + - 2 binaries: 1 test binary, 1 ref binary + - n>2 binaries: 1 test binary, n-1 ref binaries""" def __init__(self): super().__init__() @@ -387,4 +383,9 @@ class ComparisonTestFramework(BitcoinTestFramework): self.nodes = self.start_nodes( self.num_nodes, self.options.tmpdir, extra_args, binary=[self.options.testbinary] + - [self.options.refbinary]*(self.num_nodes-1)) + [self.options.refbinary] * (self.num_nodes - 1)) + +class SkipTest(Exception): + """This exception is raised to skip a test""" + def __init__(self, message): + self.message = message diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 9f40de08c..d02a130a1 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -4,20 +4,19 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Helpful routines for regression testing.""" -import os - -from binascii import hexlify, unhexlify from base64 import b64encode +from binascii import hexlify, unhexlify from decimal import Decimal, ROUND_DOWN -import json +import errno import http.client +import json +import logging +import os import random +import re import subprocess import tempfile import time -import re -import errno -import logging from . import coverage from .authproxy import AuthServiceProxy, JSONRPCException @@ -40,17 +39,17 @@ class PortSeed: # Must be initialized with a unique integer for each process n = None -#Set Mocktime default to OFF. -#MOCKTIME is only needed for scripts that use the -#cached version of the blockchain. If the cached -#version of the blockchain is used without MOCKTIME -#then the mempools will not sync due to IBD. +# Set Mocktime default to OFF. +# MOCKTIME is only needed for scripts that use the +# cached version of the blockchain. If the cached +# version of the blockchain is used without MOCKTIME +# then the mempools will not sync due to IBD. 
MOCKTIME = 0 def enable_mocktime(): - #For backwared compatibility of the python scripts - #with previous versions of the cache, set MOCKTIME - #to Jan 1, 2014 + (201 * 10 * 60) + # For backwared compatibility of the python scripts + # with previous versions of the cache, set MOCKTIME + # to Jan 1, 2014 + (201 * 10 * 60) global MOCKTIME MOCKTIME = 1388534400 + (201 * 10 * 60) @@ -103,7 +102,7 @@ def rpc_port(n): def check_json_precision(): """Make sure json library being used does not lose precision converting BTC values""" n = Decimal("20000000.00000003") - satoshis = int(json.loads(json.dumps(float(n)))*1.0e8) + satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8) if satoshis != 2000000000000003: raise RuntimeError("JSON encode/decode loses precision") @@ -150,7 +149,7 @@ def sync_chain(rpc_connections, *, wait=1, timeout=60): """ while timeout > 0: best_hash = [x.getbestblockhash() for x in rpc_connections] - if best_hash == [best_hash[0]]*len(best_hash): + if best_hash == [best_hash[0]] * len(best_hash): return time.sleep(wait) timeout -= wait @@ -166,7 +165,7 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60): num_match = 1 for i in range(1, len(rpc_connections)): if set(rpc_connections[i].getrawmempool()) == pool: - num_match = num_match+1 + num_match = num_match + 1 if num_match == len(rpc_connections): return time.sleep(wait) @@ -176,18 +175,18 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60): bitcoind_processes = {} def initialize_datadir(dirname, n): - datadir = os.path.join(dirname, "node"+str(n)) + datadir = os.path.join(dirname, "node" + str(n)) if not os.path.isdir(datadir): os.makedirs(datadir) with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f: f.write("regtest=1\n") - f.write("port="+str(p2p_port(n))+"\n") - f.write("rpcport="+str(rpc_port(n))+"\n") + f.write("port=" + str(p2p_port(n)) + "\n") + f.write("rpcport=" + str(rpc_port(n)) + "\n") f.write("listenonion=0\n") return datadir def get_datadir_path(dirname, n): - return os.path.join(dirname, "node"+str(n)) + return os.path.join(dirname, "node" + str(n)) def get_auth_cookie(datadir, n): user = None @@ -196,10 +195,10 @@ def get_auth_cookie(datadir, n): with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f: for line in f: if line.startswith("rpcuser="): - assert user is None # Ensure that there is only one rpcuser line + assert user is None # Ensure that there is only one rpcuser line user = line.split("=")[1].strip("\n") if line.startswith("rpcpassword="): - assert password is None # Ensure that there is only one rpcpassword line + assert password is None # Ensure that there is only one rpcpassword line password = line.split("=")[1].strip("\n") if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")): with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f: @@ -234,15 +233,15 @@ def wait_for_bitcoind_start(process, datadir, i, rpchost=None): try: # Check if .cookie file to be created rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i) - blocks = rpc.getblockcount() - break # break out of loop on success + rpc.getblockcount() + break # break out of loop on success except IOError as e: - if e.errno != errno.ECONNREFUSED: # Port not yet open? - raise # unknown IO error - except JSONRPCException as e: # Initialization phase - if e.error['code'] != -28: # RPC in warmup? - raise # unknown JSON RPC exception - except ValueError as e: # cookie file not found and no rpcuser or rpcassword. 
bitcoind still starting + if e.errno != errno.ECONNREFUSED: # Port not yet open? + raise # unknown IO error + except JSONRPCException as e: # Initialization phase + if e.error['code'] != -28: # RPC in warmup? + raise # unknown JSON RPC exception + except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoind still starting if "No RPC credentials" not in str(e): raise time.sleep(0.25) @@ -255,11 +254,12 @@ def _start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary This function should only be called from within test_framework, not by individual test scripts.""" - datadir = os.path.join(dirname, "node"+str(i)) + datadir = os.path.join(dirname, "node" + str(i)) if binary is None: binary = os.getenv("BITCOIND", "bitcoind") args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i] - if extra_args is not None: args.extend(extra_args) + if extra_args is not None: + args.extend(extra_args) bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up") wait_for_bitcoind_start(bitcoind_processes[i], datadir, i, rpchost) @@ -277,7 +277,7 @@ def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=Non node = _start_node(i, dirname, extra_args, stderr=log_stderr) _stop_node(node, i) except Exception as e: - assert 'bitcoind exited' in str(e) #node must have shutdown + assert 'bitcoind exited' in str(e) # node must have shutdown if expected_msg is not None: log_stderr.seek(0) stderr = log_stderr.read().decode('utf-8') @@ -292,24 +292,27 @@ def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=Non def _start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): """Start multiple bitcoinds, return RPC connections to them - + This function should only be called from within test_framework, not by individual test scripts.""" - if extra_args is None: extra_args = [ None for _ in range(num_nodes) ] - if binary is None: binary = [ None for _ in range(num_nodes) ] + if extra_args is None: + extra_args = [None] * num_nodes + if binary is None: + binary = [None] * num_nodes assert_equal(len(extra_args), num_nodes) assert_equal(len(binary), num_nodes) rpcs = [] try: for i in range(num_nodes): rpcs.append(_start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) - except: # If one node failed to start, stop the others + except: + # If one node failed to start, stop the others _stop_nodes(rpcs) raise return rpcs def log_filename(dirname, n_node, logname): - return os.path.join(dirname, "node"+str(n_node), "regtest", logname) + return os.path.join(dirname, "node" + str(n_node), "regtest", logname) def _stop_node(node, i): """Stop a bitcoind test node @@ -332,7 +335,7 @@ def _stop_nodes(nodes): for i, node in enumerate(nodes): _stop_node(node, i) - assert not bitcoind_processes.values() # All connections must be gone now + assert not bitcoind_processes.values() # All connections must be gone now def set_node_times(nodes, t): for node in nodes: @@ -350,7 +353,7 @@ def disconnect_nodes(from_connection, node_num): raise AssertionError("timed out waiting for disconnect") def connect_nodes(from_connection, node_num): - ip_port = "127.0.0.1:"+str(p2p_port(node_num)) + ip_port = "127.0.0.1:" + str(p2p_port(node_num)) 
from_connection.addnode(ip_port, "onetry") # poll until version handshake complete to avoid race conditions # with transaction relaying @@ -370,14 +373,13 @@ def find_output(node, txid, amount): for i in range(len(txdata["vout"])): if txdata["vout"][i]["value"] == amount: return i - raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount))) - + raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount))) def gather_inputs(from_node, amount_needed, confirmations_required=1): """ Return a random set of unspent txouts that are enough to pay amount_needed """ - assert(confirmations_required >=0) + assert(confirmations_required >= 0) utxo = from_node.listunspent(confirmations_required) random.shuffle(utxo) inputs = [] @@ -385,9 +387,9 @@ def gather_inputs(from_node, amount_needed, confirmations_required=1): while total_in < amount_needed and len(utxo) > 0: t = utxo.pop() total_in += t["amount"] - inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } ) + inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]}) if total_in < amount_needed: - raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in)) + raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in)) return (total_in, inputs) def make_change(from_node, amount_in, amount_out, fee): @@ -395,13 +397,13 @@ def make_change(from_node, amount_in, amount_out, fee): Create change output(s), return them """ outputs = {} - amount = amount_out+fee + amount = amount_out + fee change = amount_in - amount - if change > amount*2: + if change > amount * 2: # Create an extra change output to break up big inputs change_address = from_node.getnewaddress() # Split change in two, being careful of rounding: - outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) change = amount_in - amount - outputs[change_address] if change > 0: outputs[from_node.getnewaddress()] = change @@ -414,9 +416,9 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): """ from_node = random.choice(nodes) to_node = random.choice(nodes) - fee = min_fee + fee_increment*random.randint(0,fee_variants) + fee = min_fee + fee_increment * random.randint(0, fee_variants) - (total_in, inputs) = gather_inputs(from_node, amount+fee) + (total_in, inputs) = gather_inputs(from_node, amount + fee) outputs = make_change(from_node, total_in, amount, fee) outputs[to_node.getnewaddress()] = float(amount) @@ -430,10 +432,10 @@ def assert_fee_amount(fee, tx_size, fee_per_kB): """Assert the fee was in range""" target_fee = tx_size * fee_per_kB / 1000 if fee < target_fee: - raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee))) + raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee))) # allow the wallet's estimation to be at most 2 bytes off if fee > (tx_size + 2) * fee_per_kB / 1000: - raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee))) + raise AssertionError("Fee of %s BTC too high! 
(Should be %s BTC)" % (str(fee), str(target_fee))) def assert_equal(thing1, thing2, *args): if thing1 != thing2 or any(thing1 != arg for arg in args): @@ -441,11 +443,11 @@ def assert_equal(thing1, thing2, *args): def assert_greater_than(thing1, thing2): if thing1 <= thing2: - raise AssertionError("%s <= %s"%(str(thing1),str(thing2))) + raise AssertionError("%s <= %s" % (str(thing1), str(thing2))) def assert_greater_than_or_equal(thing1, thing2): if thing1 < thing2: - raise AssertionError("%s < %s"%(str(thing1),str(thing2))) + raise AssertionError("%s < %s" % (str(thing1), str(thing2))) def assert_raises(exc, fun, *args, **kwds): assert_raises_message(exc, None, fun, *args, **kwds) @@ -455,9 +457,9 @@ def assert_raises_message(exc, message, fun, *args, **kwds): fun(*args, **kwds) except exc as e: if message is not None and message not in e.error['message']: - raise AssertionError("Expected substring not found:"+e.error['message']) + raise AssertionError("Expected substring not found:" + e.error['message']) except Exception as e: - raise AssertionError("Unexpected exception raised: "+type(e).__name__) + raise AssertionError("Unexpected exception raised: " + type(e).__name__) else: raise AssertionError("No exception raised") @@ -484,9 +486,9 @@ def assert_raises_jsonrpc(code, message, fun, *args, **kwds): if (code is not None) and (code != e.error["code"]): raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"]) if (message is not None) and (message not in e.error['message']): - raise AssertionError("Expected substring not found:"+e.error['message']) + raise AssertionError("Expected substring not found:" + e.error['message']) except Exception as e: - raise AssertionError("Unexpected exception raised: "+type(e).__name__) + raise AssertionError("Unexpected exception raised: " + type(e).__name__) else: raise AssertionError("No exception raised") @@ -507,7 +509,7 @@ def assert_is_hash_string(string, length=64): raise AssertionError( "String %r contains invalid characters for a hash." 
% string) -def assert_array_result(object_array, to_match, expected, should_not_find = False): +def assert_array_result(object_array, to_match, expected, should_not_find=False): """ Pass in array of JSON objects, a dictionary with key/value pairs to match against, and another dictionary with expected key/value @@ -515,26 +517,26 @@ def assert_array_result(object_array, to_match, expected, should_not_find = Fals If the should_not_find flag is true, to_match should not be found in object_array """ - if should_not_find == True: - assert_equal(expected, { }) + if should_not_find: + assert_equal(expected, {}) num_matched = 0 for item in object_array: all_match = True - for key,value in to_match.items(): + for key, value in to_match.items(): if item[key] != value: all_match = False if not all_match: continue - elif should_not_find == True: - num_matched = num_matched+1 - for key,value in expected.items(): + elif should_not_find: + num_matched = num_matched + 1 + for key, value in expected.items(): if item[key] != value: - raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value))) - num_matched = num_matched+1 - if num_matched == 0 and should_not_find != True: - raise AssertionError("No objects matched %s"%(str(to_match))) - if num_matched > 0 and should_not_find == True: - raise AssertionError("Objects were found %s"%(str(to_match))) + raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value))) + num_matched = num_matched + 1 + if num_matched == 0 and not should_not_find: + raise AssertionError("No objects matched %s" % (str(to_match))) + if num_matched > 0 and should_not_find: + raise AssertionError("Objects were found %s" % (str(to_match))) def satoshi_round(amount): return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) @@ -542,7 +544,7 @@ def satoshi_round(amount): # Helper to create at least "count" utxos # Pass in a fee that is sufficient for relay and mining new transactions. 
def create_confirmed_utxos(fee, node, count): - node.generate(int(0.5*count)+101) + node.generate(int(0.5 * count) + 101) utxos = node.listunspent() iterations = count - len(utxos) addr1 = node.getnewaddress() @@ -552,14 +554,14 @@ def create_confirmed_utxos(fee, node, count): for i in range(iterations): t = utxos.pop() inputs = [] - inputs.append({ "txid" : t["txid"], "vout" : t["vout"]}) + inputs.append({"txid": t["txid"], "vout": t["vout"]}) outputs = {} send_value = t['amount'] - fee - outputs[addr1] = satoshi_round(send_value/2) - outputs[addr2] = satoshi_round(send_value/2) + outputs[addr1] = satoshi_round(send_value / 2) + outputs[addr2] = satoshi_round(send_value / 2) raw_tx = node.createrawtransaction(inputs, outputs) signed_tx = node.signrawtransaction(raw_tx)["hex"] - txid = node.sendrawtransaction(signed_tx) + node.sendrawtransaction(signed_tx) while (node.getmempoolinfo()['size'] > 0): node.generate(1) @@ -574,8 +576,8 @@ def gen_return_txouts(): # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create # So we have big transactions (and therefore can't fit very many into each block) # create one script_pubkey - script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes - for i in range (512): + script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes + for i in range(512): script_pubkey = script_pubkey + "01" # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change txouts = "81" @@ -589,8 +591,8 @@ def gen_return_txouts(): return txouts def create_tx(node, coinbase, to_address, amount): - inputs = [{ "txid" : coinbase, "vout" : 0}] - outputs = { to_address : amount } + inputs = [{"txid": coinbase, "vout": 0}] + outputs = {to_address: amount} rawtx = node.createrawtransaction(inputs, outputs) signresult = node.signrawtransaction(rawtx) assert_equal(signresult["complete"], True) @@ -603,7 +605,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee): txids = [] for _ in range(num): t = utxos.pop() - inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}] + inputs = [{"txid": t["txid"], "vout": t["vout"]}] outputs = {} change = t['amount'] - fee outputs[addr] = satoshi_round(change) From cad967a892d836b3afbd1ab81c73731e968368c6 Mon Sep 17 00:00:00 2001 From: John Newbery Date: Fri, 2 Jun 2017 13:14:14 -0400 Subject: [PATCH 3/6] [tests] Move stop_node and start_node methods to BitcoinTestFramework This commit moves functions start_node, start_nodes, stop_node and stop_nodes functions into the BitcoinTestFramework class. It also moves the bitcoind_processes dict and coverage variables into BitcoinTestFramework. 
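
For illustration, a minimal hypothetical subclass using the relocated helpers (the class name and the '-usehd' arguments are placeholders, mirroring the wallet-hd.py hunk below); node management is now reached through self rather than imported from util.py:

    from test_framework.test_framework import BitcoinTestFramework

    class ExampleHdTest(BitcoinTestFramework):    # hypothetical test name
        def run_test(self):
            # Node management now goes through methods on the test instance
            # instead of module-level helpers imported from util.py.
            self.stop_node(1)
            # Expect a startup failure (mirrors the wallet-hd.py hunk below).
            self.assert_start_raises_init_error(1, self.options.tmpdir,
                                                ['-usehd=0'],
                                                'already existing HD wallet')
            self.nodes[1] = self.start_node(1, self.options.tmpdir, ['-usehd=1'])

    if __name__ == '__main__':
        ExampleHdTest().main()
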
--- test/functional/blockchain.py | 8 +- test/functional/bumpfee.py | 2 +- test/functional/dbcrash.py | 4 +- test/functional/fundrawtransaction.py | 4 +- test/functional/keypool.py | 2 +- test/functional/rpcbind_test.py | 2 +- .../test_framework/test_framework.py | 135 +++++++++++++++--- test/functional/test_framework/util.py | 133 +---------------- test/functional/wallet-dump.py | 4 +- test/functional/wallet-encryption.py | 6 +- test/functional/wallet-hd.py | 3 +- 11 files changed, 131 insertions(+), 172 deletions(-) diff --git a/test/functional/blockchain.py b/test/functional/blockchain.py index eeef05efd..a7034e6bc 100755 --- a/test/functional/blockchain.py +++ b/test/functional/blockchain.py @@ -21,15 +21,13 @@ from decimal import Decimal import http.client import subprocess -from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_framework import (BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT) from test_framework.util import ( assert_equal, assert_raises, assert_raises_jsonrpc, assert_is_hex_string, assert_is_hash_string, - bitcoind_processes, - BITCOIND_PROC_WAIT_TIMEOUT, ) @@ -141,13 +139,13 @@ class BlockchainTest(BitcoinTestFramework): self.nodes[0].generate(6) assert_equal(self.nodes[0].getblockcount(), 206) self.log.debug('Node should not stop at this height') - assert_raises(subprocess.TimeoutExpired, lambda: bitcoind_processes[0].wait(timeout=3)) + assert_raises(subprocess.TimeoutExpired, lambda: self.bitcoind_processes[0].wait(timeout=3)) try: self.nodes[0].generate(1) except (ConnectionError, http.client.BadStatusLine): pass # The node already shut down before response self.log.debug('Node should stop at this height...') - bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes[0] = self.start_node(0, self.options.tmpdir) assert_equal(self.nodes[0].getblockcount(), 207) diff --git a/test/functional/bumpfee.py b/test/functional/bumpfee.py index 569db7ced..9237f0924 100755 --- a/test/functional/bumpfee.py +++ b/test/functional/bumpfee.py @@ -42,7 +42,7 @@ class BumpFeeTest(BitcoinTestFramework): # Encrypt wallet for test_locked_wallet_fails test self.nodes[1].encryptwallet(WALLET_PASSPHRASE) - bitcoind_processes[1].wait() + self.bitcoind_processes[1].wait() self.nodes[1] = self.start_node(1, self.options.tmpdir, extra_args[1]) self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT) diff --git a/test/functional/dbcrash.py b/test/functional/dbcrash.py index 4a10743f0..6f877f836 100755 --- a/test/functional/dbcrash.py +++ b/test/functional/dbcrash.py @@ -88,7 +88,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework): # An exception here should mean the node is about to crash. # If bitcoind exits, then try again. wait_for_node_exit() # should raise an exception if bitcoind doesn't exit. - wait_for_node_exit(node_index, timeout=10) + self.wait_for_node_exit(node_index, timeout=10) self.crashed_on_restart += 1 time.sleep(1) @@ -140,7 +140,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework): if not self.submit_block_catch_error(i, block): # TODO: more carefully check that the crash is due to -dbcrashratio # (change the exit code perhaps, and check that here?) 
- wait_for_node_exit(i, timeout=30) + self.wait_for_node_exit(i, timeout=30) self.log.debug("Restarting node %d after block hash %s", i, block_hash) nodei_utxo_hash = self.restart_node(i, block_hash) assert nodei_utxo_hash is not None diff --git a/test/functional/fundrawtransaction.py b/test/functional/fundrawtransaction.py index 0a3166b89..0baab6d01 100755 --- a/test/functional/fundrawtransaction.py +++ b/test/functional/fundrawtransaction.py @@ -4,7 +4,7 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the fundrawtransaction RPC.""" -from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_framework import BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT from test_framework.util import * @@ -452,7 +452,7 @@ class RawTransactionsTest(BitcoinTestFramework): self.stop_node(2) self.stop_node(3) self.nodes[1].encryptwallet("test") - bitcoind_processes[1].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + self.bitcoind_processes[1].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir) # This test is not meant to test fee estimation and we'd like diff --git a/test/functional/keypool.py b/test/functional/keypool.py index f23a427d1..e8be55991 100755 --- a/test/functional/keypool.py +++ b/test/functional/keypool.py @@ -18,7 +18,7 @@ class KeyPoolTest(BitcoinTestFramework): # Encrypt wallet and wait to terminate nodes[0].encryptwallet('test') - bitcoind_processes[0].wait() + self.bitcoind_processes[0].wait() # Restart node 0 nodes[0] = self.start_node(0, self.options.tmpdir) # Keep creating keys diff --git a/test/functional/rpcbind_test.py b/test/functional/rpcbind_test.py index 198599010..951685aa7 100755 --- a/test/functional/rpcbind_test.py +++ b/test/functional/rpcbind_test.py @@ -37,7 +37,7 @@ class RPCBindTest(BitcoinTestFramework): base_args += ['-rpcallowip=' + x for x in allow_ips] binds = ['-rpcbind='+addr for addr in addresses] self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to) - pid = bitcoind_processes[0].pid + pid = self.bitcoind_processes[0].pid assert_equal(set(get_bind_addrs(pid)), set(expected)) self.stop_nodes() diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 37e59f79d..557c9db48 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -5,7 +5,9 @@ """Base class for RPC testing.""" from collections import deque +import errno from enum import Enum +import http.client import logging import optparse import os @@ -16,15 +18,16 @@ import tempfile import time import traceback +from .authproxy import JSONRPCException +from . import coverage from .util import ( - PortSeed, MAX_NODES, - bitcoind_processes, + PortSeed, + assert_equal, check_json_precision, connect_nodes_bi, disable_mocktime, disconnect_nodes, - enable_coverage, enable_mocktime, get_mocktime, get_rpc_proxy, @@ -34,15 +37,9 @@ from .util import ( p2p_port, rpc_url, set_node_times, - _start_node, - _start_nodes, - _stop_node, - _stop_nodes, sync_blocks, sync_mempools, - wait_for_bitcoind_start, ) -from .authproxy import JSONRPCException class TestStatus(Enum): PASSED = 1 @@ -53,6 +50,8 @@ TEST_EXIT_PASSED = 0 TEST_EXIT_FAILED = 1 TEST_EXIT_SKIPPED = 77 +BITCOIND_PROC_WAIT_TIMEOUT = 60 + class BitcoinTestFramework(object): """Base class for a bitcoin test script. 
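
Since BITCOIND_PROC_WAIT_TIMEOUT now lives in test_framework.py, tests that wait for a node to shut itself down import it from here instead of from util. A condensed sketch of the pattern used by the callers updated earlier in this patch (keypool.py, wallet-dump.py, wallet-encryption.py); the class name is a placeholder:

    from test_framework.test_framework import (BitcoinTestFramework,
                                                BITCOIND_PROC_WAIT_TIMEOUT)

    class ExampleEncryptTest(BitcoinTestFramework):    # hypothetical test name
        def run_test(self):
            # encryptwallet makes bitcoind shut itself down; wait on the
            # subprocess (now tracked in self.bitcoind_processes), then restart.
            self.nodes[0].encryptwallet('test')
            self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
            self.nodes[0] = self.start_node(0, self.options.tmpdir)
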
@@ -72,7 +71,8 @@ class BitcoinTestFramework(object): def __init__(self): self.num_nodes = 4 self.setup_clean_chain = False - self.nodes = None + self.nodes = [] + self.bitcoind_processes = {} def add_options(self, parser): pass @@ -98,7 +98,7 @@ class BitcoinTestFramework(object): extra_args = None if hasattr(self, "extra_args"): extra_args = self.extra_args - self.nodes = _start_nodes(self.num_nodes, self.options.tmpdir, extra_args) + self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args) def run_test(self): raise NotImplementedError @@ -130,9 +130,6 @@ class BitcoinTestFramework(object): self.add_options(parser) (self.options, self.args) = parser.parse_args() - if self.options.coveragedir: - enable_coverage(self.options.coveragedir) - PortSeed.n = self.options.port_seed os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH'] @@ -209,16 +206,88 @@ class BitcoinTestFramework(object): # Public helper methods. These can be accessed by the subclass test scripts. def start_node(self, i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None): - return _start_node(i, dirname, extra_args, rpchost, timewait, binary, stderr) + """Start a bitcoind and return RPC connection to it""" + + datadir = os.path.join(dirname, "node" + str(i)) + if binary is None: + binary = os.getenv("BITCOIND", "bitcoind") + args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i] + if extra_args is not None: + args.extend(extra_args) + self.bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) + self.log.debug("initialize_chain: bitcoind started, waiting for RPC to come up") + self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i, rpchost) + self.log.debug("initialize_chain: RPC successfully started") + proxy = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, timeout=timewait) + + if self.options.coveragedir: + coverage.write_all_rpc_commands(self.options.coveragedir, proxy) + + return proxy def start_nodes(self, num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): - return _start_nodes(num_nodes, dirname, extra_args, rpchost, timewait, binary) + """Start multiple bitcoinds, return RPC connections to them""" - def stop_node(self, num_node): - _stop_node(self.nodes[num_node], num_node) + if extra_args is None: + extra_args = [None] * num_nodes + if binary is None: + binary = [None] * num_nodes + assert_equal(len(extra_args), num_nodes) + assert_equal(len(binary), num_nodes) + rpcs = [] + try: + for i in range(num_nodes): + rpcs.append(self.start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) + except: + # If one node failed to start, stop the others + # TODO: abusing self.nodes in this way is a little hacky. 
+ # Eventually we should do a better job of tracking nodes + self.nodes.extend(rpcs) + self.stop_nodes() + self.nodes = [] + raise + return rpcs + + def stop_node(self, i): + """Stop a bitcoind test node""" + + self.log.debug("Stopping node %d" % i) + try: + self.nodes[i].stop() + except http.client.CannotSendRequest as e: + self.log.exception("Unable to stop node") + return_code = self.bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + assert_equal(return_code, 0) + del self.bitcoind_processes[i] def stop_nodes(self): - _stop_nodes(self.nodes) + """Stop multiple bitcoind test nodes""" + + for i in range(len(self.nodes)): + self.stop_node(i) + assert not self.bitcoind_processes.values() # All connections must be gone now + + def assert_start_raises_init_error(self, i, dirname, extra_args=None, expected_msg=None): + with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr: + try: + self.start_node(i, dirname, extra_args, stderr=log_stderr) + self.stop_node(i) + except Exception as e: + assert 'bitcoind exited' in str(e) # node must have shutdown + if expected_msg is not None: + log_stderr.seek(0) + stderr = log_stderr.read().decode('utf-8') + if expected_msg not in stderr: + raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr) + else: + if expected_msg is None: + assert_msg = "bitcoind should have exited with an error" + else: + assert_msg = "bitcoind should have exited with expected error " + expected_msg + raise AssertionError(assert_msg) + + def wait_for_node_exit(self, i, timeout): + self.bitcoind_processes[i].wait(timeout) def split_network(self): """ @@ -300,9 +369,9 @@ class BitcoinTestFramework(object): args = [os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"] if i > 0: args.append("-connect=127.0.0.1:" + str(p2p_port(0))) - bitcoind_processes[i] = subprocess.Popen(args) + self.bitcoind_processes[i] = subprocess.Popen(args) self.log.debug("initialize_chain: bitcoind started, waiting for RPC to come up") - wait_for_bitcoind_start(bitcoind_processes[i], datadir, i) + self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i) self.log.debug("initialize_chain: RPC successfully started") self.nodes = [] @@ -355,6 +424,30 @@ class BitcoinTestFramework(object): for i in range(num_nodes): initialize_datadir(test_dir, i) + def _wait_for_bitcoind_start(self, process, datadir, i, rpchost=None): + """Wait for bitcoind to start. + + This means that RPC is accessible and fully initialized. + Raise an exception if bitcoind exits during initialization.""" + while True: + if process.poll() is not None: + raise Exception('bitcoind exited with status %i during initialization' % process.returncode) + try: + # Check if .cookie file to be created + rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, coveragedir=self.options.coveragedir) + rpc.getblockcount() + break # break out of loop on success + except IOError as e: + if e.errno != errno.ECONNREFUSED: # Port not yet open? + raise # unknown IO error + except JSONRPCException as e: # Initialization phase + if e.error['code'] != -28: # RPC in warmup? + raise # unknown JSON RPC exception + except ValueError as e: # cookie file not found and no rpcuser or rpcassword. 
bitcoind still starting + if "No RPC credentials" not in str(e): + raise + time.sleep(0.25) + class ComparisonTestFramework(BitcoinTestFramework): """Test framework for doing p2p comparison testing diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index d02a130a1..fbdb3d7e6 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -7,22 +7,16 @@ from base64 import b64encode from binascii import hexlify, unhexlify from decimal import Decimal, ROUND_DOWN -import errno -import http.client import json import logging import os import random import re -import subprocess -import tempfile import time from . import coverage from .authproxy import AuthServiceProxy, JSONRPCException -COVERAGE_DIR = None - logger = logging.getLogger("TestFramework.utils") # The maximum number of nodes a single test can spawn @@ -32,9 +26,6 @@ PORT_MIN = 11000 # The number of ports to "reserve" for p2p and rpc, each PORT_RANGE = 5000 -BITCOIND_PROC_WAIT_TIMEOUT = 60 - - class PortSeed: # Must be initialized with a unique integer for each process n = None @@ -60,13 +51,7 @@ def disable_mocktime(): def get_mocktime(): return MOCKTIME -def enable_coverage(dirname): - """Maintain a log of which RPC calls are made during testing.""" - global COVERAGE_DIR - COVERAGE_DIR = dirname - - -def get_rpc_proxy(url, node_number, timeout=None): +def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None): """ Args: url (str): URL of the RPC server to call @@ -87,7 +72,7 @@ def get_rpc_proxy(url, node_number, timeout=None): proxy.url = url # store URL on proxy for info coverage_logfile = coverage.get_filename( - COVERAGE_DIR, node_number) if COVERAGE_DIR else None + coveragedir, node_number) if coveragedir else None return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) @@ -172,8 +157,6 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60): timeout -= wait raise AssertionError("Mempool sync failed") -bitcoind_processes = {} - def initialize_datadir(dirname, n): datadir = os.path.join(dirname, "node" + str(n)) if not os.path.isdir(datadir): @@ -222,121 +205,9 @@ def rpc_url(datadir, i, rpchost=None): host = rpchost return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) -def wait_for_bitcoind_start(process, datadir, i, rpchost=None): - ''' - Wait for bitcoind to start. This means that RPC is accessible and fully initialized. - Raise an exception if bitcoind exits during initialization. - ''' - while True: - if process.poll() is not None: - raise Exception('bitcoind exited with status %i during initialization' % process.returncode) - try: - # Check if .cookie file to be created - rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i) - rpc.getblockcount() - break # break out of loop on success - except IOError as e: - if e.errno != errno.ECONNREFUSED: # Port not yet open? - raise # unknown IO error - except JSONRPCException as e: # Initialization phase - if e.error['code'] != -28: # RPC in warmup? - raise # unknown JSON RPC exception - except ValueError as e: # cookie file not found and no rpcuser or rpcassword. 
bitcoind still starting - if "No RPC credentials" not in str(e): - raise - time.sleep(0.25) - -def wait_for_node_exit(node_index, timeout): - bitcoind_processes[node_index].wait(timeout) - -def _start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None): - """Start a bitcoind and return RPC connection to it - - This function should only be called from within test_framework, not by individual test scripts.""" - - datadir = os.path.join(dirname, "node" + str(i)) - if binary is None: - binary = os.getenv("BITCOIND", "bitcoind") - args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i] - if extra_args is not None: - args.extend(extra_args) - bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) - logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up") - wait_for_bitcoind_start(bitcoind_processes[i], datadir, i, rpchost) - logger.debug("initialize_chain: RPC successfully started") - proxy = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, timeout=timewait) - - if COVERAGE_DIR: - coverage.write_all_rpc_commands(COVERAGE_DIR, proxy) - - return proxy - -def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=None): - with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr: - try: - node = _start_node(i, dirname, extra_args, stderr=log_stderr) - _stop_node(node, i) - except Exception as e: - assert 'bitcoind exited' in str(e) # node must have shutdown - if expected_msg is not None: - log_stderr.seek(0) - stderr = log_stderr.read().decode('utf-8') - if expected_msg not in stderr: - raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr) - else: - if expected_msg is None: - assert_msg = "bitcoind should have exited with an error" - else: - assert_msg = "bitcoind should have exited with expected error " + expected_msg - raise AssertionError(assert_msg) - -def _start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): - """Start multiple bitcoinds, return RPC connections to them - - This function should only be called from within test_framework, not by individual test scripts.""" - - if extra_args is None: - extra_args = [None] * num_nodes - if binary is None: - binary = [None] * num_nodes - assert_equal(len(extra_args), num_nodes) - assert_equal(len(binary), num_nodes) - rpcs = [] - try: - for i in range(num_nodes): - rpcs.append(_start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) - except: - # If one node failed to start, stop the others - _stop_nodes(rpcs) - raise - return rpcs - def log_filename(dirname, n_node, logname): return os.path.join(dirname, "node" + str(n_node), "regtest", logname) -def _stop_node(node, i): - """Stop a bitcoind test node - - This function should only be called from within test_framework, not by individual test scripts.""" - - logger.debug("Stopping node %d" % i) - try: - node.stop() - except http.client.CannotSendRequest as e: - logger.exception("Unable to stop node") - return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) - del bitcoind_processes[i] - assert_equal(return_code, 0) - -def _stop_nodes(nodes): - """Stop multiple bitcoind test nodes - - This function should only be called from within test_framework, not by individual test scripts.""" - - for i, node in enumerate(nodes): - 
_stop_node(node, i) - assert not bitcoind_processes.values() # All connections must be gone now - def set_node_times(nodes, t): for node in nodes: node.setmocktime(t) diff --git a/test/functional/wallet-dump.py b/test/functional/wallet-dump.py index 9cb32d465..569cc46e6 100755 --- a/test/functional/wallet-dump.py +++ b/test/functional/wallet-dump.py @@ -7,7 +7,7 @@ import os from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import (assert_equal, bitcoind_processes) +from test_framework.util import assert_equal def read_dump(file_name, addrs, hd_master_addr_old): @@ -95,7 +95,7 @@ class WalletDumpTest(BitcoinTestFramework): #encrypt wallet, restart, unlock and dump self.nodes[0].encryptwallet('test') - bitcoind_processes[0].wait() + self.bitcoind_processes[0].wait() self.nodes[0] = self.start_node(0, self.options.tmpdir, self.extra_args[0]) self.nodes[0].walletpassphrase('test', 10) # Should be a no-op: diff --git a/test/functional/wallet-encryption.py b/test/functional/wallet-encryption.py index 33872e3c9..ba72918fe 100755 --- a/test/functional/wallet-encryption.py +++ b/test/functional/wallet-encryption.py @@ -6,12 +6,10 @@ import time -from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_framework import BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT from test_framework.util import ( assert_equal, assert_raises_jsonrpc, - bitcoind_processes, - BITCOIND_PROC_WAIT_TIMEOUT, ) class WalletEncryptionTest(BitcoinTestFramework): @@ -33,7 +31,7 @@ class WalletEncryptionTest(BitcoinTestFramework): # Encrypt the wallet self.nodes[0].encryptwallet(passphrase) - bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) self.nodes[0] = self.start_node(0, self.options.tmpdir) # Test that the wallet is encrypted diff --git a/test/functional/wallet-hd.py b/test/functional/wallet-hd.py index e7ec72a24..dfd3dc83c 100755 --- a/test/functional/wallet-hd.py +++ b/test/functional/wallet-hd.py @@ -8,7 +8,6 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, connect_nodes_bi, - assert_start_raises_init_error ) import os import shutil @@ -27,7 +26,7 @@ class WalletHDTest(BitcoinTestFramework): # Make sure can't switch off usehd after wallet creation self.stop_node(1) - assert_start_raises_init_error(1, self.options.tmpdir, ['-usehd=0'], 'already existing HD wallet') + self.assert_start_raises_init_error(1, self.options.tmpdir, ['-usehd=0'], 'already existing HD wallet') self.nodes[1] = self.start_node(1, self.options.tmpdir, self.extra_args[1]) connect_nodes_bi(self.nodes, 0, 1) From 0d473c539ed0f53168aa3bae8474f214831dec1f Mon Sep 17 00:00:00 2001 From: John Newbery Date: Wed, 7 Jun 2017 13:43:03 -0400 Subject: [PATCH 4/6] [tests] move mocktime property and functions to BitcoinTestFramework --- test/functional/listtransactions.py | 2 +- test/functional/receivedby.py | 2 +- .../test_framework/test_framework.py | 27 ++++++++++++++----- test/functional/test_framework/util.py | 21 --------------- 4 files changed, 22 insertions(+), 30 deletions(-) diff --git a/test/functional/listtransactions.py b/test/functional/listtransactions.py index f69f1c572..f75a8e29c 100755 --- a/test/functional/listtransactions.py +++ b/test/functional/listtransactions.py @@ -23,7 +23,7 @@ class ListTransactionsTest(BitcoinTestFramework): def setup_nodes(self): #This test requires mocktime - enable_mocktime() + 
self.enable_mocktime() self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir) def run_test(self): diff --git a/test/functional/receivedby.py b/test/functional/receivedby.py index 2cad6269a..19d99c9c9 100755 --- a/test/functional/receivedby.py +++ b/test/functional/receivedby.py @@ -31,7 +31,7 @@ class ReceivedByTest(BitcoinTestFramework): def setup_nodes(self): #This test requires mocktime - enable_mocktime() + self.enable_mocktime() self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir) def run_test(self): diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 557c9db48..00807b022 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -26,10 +26,7 @@ from .util import ( assert_equal, check_json_precision, connect_nodes_bi, - disable_mocktime, disconnect_nodes, - enable_mocktime, - get_mocktime, get_rpc_proxy, initialize_datadir, get_datadir_path, @@ -73,6 +70,7 @@ class BitcoinTestFramework(object): self.setup_clean_chain = False self.nodes = [] self.bitcoind_processes = {} + self.mocktime = 0 def add_options(self, parser): pass @@ -211,7 +209,7 @@ class BitcoinTestFramework(object): datadir = os.path.join(dirname, "node" + str(i)) if binary is None: binary = os.getenv("BITCOIND", "bitcoind") - args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i] + args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(self.mocktime), "-uacomment=testnode%d" % i] if extra_args is not None: args.extend(extra_args) self.bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) @@ -312,6 +310,21 @@ class BitcoinTestFramework(object): sync_blocks(group) sync_mempools(group) + def enable_mocktime(self): + """Enable mocktime for the script. + + mocktime may be needed for scripts that use the cached version of the + blockchain. If the cached version of the blockchain is used without + mocktime then the mempools will not sync due to IBD. + + For backwared compatibility of the python scripts with previous + versions of the cache, this helper function sets mocktime to Jan 1, + 2014 + (201 * 10 * 60)""" + self.mocktime = 1388534400 + (201 * 10 * 60) + + def disable_mocktime(self): + self.mocktime = 0 + # Private helper methods. These should not be accessed by the subclass test scripts. 
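
Tests that rely on the cached chain call the new method from their setup_nodes override, as in the listtransactions.py and receivedby.py hunks above; a minimal sketch (class name hypothetical):

    from test_framework.test_framework import BitcoinTestFramework

    class ExampleMocktimeTest(BitcoinTestFramework):    # hypothetical test name
        def setup_nodes(self):
            # This test requires mocktime (it runs against the cached blockchain).
            self.enable_mocktime()
            self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
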
def _start_logging(self): @@ -389,8 +402,8 @@ class BitcoinTestFramework(object): # # blocks are created with timestamps 10 minutes apart # starting from 2010 minutes in the past - enable_mocktime() - block_time = get_mocktime() - (201 * 10 * 60) + self.enable_mocktime() + block_time = self.mocktime - (201 * 10 * 60) for i in range(2): for peer in range(4): for j in range(25): @@ -403,7 +416,7 @@ class BitcoinTestFramework(object): # Shut them down, and clean up cache directories: self.stop_nodes() self.nodes = [] - disable_mocktime() + self.disable_mocktime() for i in range(MAX_NODES): os.remove(log_filename(cachedir, i, "debug.log")) os.remove(log_filename(cachedir, i, "db.log")) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index fbdb3d7e6..cca1e2b89 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -30,27 +30,6 @@ class PortSeed: # Must be initialized with a unique integer for each process n = None -# Set Mocktime default to OFF. -# MOCKTIME is only needed for scripts that use the -# cached version of the blockchain. If the cached -# version of the blockchain is used without MOCKTIME -# then the mempools will not sync due to IBD. -MOCKTIME = 0 - -def enable_mocktime(): - # For backwared compatibility of the python scripts - # with previous versions of the cache, set MOCKTIME - # to Jan 1, 2014 + (201 * 10 * 60) - global MOCKTIME - MOCKTIME = 1388534400 + (201 * 10 * 60) - -def disable_mocktime(): - global MOCKTIME - MOCKTIME = 0 - -def get_mocktime(): - return MOCKTIME - def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None): """ Args: From 05b8c081b435e87b08335e9f9b62a55fa1d48ecc Mon Sep 17 00:00:00 2001 From: John Newbery Date: Wed, 7 Jun 2017 13:51:25 -0400 Subject: [PATCH 5/6] [tests] reorganize utils.py module (code move only) This commit re-organizes the utils.py module into logical sections. --- test/functional/test_framework/util.py | 541 +++++++++++++------------ 1 file changed, 279 insertions(+), 262 deletions(-) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index cca1e2b89..8080c8c97 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -19,264 +19,8 @@ from .authproxy import AuthServiceProxy, JSONRPCException logger = logging.getLogger("TestFramework.utils") -# The maximum number of nodes a single test can spawn -MAX_NODES = 8 -# Don't assign rpc or p2p ports lower than this -PORT_MIN = 11000 -# The number of ports to "reserve" for p2p and rpc, each -PORT_RANGE = 5000 - -class PortSeed: - # Must be initialized with a unique integer for each process - n = None - -def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None): - """ - Args: - url (str): URL of the RPC server to call - node_number (int): the node number (or id) that this calls to - - Kwargs: - timeout (int): HTTP timeout in seconds - - Returns: - AuthServiceProxy. convenience object for making RPC calls. 
- - """ - proxy_kwargs = {} - if timeout is not None: - proxy_kwargs['timeout'] = timeout - - proxy = AuthServiceProxy(url, **proxy_kwargs) - proxy.url = url # store URL on proxy for info - - coverage_logfile = coverage.get_filename( - coveragedir, node_number) if coveragedir else None - - return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) - - -def p2p_port(n): - assert(n <= MAX_NODES) - return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) - -def rpc_port(n): - return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) - -def check_json_precision(): - """Make sure json library being used does not lose precision converting BTC values""" - n = Decimal("20000000.00000003") - satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8) - if satoshis != 2000000000000003: - raise RuntimeError("JSON encode/decode loses precision") - -def count_bytes(hex_string): - return len(bytearray.fromhex(hex_string)) - -def bytes_to_hex_str(byte_str): - return hexlify(byte_str).decode('ascii') - -def hex_str_to_bytes(hex_str): - return unhexlify(hex_str.encode('ascii')) - -def str_to_b64str(string): - return b64encode(string.encode('utf-8')).decode('ascii') - -def sync_blocks(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same tip. - - sync_blocks needs to be called with an rpc_connections set that has least - one node already synced to the latest, stable tip, otherwise there's a - chance it might return before all nodes are stably synced. - """ - # Use getblockcount() instead of waitforblockheight() to determine the - # initial max height because the two RPCs look at different internal global - # variables (chainActive vs latestBlock) and the former gets updated - # earlier. 
- maxheight = max(x.getblockcount() for x in rpc_connections) - start_time = cur_time = time.time() - while cur_time <= start_time + timeout: - tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections] - if all(t["height"] == maxheight for t in tips): - if all(t["hash"] == tips[0]["hash"] for t in tips): - return - raise AssertionError("Block sync failed, mismatched block hashes:{}".format( - "".join("\n {!r}".format(tip) for tip in tips))) - cur_time = time.time() - raise AssertionError("Block sync to height {} timed out:{}".format( - maxheight, "".join("\n {!r}".format(tip) for tip in tips))) - -def sync_chain(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same best block - """ - while timeout > 0: - best_hash = [x.getbestblockhash() for x in rpc_connections] - if best_hash == [best_hash[0]] * len(best_hash): - return - time.sleep(wait) - timeout -= wait - raise AssertionError("Chain sync failed: Best block hashes don't match") - -def sync_mempools(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same transactions in their memory - pools - """ - while timeout > 0: - pool = set(rpc_connections[0].getrawmempool()) - num_match = 1 - for i in range(1, len(rpc_connections)): - if set(rpc_connections[i].getrawmempool()) == pool: - num_match = num_match + 1 - if num_match == len(rpc_connections): - return - time.sleep(wait) - timeout -= wait - raise AssertionError("Mempool sync failed") - -def initialize_datadir(dirname, n): - datadir = os.path.join(dirname, "node" + str(n)) - if not os.path.isdir(datadir): - os.makedirs(datadir) - with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f: - f.write("regtest=1\n") - f.write("port=" + str(p2p_port(n)) + "\n") - f.write("rpcport=" + str(rpc_port(n)) + "\n") - f.write("listenonion=0\n") - return datadir - -def get_datadir_path(dirname, n): - return os.path.join(dirname, "node" + str(n)) - -def get_auth_cookie(datadir, n): - user = None - password = None - if os.path.isfile(os.path.join(datadir, "bitcoin.conf")): - with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f: - for line in f: - if line.startswith("rpcuser="): - assert user is None # Ensure that there is only one rpcuser line - user = line.split("=")[1].strip("\n") - if line.startswith("rpcpassword="): - assert password is None # Ensure that there is only one rpcpassword line - password = line.split("=")[1].strip("\n") - if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")): - with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f: - userpass = f.read() - split_userpass = userpass.split(':') - user = split_userpass[0] - password = split_userpass[1] - if user is None or password is None: - raise ValueError("No RPC credentials") - return user, password - -def rpc_url(datadir, i, rpchost=None): - rpc_u, rpc_p = get_auth_cookie(datadir, i) - host = '127.0.0.1' - port = rpc_port(i) - if rpchost: - parts = rpchost.split(':') - if len(parts) == 2: - host, port = parts - else: - host = rpchost - return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) - -def log_filename(dirname, n_node, logname): - return os.path.join(dirname, "node" + str(n_node), "regtest", logname) - -def set_node_times(nodes, t): - for node in nodes: - node.setmocktime(t) - -def disconnect_nodes(from_connection, node_num): - for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]: - from_connection.disconnectnode(nodeid=peer_id) - - for 
_ in range(50): - if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []: - break - time.sleep(0.1) - else: - raise AssertionError("timed out waiting for disconnect") - -def connect_nodes(from_connection, node_num): - ip_port = "127.0.0.1:" + str(p2p_port(node_num)) - from_connection.addnode(ip_port, "onetry") - # poll until version handshake complete to avoid race conditions - # with transaction relaying - while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()): - time.sleep(0.1) - -def connect_nodes_bi(nodes, a, b): - connect_nodes(nodes[a], b) - connect_nodes(nodes[b], a) - -def find_output(node, txid, amount): - """ - Return index to output of txid with value amount - Raises exception if there is none. - """ - txdata = node.getrawtransaction(txid, 1) - for i in range(len(txdata["vout"])): - if txdata["vout"][i]["value"] == amount: - return i - raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount))) - -def gather_inputs(from_node, amount_needed, confirmations_required=1): - """ - Return a random set of unspent txouts that are enough to pay amount_needed - """ - assert(confirmations_required >= 0) - utxo = from_node.listunspent(confirmations_required) - random.shuffle(utxo) - inputs = [] - total_in = Decimal("0.00000000") - while total_in < amount_needed and len(utxo) > 0: - t = utxo.pop() - total_in += t["amount"] - inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]}) - if total_in < amount_needed: - raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in)) - return (total_in, inputs) - -def make_change(from_node, amount_in, amount_out, fee): - """ - Create change output(s), return them - """ - outputs = {} - amount = amount_out + fee - change = amount_in - amount - if change > amount * 2: - # Create an extra change output to break up big inputs - change_address = from_node.getnewaddress() - # Split change in two, being careful of rounding: - outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) - change = amount_in - amount - outputs[change_address] - if change > 0: - outputs[from_node.getnewaddress()] = change - return outputs - -def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): - """ - Create a random transaction. 
- Returns (txid, hex-encoded-transaction-data, fee) - """ - from_node = random.choice(nodes) - to_node = random.choice(nodes) - fee = min_fee + fee_increment * random.randint(0, fee_variants) - - (total_in, inputs) = gather_inputs(from_node, amount + fee) - outputs = make_change(from_node, total_in, amount, fee) - outputs[to_node.getnewaddress()] = float(amount) - - rawtx = from_node.createrawtransaction(inputs, outputs) - signresult = from_node.signrawtransaction(rawtx) - txid = from_node.sendrawtransaction(signresult["hex"], True) - - return (txid, signresult["hex"], fee) +# Assert functions +################## def assert_fee_amount(fee, tx_size, fee_per_kB): """Assert the fee was in range""" @@ -388,9 +132,286 @@ def assert_array_result(object_array, to_match, expected, should_not_find=False) if num_matched > 0 and should_not_find: raise AssertionError("Objects were found %s" % (str(to_match))) +# Utility functions +################### + +def check_json_precision(): + """Make sure json library being used does not lose precision converting BTC values""" + n = Decimal("20000000.00000003") + satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8) + if satoshis != 2000000000000003: + raise RuntimeError("JSON encode/decode loses precision") + +def count_bytes(hex_string): + return len(bytearray.fromhex(hex_string)) + +def bytes_to_hex_str(byte_str): + return hexlify(byte_str).decode('ascii') + +def hex_str_to_bytes(hex_str): + return unhexlify(hex_str.encode('ascii')) + +def str_to_b64str(string): + return b64encode(string.encode('utf-8')).decode('ascii') + def satoshi_round(amount): return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) +# RPC/P2P connection constants and functions +############################################ + +# The maximum number of nodes a single test can spawn +MAX_NODES = 8 +# Don't assign rpc or p2p ports lower than this +PORT_MIN = 11000 +# The number of ports to "reserve" for p2p and rpc, each +PORT_RANGE = 5000 + +class PortSeed: + # Must be initialized with a unique integer for each process + n = None + +def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None): + """ + Args: + url (str): URL of the RPC server to call + node_number (int): the node number (or id) that this calls to + + Kwargs: + timeout (int): HTTP timeout in seconds + + Returns: + AuthServiceProxy. convenience object for making RPC calls. 
+ + """ + proxy_kwargs = {} + if timeout is not None: + proxy_kwargs['timeout'] = timeout + + proxy = AuthServiceProxy(url, **proxy_kwargs) + proxy.url = url # store URL on proxy for info + + coverage_logfile = coverage.get_filename( + coveragedir, node_number) if coveragedir else None + + return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) + +def p2p_port(n): + assert(n <= MAX_NODES) + return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) + +def rpc_port(n): + return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) + +def rpc_auth_pair(n): + return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n) + +def rpc_url(datadir, i, rpchost=None): + rpc_u, rpc_p = get_auth_cookie(datadir, i) + host = '127.0.0.1' + port = rpc_port(i) + if rpchost: + parts = rpchost.split(':') + if len(parts) == 2: + host, port = parts + else: + host = rpchost + return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) + +# Node functions +################ + +def initialize_datadir(dirname, n): + datadir = os.path.join(dirname, "node" + str(n)) + if not os.path.isdir(datadir): + os.makedirs(datadir) + with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f: + f.write("regtest=1\n") + f.write("port=" + str(p2p_port(n)) + "\n") + f.write("rpcport=" + str(rpc_port(n)) + "\n") + f.write("listenonion=0\n") + return datadir + +def get_datadir_path(dirname, n): + return os.path.join(dirname, "node" + str(n)) + +def get_auth_cookie(datadir, n): + user = None + password = None + if os.path.isfile(os.path.join(datadir, "bitcoin.conf")): + with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f: + for line in f: + if line.startswith("rpcuser="): + assert user is None # Ensure that there is only one rpcuser line + user = line.split("=")[1].strip("\n") + if line.startswith("rpcpassword="): + assert password is None # Ensure that there is only one rpcpassword line + password = line.split("=")[1].strip("\n") + if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")): + with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f: + userpass = f.read() + split_userpass = userpass.split(':') + user = split_userpass[0] + password = split_userpass[1] + if user is None or password is None: + raise ValueError("No RPC credentials") + return user, password + +def log_filename(dirname, n_node, logname): + return os.path.join(dirname, "node" + str(n_node), "regtest", logname) + +def get_bip9_status(node, key): + info = node.getblockchaininfo() + return info['bip9_softforks'][key] + +def set_node_times(nodes, t): + for node in nodes: + node.setmocktime(t) + +def disconnect_nodes(from_connection, node_num): + for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]: + from_connection.disconnectnode(nodeid=peer_id) + + for _ in range(50): + if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []: + break + time.sleep(0.1) + else: + raise AssertionError("timed out waiting for disconnect") + +def connect_nodes(from_connection, node_num): + ip_port = "127.0.0.1:" + str(p2p_port(node_num)) + from_connection.addnode(ip_port, "onetry") + # poll until version handshake complete to avoid race conditions + # with transaction relaying + while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()): + time.sleep(0.1) + +def connect_nodes_bi(nodes, a, b): + connect_nodes(nodes[a], b) + connect_nodes(nodes[b], a) + +def 
sync_blocks(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same tip. + + sync_blocks needs to be called with an rpc_connections set that has least + one node already synced to the latest, stable tip, otherwise there's a + chance it might return before all nodes are stably synced. + """ + # Use getblockcount() instead of waitforblockheight() to determine the + # initial max height because the two RPCs look at different internal global + # variables (chainActive vs latestBlock) and the former gets updated + # earlier. + maxheight = max(x.getblockcount() for x in rpc_connections) + start_time = cur_time = time.time() + while cur_time <= start_time + timeout: + tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections] + if all(t["height"] == maxheight for t in tips): + if all(t["hash"] == tips[0]["hash"] for t in tips): + return + raise AssertionError("Block sync failed, mismatched block hashes:{}".format( + "".join("\n {!r}".format(tip) for tip in tips))) + cur_time = time.time() + raise AssertionError("Block sync to height {} timed out:{}".format( + maxheight, "".join("\n {!r}".format(tip) for tip in tips))) + +def sync_chain(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same best block + """ + while timeout > 0: + best_hash = [x.getbestblockhash() for x in rpc_connections] + if best_hash == [best_hash[0]] * len(best_hash): + return + time.sleep(wait) + timeout -= wait + raise AssertionError("Chain sync failed: Best block hashes don't match") + +def sync_mempools(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same transactions in their memory + pools + """ + while timeout > 0: + pool = set(rpc_connections[0].getrawmempool()) + num_match = 1 + for i in range(1, len(rpc_connections)): + if set(rpc_connections[i].getrawmempool()) == pool: + num_match = num_match + 1 + if num_match == len(rpc_connections): + return + time.sleep(wait) + timeout -= wait + raise AssertionError("Mempool sync failed") + +# Transaction/Block functions +############################# + +def find_output(node, txid, amount): + """ + Return index to output of txid with value amount + Raises exception if there is none. 
+ """ + txdata = node.getrawtransaction(txid, 1) + for i in range(len(txdata["vout"])): + if txdata["vout"][i]["value"] == amount: + return i + raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount))) + +def gather_inputs(from_node, amount_needed, confirmations_required=1): + """ + Return a random set of unspent txouts that are enough to pay amount_needed + """ + assert(confirmations_required >= 0) + utxo = from_node.listunspent(confirmations_required) + random.shuffle(utxo) + inputs = [] + total_in = Decimal("0.00000000") + while total_in < amount_needed and len(utxo) > 0: + t = utxo.pop() + total_in += t["amount"] + inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]}) + if total_in < amount_needed: + raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in)) + return (total_in, inputs) + +def make_change(from_node, amount_in, amount_out, fee): + """ + Create change output(s), return them + """ + outputs = {} + amount = amount_out + fee + change = amount_in - amount + if change > amount * 2: + # Create an extra change output to break up big inputs + change_address = from_node.getnewaddress() + # Split change in two, being careful of rounding: + outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + change = amount_in - amount - outputs[change_address] + if change > 0: + outputs[from_node.getnewaddress()] = change + return outputs + +def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): + """ + Create a random transaction. + Returns (txid, hex-encoded-transaction-data, fee) + """ + from_node = random.choice(nodes) + to_node = random.choice(nodes) + fee = min_fee + fee_increment * random.randint(0, fee_variants) + + (total_in, inputs) = gather_inputs(from_node, amount + fee) + outputs = make_change(from_node, total_in, amount, fee) + outputs[to_node.getnewaddress()] = float(amount) + + rawtx = from_node.createrawtransaction(inputs, outputs) + signresult = from_node.signrawtransaction(rawtx) + txid = from_node.sendrawtransaction(signresult["hex"], True) + + return (txid, signresult["hex"], fee) + # Helper to create at least "count" utxos # Pass in a fee that is sufficient for relay and mining new transactions. def create_confirmed_utxos(fee, node, count): @@ -480,7 +501,3 @@ def mine_large_block(node, utxos=None): fee = 100 * node.getnetworkinfo()["relayfee"] create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee) node.generate(1) - -def get_bip9_status(node, key): - info = node.getblockchaininfo() - return info['bip9_softforks'][key] From 5ba83c1d434be508064cf039d73f215f0b199c2d Mon Sep 17 00:00:00 2001 From: John Newbery Date: Thu, 29 Jun 2017 15:06:33 +0100 Subject: [PATCH 6/6] [tests] fix nits. Thanks to Marco Falke. 
--- test/functional/test_framework/test_framework.py | 2 +- test/functional/test_framework/util.py | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 00807b022..8d698a732 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -255,8 +255,8 @@ class BitcoinTestFramework(object): except http.client.CannotSendRequest as e: self.log.exception("Unable to stop node") return_code = self.bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) - assert_equal(return_code, 0) del self.bitcoind_processes[i] + assert_equal(return_code, 0) def stop_nodes(self): """Stop multiple bitcoind test nodes""" diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 8080c8c97..3c918b48f 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -203,9 +203,6 @@ def p2p_port(n): def rpc_port(n): return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) -def rpc_auth_pair(n): - return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n) - def rpc_url(datadir, i, rpchost=None): rpc_u, rpc_p = get_auth_cookie(datadir, i) host = '127.0.0.1'
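
For context, a minimal sketch of how a functional test typically combines the helpers reorganized above (sync_blocks(), find_output() and assert_equal() from util.py), using the BitcoinTestFramework subclass pattern seen elsewhere in this series. The test class name, block counts and amounts are illustrative assumptions only and are not part of any patch.

#!/usr/bin/env python3
# Illustrative sketch only; assumes the 2017-era framework API shown in this
# patch series. ExampleUtilUsage and the amounts below are invented.
from decimal import Decimal

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, find_output, sync_blocks

class ExampleUtilUsage(BitcoinTestFramework):
    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 2

    def run_test(self):
        # Mine 101 blocks on node0 so its first coinbase matures, then wait
        # until node1 reports the same tip before spending.
        self.nodes[0].generate(101)
        sync_blocks(self.nodes)

        # Pay node1 and locate the index of the 10 BTC output while the
        # transaction is still in node0's mempool.
        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
        vout = find_output(self.nodes[0], txid, Decimal("10.00000000"))
        assert_equal(
            self.nodes[0].getrawtransaction(txid, 1)["vout"][vout]["value"],
            Decimal("10.00000000"))

        # Confirm the payment and re-sync both nodes to the new tip.
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)

if __name__ == '__main__':
    ExampleUtilUsage().main()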