
[tests] fix flake8 warnings in test_framework.py and util.py

Branch: 0.15
Author: John Newbery, 8 years ago
Commit: f1fe5368f1

Changed files:
  1. test/functional/test_framework/test_framework.py (37 lines changed)
  2. test/functional/test_framework/util.py (166 lines changed)
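
The warnings addressed here are the kind flake8 reports for these two files. As a rough way to re-check them locally (a sketch for illustration, not part of this commit; it assumes flake8 is installed and that the command is run from the repository root), something like the following could be used:

import subprocess

# Run flake8 over the two files touched by this commit; an exit code of 0
# means flake8 reported no warnings for them.
files = [
    "test/functional/test_framework/test_framework.py",
    "test/functional/test_framework/util.py",
]
result = subprocess.run(["flake8"] + files)
print("flake8 exit code:", result.returncode)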

test/functional/test_framework/test_framework.py (37 lines changed)

@@ -14,6 +14,7 @@ import subprocess
 import sys
 import tempfile
 import time
+import traceback
 from .util import (
     PortSeed,
@@ -77,7 +78,7 @@ class BitcoinTestFramework(object):
         pass

     def setup_chain(self):
-        self.log.info("Initializing test directory "+self.options.tmpdir)
+        self.log.info("Initializing test directory " + self.options.tmpdir)
         if self.setup_clean_chain:
             self._initialize_chain_clean(self.options.tmpdir, self.num_nodes)
         else:
@@ -111,9 +112,9 @@ class BitcoinTestFramework(object):
                           help="Leave bitcoinds and test.* datadir on exit or error")
         parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
                           help="Don't stop bitcoinds after the test execution")
-        parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
+        parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"),
                           help="Source directory containing bitcoind/bitcoin-cli (default: %default)")
-        parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../cache"),
+        parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
                           help="Directory for caching pregenerated datadirs")
         parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
         parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO",
@@ -134,7 +135,7 @@ class BitcoinTestFramework(object):
         PortSeed.n = self.options.port_seed

-        os.environ['PATH'] = self.options.srcdir+":"+self.options.srcdir+"/qt:"+os.environ['PATH']
+        os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH']

         check_json_precision()
@@ -188,7 +189,7 @@ class BitcoinTestFramework(object):
                 for fn in filenames:
                     try:
                         with open(fn, 'r') as f:
-                            print("From" , fn, ":")
+                            print("From", fn, ":")
                             print("".join(deque(f, MAX_LINES_TO_PRINT)))
                     except OSError:
                         print("Opening file %s failed." % fn)
@@ -257,7 +258,7 @@ class BitcoinTestFramework(object):
         ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
         ch.setLevel(ll)
         # Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted)
-        formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+        formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
         formatter.converter = time.gmtime
         fh.setFormatter(formatter)
         ch.setFormatter(formatter)
@@ -354,18 +355,13 @@ class BitcoinTestFramework(object):
         for i in range(num_nodes):
             initialize_datadir(test_dir, i)

-# Test framework for doing p2p comparison testing, which sets up some bitcoind
-# binaries:
-# 1 binary: test binary
-# 2 binaries: 1 test binary, 1 ref binary
-# n>2 binaries: 1 test binary, n-1 ref binaries
-class SkipTest(Exception):
-    """This exception is raised to skip a test"""
-    def __init__(self, message):
-        self.message = message
-
 class ComparisonTestFramework(BitcoinTestFramework):
+    """Test framework for doing p2p comparison testing
+
+    Sets up some bitcoind binaries:
+    - 1 binary: test binary
+    - 2 binaries: 1 test binary, 1 ref binary
+    - n>2 binaries: 1 test binary, n-1 ref binaries"""

     def __init__(self):
         super().__init__()
@@ -387,4 +383,9 @@ class ComparisonTestFramework(BitcoinTestFramework):
         self.nodes = self.start_nodes(
             self.num_nodes, self.options.tmpdir, extra_args,
             binary=[self.options.testbinary] +
-            [self.options.refbinary]*(self.num_nodes-1))
+            [self.options.refbinary] * (self.num_nodes - 1))
+
+class SkipTest(Exception):
+    """This exception is raised to skip a test"""
+    def __init__(self, message):
+        self.message = message

test/functional/test_framework/util.py (166 lines changed)

@@ -4,20 +4,19 @@
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Helpful routines for regression testing."""

-import os
-from binascii import hexlify, unhexlify
 from base64 import b64encode
+from binascii import hexlify, unhexlify
 from decimal import Decimal, ROUND_DOWN
-import json
+import errno
 import http.client
+import json
+import logging
+import os
 import random
+import re
 import subprocess
 import tempfile
 import time
-import re
-import errno
-import logging

 from . import coverage
 from .authproxy import AuthServiceProxy, JSONRPCException
@@ -40,17 +39,17 @@ class PortSeed:
     # Must be initialized with a unique integer for each process
     n = None

-#Set Mocktime default to OFF.
-#MOCKTIME is only needed for scripts that use the
-#cached version of the blockchain. If the cached
-#version of the blockchain is used without MOCKTIME
-#then the mempools will not sync due to IBD.
+# Set Mocktime default to OFF.
+# MOCKTIME is only needed for scripts that use the
+# cached version of the blockchain. If the cached
+# version of the blockchain is used without MOCKTIME
+# then the mempools will not sync due to IBD.
 MOCKTIME = 0

 def enable_mocktime():
-    #For backwared compatibility of the python scripts
-    #with previous versions of the cache, set MOCKTIME
-    #to Jan 1, 2014 + (201 * 10 * 60)
+    # For backwared compatibility of the python scripts
+    # with previous versions of the cache, set MOCKTIME
+    # to Jan 1, 2014 + (201 * 10 * 60)
     global MOCKTIME
     MOCKTIME = 1388534400 + (201 * 10 * 60)
@@ -103,7 +102,7 @@ def rpc_port(n):

 def check_json_precision():
     """Make sure json library being used does not lose precision converting BTC values"""
     n = Decimal("20000000.00000003")
-    satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
+    satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
     if satoshis != 2000000000000003:
         raise RuntimeError("JSON encode/decode loses precision")
@@ -150,7 +149,7 @@ def sync_chain(rpc_connections, *, wait=1, timeout=60):
     """
     while timeout > 0:
         best_hash = [x.getbestblockhash() for x in rpc_connections]
-        if best_hash == [best_hash[0]]*len(best_hash):
+        if best_hash == [best_hash[0]] * len(best_hash):
             return
         time.sleep(wait)
         timeout -= wait
@@ -166,7 +165,7 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60):
         num_match = 1
         for i in range(1, len(rpc_connections)):
             if set(rpc_connections[i].getrawmempool()) == pool:
-                num_match = num_match+1
+                num_match = num_match + 1
         if num_match == len(rpc_connections):
             return
         time.sleep(wait)
@@ -176,18 +175,18 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60):

 bitcoind_processes = {}

 def initialize_datadir(dirname, n):
-    datadir = os.path.join(dirname, "node"+str(n))
+    datadir = os.path.join(dirname, "node" + str(n))
     if not os.path.isdir(datadir):
         os.makedirs(datadir)
     with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
         f.write("regtest=1\n")
-        f.write("port="+str(p2p_port(n))+"\n")
-        f.write("rpcport="+str(rpc_port(n))+"\n")
+        f.write("port=" + str(p2p_port(n)) + "\n")
+        f.write("rpcport=" + str(rpc_port(n)) + "\n")
         f.write("listenonion=0\n")
     return datadir

 def get_datadir_path(dirname, n):
-    return os.path.join(dirname, "node"+str(n))
+    return os.path.join(dirname, "node" + str(n))

 def get_auth_cookie(datadir, n):
     user = None
@@ -196,10 +195,10 @@ def get_auth_cookie(datadir, n):
         with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f:
             for line in f:
                 if line.startswith("rpcuser="):
-                    assert user is None # Ensure that there is only one rpcuser line
+                    assert user is None  # Ensure that there is only one rpcuser line
                     user = line.split("=")[1].strip("\n")
                 if line.startswith("rpcpassword="):
-                    assert password is None # Ensure that there is only one rpcpassword line
+                    assert password is None  # Ensure that there is only one rpcpassword line
                     password = line.split("=")[1].strip("\n")
     if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
         with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
@@ -234,15 +233,15 @@ def wait_for_bitcoind_start(process, datadir, i, rpchost=None):
         try:
             # Check if .cookie file to be created
             rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i)
-            blocks = rpc.getblockcount()
-            break # break out of loop on success
+            rpc.getblockcount()
+            break  # break out of loop on success
         except IOError as e:
-            if e.errno != errno.ECONNREFUSED: # Port not yet open?
-                raise # unknown IO error
-        except JSONRPCException as e: # Initialization phase
-            if e.error['code'] != -28: # RPC in warmup?
-                raise # unknown JSON RPC exception
-        except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoind still starting
+            if e.errno != errno.ECONNREFUSED:  # Port not yet open?
+                raise  # unknown IO error
+        except JSONRPCException as e:  # Initialization phase
+            if e.error['code'] != -28:  # RPC in warmup?
+                raise  # unknown JSON RPC exception
+        except ValueError as e:  # cookie file not found and no rpcuser or rpcassword. bitcoind still starting
             if "No RPC credentials" not in str(e):
                 raise
         time.sleep(0.25)
@@ -255,11 +254,12 @@ def _start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary
     This function should only be called from within test_framework, not by individual test scripts."""

-    datadir = os.path.join(dirname, "node"+str(i))
+    datadir = os.path.join(dirname, "node" + str(i))
     if binary is None:
         binary = os.getenv("BITCOIND", "bitcoind")
     args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(get_mocktime()), "-uacomment=testnode%d" % i]
-    if extra_args is not None: args.extend(extra_args)
+    if extra_args is not None:
+        args.extend(extra_args)
     bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr)
     logger.debug("initialize_chain: bitcoind started, waiting for RPC to come up")
     wait_for_bitcoind_start(bitcoind_processes[i], datadir, i, rpchost)
@@ -277,7 +277,7 @@ def assert_start_raises_init_error(i, dirname, extra_args=None, expected_msg=Non
         node = _start_node(i, dirname, extra_args, stderr=log_stderr)
         _stop_node(node, i)
     except Exception as e:
-        assert 'bitcoind exited' in str(e) #node must have shutdown
+        assert 'bitcoind exited' in str(e)  # node must have shutdown
         if expected_msg is not None:
             log_stderr.seek(0)
             stderr = log_stderr.read().decode('utf-8')
@@ -295,21 +295,24 @@ def _start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=Non
     This function should only be called from within test_framework, not by individual test scripts."""

-    if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
-    if binary is None: binary = [ None for _ in range(num_nodes) ]
+    if extra_args is None:
+        extra_args = [None] * num_nodes
+    if binary is None:
+        binary = [None] * num_nodes
     assert_equal(len(extra_args), num_nodes)
     assert_equal(len(binary), num_nodes)
     rpcs = []
     try:
         for i in range(num_nodes):
             rpcs.append(_start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
-    except: # If one node failed to start, stop the others
+    except:
+        # If one node failed to start, stop the others
         _stop_nodes(rpcs)
         raise
     return rpcs

 def log_filename(dirname, n_node, logname):
-    return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
+    return os.path.join(dirname, "node" + str(n_node), "regtest", logname)

 def _stop_node(node, i):
     """Stop a bitcoind test node
@@ -332,7 +335,7 @@ def _stop_nodes(nodes):
     for i, node in enumerate(nodes):
         _stop_node(node, i)
-    assert not bitcoind_processes.values() # All connections must be gone now
+    assert not bitcoind_processes.values()  # All connections must be gone now

 def set_node_times(nodes, t):
     for node in nodes:
@@ -350,7 +353,7 @@ def disconnect_nodes(from_connection, node_num):
     raise AssertionError("timed out waiting for disconnect")

 def connect_nodes(from_connection, node_num):
-    ip_port = "127.0.0.1:"+str(p2p_port(node_num))
+    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
     from_connection.addnode(ip_port, "onetry")
     # poll until version handshake complete to avoid race conditions
     # with transaction relaying
@@ -370,14 +373,13 @@ def find_output(node, txid, amount):
     for i in range(len(txdata["vout"])):
         if txdata["vout"][i]["value"] == amount:
             return i
-    raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
+    raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))

 def gather_inputs(from_node, amount_needed, confirmations_required=1):
     """
     Return a random set of unspent txouts that are enough to pay amount_needed
     """
-    assert(confirmations_required >=0)
+    assert(confirmations_required >= 0)
     utxo = from_node.listunspent(confirmations_required)
     random.shuffle(utxo)
     inputs = []
@@ -385,9 +387,9 @@ def gather_inputs(from_node, amount_needed, confirmations_required=1):
     while total_in < amount_needed and len(utxo) > 0:
         t = utxo.pop()
         total_in += t["amount"]
-        inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
+        inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
     if total_in < amount_needed:
-        raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
+        raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
     return (total_in, inputs)

 def make_change(from_node, amount_in, amount_out, fee):
@@ -395,13 +397,13 @@ def make_change(from_node, amount_in, amount_out, fee):
     Create change output(s), return them
     """
     outputs = {}
-    amount = amount_out+fee
+    amount = amount_out + fee
     change = amount_in - amount
-    if change > amount*2:
+    if change > amount * 2:
         # Create an extra change output to break up big inputs
         change_address = from_node.getnewaddress()
         # Split change in two, being careful of rounding:
-        outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+        outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
         change = amount_in - amount - outputs[change_address]
     if change > 0:
         outputs[from_node.getnewaddress()] = change
@@ -414,9 +416,9 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
     """
     from_node = random.choice(nodes)
     to_node = random.choice(nodes)
-    fee = min_fee + fee_increment*random.randint(0,fee_variants)
+    fee = min_fee + fee_increment * random.randint(0, fee_variants)

-    (total_in, inputs) = gather_inputs(from_node, amount+fee)
+    (total_in, inputs) = gather_inputs(from_node, amount + fee)
     outputs = make_change(from_node, total_in, amount, fee)
     outputs[to_node.getnewaddress()] = float(amount)
@@ -430,10 +432,10 @@ def assert_fee_amount(fee, tx_size, fee_per_kB):
     """Assert the fee was in range"""
     target_fee = tx_size * fee_per_kB / 1000
     if fee < target_fee:
-        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee)))
+        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
     # allow the wallet's estimation to be at most 2 bytes off
     if fee > (tx_size + 2) * fee_per_kB / 1000:
-        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee)))
+        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))

 def assert_equal(thing1, thing2, *args):
     if thing1 != thing2 or any(thing1 != arg for arg in args):
@@ -441,11 +443,11 @@ def assert_equal(thing1, thing2, *args):

 def assert_greater_than(thing1, thing2):
     if thing1 <= thing2:
-        raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
+        raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))

 def assert_greater_than_or_equal(thing1, thing2):
     if thing1 < thing2:
-        raise AssertionError("%s < %s"%(str(thing1),str(thing2)))
+        raise AssertionError("%s < %s" % (str(thing1), str(thing2)))

 def assert_raises(exc, fun, *args, **kwds):
     assert_raises_message(exc, None, fun, *args, **kwds)
@@ -455,9 +457,9 @@ def assert_raises_message(exc, message, fun, *args, **kwds):
         fun(*args, **kwds)
     except exc as e:
         if message is not None and message not in e.error['message']:
-            raise AssertionError("Expected substring not found:"+e.error['message'])
+            raise AssertionError("Expected substring not found:" + e.error['message'])
     except Exception as e:
-        raise AssertionError("Unexpected exception raised: "+type(e).__name__)
+        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
     else:
         raise AssertionError("No exception raised")
@@ -484,9 +486,9 @@ def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
         if (code is not None) and (code != e.error["code"]):
             raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
         if (message is not None) and (message not in e.error['message']):
-            raise AssertionError("Expected substring not found:"+e.error['message'])
+            raise AssertionError("Expected substring not found:" + e.error['message'])
     except Exception as e:
-        raise AssertionError("Unexpected exception raised: "+type(e).__name__)
+        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
     else:
         raise AssertionError("No exception raised")
@@ -507,7 +509,7 @@ def assert_is_hash_string(string, length=64):
         raise AssertionError(
             "String %r contains invalid characters for a hash." % string)

-def assert_array_result(object_array, to_match, expected, should_not_find = False):
+def assert_array_result(object_array, to_match, expected, should_not_find=False):
     """
     Pass in array of JSON objects, a dictionary with key/value pairs
     to match against, and another dictionary with expected key/value
@@ -515,26 +517,26 @@ def assert_array_result(object_array, to_match, expected, should_not_find = Fals
     If the should_not_find flag is true, to_match should not be found
     in object_array
     """
-    if should_not_find == True:
-        assert_equal(expected, { })
+    if should_not_find:
+        assert_equal(expected, {})
     num_matched = 0
     for item in object_array:
         all_match = True
-        for key,value in to_match.items():
+        for key, value in to_match.items():
             if item[key] != value:
                 all_match = False
         if not all_match:
             continue
-        elif should_not_find == True:
-            num_matched = num_matched+1
-        for key,value in expected.items():
+        elif should_not_find:
+            num_matched = num_matched + 1
+        for key, value in expected.items():
             if item[key] != value:
-                raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
-            num_matched = num_matched+1
-    if num_matched == 0 and should_not_find != True:
-        raise AssertionError("No objects matched %s"%(str(to_match)))
-    if num_matched > 0 and should_not_find == True:
-        raise AssertionError("Objects were found %s"%(str(to_match)))
+                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
+            num_matched = num_matched + 1
+    if num_matched == 0 and not should_not_find:
+        raise AssertionError("No objects matched %s" % (str(to_match)))
+    if num_matched > 0 and should_not_find:
+        raise AssertionError("Objects were found %s" % (str(to_match)))

 def satoshi_round(amount):
     return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
@@ -542,7 +544,7 @@ def satoshi_round(amount):
 # Helper to create at least "count" utxos
 # Pass in a fee that is sufficient for relay and mining new transactions.
 def create_confirmed_utxos(fee, node, count):
-    node.generate(int(0.5*count)+101)
+    node.generate(int(0.5 * count) + 101)
     utxos = node.listunspent()
     iterations = count - len(utxos)
     addr1 = node.getnewaddress()
@@ -552,14 +554,14 @@ def create_confirmed_utxos(fee, node, count):
     for i in range(iterations):
         t = utxos.pop()
         inputs = []
-        inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
+        inputs.append({"txid": t["txid"], "vout": t["vout"]})
         outputs = {}
         send_value = t['amount'] - fee
-        outputs[addr1] = satoshi_round(send_value/2)
-        outputs[addr2] = satoshi_round(send_value/2)
+        outputs[addr1] = satoshi_round(send_value / 2)
+        outputs[addr2] = satoshi_round(send_value / 2)
         raw_tx = node.createrawtransaction(inputs, outputs)
         signed_tx = node.signrawtransaction(raw_tx)["hex"]
-        txid = node.sendrawtransaction(signed_tx)
+        node.sendrawtransaction(signed_tx)

     while (node.getmempoolinfo()['size'] > 0):
         node.generate(1)
@@ -574,8 +576,8 @@ def gen_return_txouts():
     # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
     # So we have big transactions (and therefore can't fit very many into each block)
     # create one script_pubkey
-    script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
-    for i in range (512):
+    script_pubkey = "6a4d0200"  # OP_RETURN OP_PUSH2 512 bytes
+    for i in range(512):
         script_pubkey = script_pubkey + "01"
     # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
     txouts = "81"
@@ -589,8 +591,8 @@ def gen_return_txouts():
     return txouts

 def create_tx(node, coinbase, to_address, amount):
-    inputs = [{ "txid" : coinbase, "vout" : 0}]
-    outputs = { to_address : amount }
+    inputs = [{"txid": coinbase, "vout": 0}]
+    outputs = {to_address: amount}
     rawtx = node.createrawtransaction(inputs, outputs)
     signresult = node.signrawtransaction(rawtx)
     assert_equal(signresult["complete"], True)
@@ -603,7 +605,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
     txids = []
     for _ in range(num):
         t = utxos.pop()
-        inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}]
+        inputs = [{"txid": t["txid"], "vout": t["vout"]}]
         outputs = {}
         change = t['amount'] - fee
         outputs[addr] = satoshi_round(change)
