@@ -19,6 +19,147 @@ from .authproxy import AuthServiceProxy, JSONRPCException
logger = logging.getLogger("TestFramework.utils")

# Assert functions
##################

def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert the fee was in range"""
    target_fee = tx_size * fee_per_kB / 1000
    if fee < target_fee:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
    # allow the wallet's estimation to be at most 2 bytes off
    if fee > (tx_size + 2) * fee_per_kB / 1000:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))

def assert_equal(thing1, thing2, *args):
    if thing1 != thing2 or any(thing1 != arg for arg in args):
        raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))

def assert_greater_than(thing1, thing2):
    if thing1 <= thing2:
        raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))

def assert_greater_than_or_equal(thing1, thing2):
    if thing1 < thing2:
        raise AssertionError("%s < %s" % (str(thing1), str(thing2)))

def assert_raises(exc, fun, *args, **kwds):
    assert_raises_message(exc, None, fun, *args, **kwds)

def assert_raises_message(exc, message, fun, *args, **kwds):
    try:
        fun(*args, **kwds)
    except exc as e:
        if message is not None and message not in e.error['message']:
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.

    Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
    and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was returned or if the error code/message are not as expected.

    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # JSONRPCException was thrown as expected. Check the code and message values are correct.
        if (code is not None) and (code != e.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
        if (message is not None) and (message not in e.error['message']):
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_is_hex_string(string):
    try:
        int(string, 16)
    except Exception as e:
        raise AssertionError(
            "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))

def assert_is_hash_string(string, length=64):
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    elif length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    elif not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)

def assert_array_result(object_array, to_match, expected, should_not_find=False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find:
        assert_equal(expected, {})
    num_matched = 0
    for item in object_array:
        all_match = True
        for key, value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find:
            num_matched = num_matched + 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
            num_matched = num_matched + 1
    if num_matched == 0 and not should_not_find:
        raise AssertionError("No objects matched %s" % (str(to_match)))
    if num_matched > 0 and should_not_find:
        raise AssertionError("Objects were found %s" % (str(to_match)))

# Utility functions
###################

def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    n = Decimal("20000000.00000003")
    satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")

def count_bytes(hex_string):
    return len(bytearray.fromhex(hex_string))

def bytes_to_hex_str(byte_str):
    return hexlify(byte_str).decode('ascii')

def hex_str_to_bytes(hex_str):
    return unhexlify(hex_str.encode('ascii'))

def str_to_b64str(string):
    return b64encode(string.encode('utf-8')).decode('ascii')

def satoshi_round(amount):
    return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
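
# Example (illustrative): satoshi_round("0.123456789") == Decimal("0.12345678");
# ROUND_DOWN means amounts are always truncated toward zero, never rounded up.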

# RPC/P2P connection constants and functions
############################################

# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
@@ -55,7 +196,6 @@ def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)


def p2p_port(n):
    assert(n <= MAX_NODES)
    return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
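
# Worked example (illustrative; assumes the module's PORT_MIN = 11000 and
# PORT_RANGE = 5000): with PortSeed.n = 1, p2p_port(0) is
# 11000 + 0 + (8 * 1) % 4991 = 11008 and rpc_port(0) is 16008; the PortSeed
# offset is meant to keep concurrent test runs on separate ports.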
@@ -63,78 +203,23 @@ def p2p_port(n):
def rpc_port(n):
    return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)

def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    n = Decimal("20000000.00000003")
    satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")

def count_bytes(hex_string):
    return len(bytearray.fromhex(hex_string))

def bytes_to_hex_str(byte_str):
    return hexlify(byte_str).decode('ascii')

def hex_str_to_bytes(hex_str):
    return unhexlify(hex_str.encode('ascii'))

def rpc_auth_pair(n):
    return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)

def str_to_b64str(string):
    return b64encode(string.encode('utf-8')).decode('ascii')

def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.

    sync_blocks needs to be called with an rpc_connections set that has at least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    maxheight = max(x.getblockcount() for x in rpc_connections)
    start_time = cur_time = time.time()
    while cur_time <= start_time + timeout:
        tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
        if all(t["height"] == maxheight for t in tips):
            if all(t["hash"] == tips[0]["hash"] for t in tips):
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                "".join("\n {!r}".format(tip) for tip in tips)))
        cur_time = time.time()
    raise AssertionError("Block sync to height {} timed out:{}".format(
        maxheight, "".join("\n {!r}".format(tip) for tip in tips)))

def rpc_url(datadir, i, rpchost=None):
    rpc_u, rpc_p = get_auth_cookie(datadir, i)
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        parts = rpchost.split(':')
        if len(parts) == 2:
            host, port = parts
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))

def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    while timeout > 0:
        best_hash = [x.getbestblockhash() for x in rpc_connections]
        if best_hash == [best_hash[0]] * len(best_hash):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")

def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while timeout > 0:
        pool = set(rpc_connections[0].getrawmempool())
        num_match = 1
        for i in range(1, len(rpc_connections)):
            if set(rpc_connections[i].getrawmempool()) == pool:
                num_match = num_match + 1
        if num_match == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")

# Node functions
################

def initialize_datadir(dirname, n):
    datadir = os.path.join(dirname, "node" + str(n))
@@ -172,21 +257,13 @@ def get_auth_cookie(datadir, n):
        raise ValueError("No RPC credentials")
    return user, password

def rpc_url(datadir, i, rpchost=None):
    rpc_u, rpc_p = get_auth_cookie(datadir, i)
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        parts = rpchost.split(':')
        if len(parts) == 2:
            host, port = parts
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))

def log_filename(dirname, n_node, logname):
    return os.path.join(dirname, "node" + str(n_node), "regtest", logname)

def get_bip9_status(node, key):
    info = node.getblockchaininfo()
    return info['bip9_softforks'][key]

def set_node_times(nodes, t):
    for node in nodes:
        node.setmocktime(t)
@@ -214,6 +291,63 @@ def connect_nodes_bi(nodes, a, b):
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)

def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.

    sync_blocks needs to be called with an rpc_connections set that has at least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    maxheight = max(x.getblockcount() for x in rpc_connections)
    start_time = cur_time = time.time()
    while cur_time <= start_time + timeout:
        tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
        if all(t["height"] == maxheight for t in tips):
            if all(t["hash"] == tips[0]["hash"] for t in tips):
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                "".join("\n {!r}".format(tip) for tip in tips)))
        cur_time = time.time()
    raise AssertionError("Block sync to height {} timed out:{}".format(
        maxheight, "".join("\n {!r}".format(tip) for tip in tips)))

def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    while timeout > 0:
        best_hash = [x.getbestblockhash() for x in rpc_connections]
        if best_hash == [best_hash[0]] * len(best_hash):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")

def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while timeout > 0:
        pool = set(rpc_connections[0].getrawmempool())
        num_match = 1
        for i in range(1, len(rpc_connections)):
            if set(rpc_connections[i].getrawmempool()) == pool:
                num_match = num_match + 1
        if num_match == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")

# Transaction/Block functions
#############################

def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
@@ -278,119 +412,6 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    return (txid, signresult["hex"], fee)

def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert the fee was in range"""
    target_fee = tx_size * fee_per_kB / 1000
    if fee < target_fee:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
    # allow the wallet's estimation to be at most 2 bytes off
    if fee > (tx_size + 2) * fee_per_kB / 1000:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))

def assert_equal(thing1, thing2, *args):
    if thing1 != thing2 or any(thing1 != arg for arg in args):
        raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))

def assert_greater_than(thing1, thing2):
    if thing1 <= thing2:
        raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))

def assert_greater_than_or_equal(thing1, thing2):
    if thing1 < thing2:
        raise AssertionError("%s < %s" % (str(thing1), str(thing2)))

def assert_raises(exc, fun, *args, **kwds):
    assert_raises_message(exc, None, fun, *args, **kwds)

def assert_raises_message(exc, message, fun, *args, **kwds):
    try:
        fun(*args, **kwds)
    except exc as e:
        if message is not None and message not in e.error['message']:
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.

    Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
    and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was returned or if the error code/message are not as expected.

    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # JSONRPCException was thrown as expected. Check the code and message values are correct.
        if (code is not None) and (code != e.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
        if (message is not None) and (message not in e.error['message']):
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")

def assert_is_hex_string(string):
    try:
        int(string, 16)
    except Exception as e:
        raise AssertionError(
            "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))

def assert_is_hash_string(string, length=64):
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    elif length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    elif not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)

def assert_array_result(object_array, to_match, expected, should_not_find=False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find:
        assert_equal(expected, {})
    num_matched = 0
    for item in object_array:
        all_match = True
        for key, value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find:
            num_matched = num_matched + 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
            num_matched = num_matched + 1
    if num_matched == 0 and not should_not_find:
        raise AssertionError("No objects matched %s" % (str(to_match)))
    if num_matched > 0 and should_not_find:
        raise AssertionError("Objects were found %s" % (str(to_match)))

def satoshi_round(amount):
    return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)

# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
@@ -480,7 +501,3 @@ def mine_large_block(node, utxos=None):
    fee = 100 * node.getnetworkinfo()["relayfee"]
    create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
    node.generate(1)

def get_bip9_status(node, key):
    info = node.getblockchaininfo()
    return info['bip9_softforks'][key]