@@ -16,6 +16,7 @@ For a description of arguments recognized by test scripts, see
 import argparse
 import configparser
+import datetime
 import os
 import time
 import shutil
@@ -170,6 +171,7 @@ def main():
     parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
     parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
     parser.add_argument('--quiet', '-q', action='store_true', help='only print results summary and failure logs')
+    parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
     args, unknown_args = parser.parse_known_args()

     # Create a set to store arguments and create the passon string
@@ -187,6 +189,12 @@ def main():
     logging_level = logging.INFO if args.quiet else logging.DEBUG
     logging.basicConfig(format='%(message)s', level=logging_level)

+    # Create base test directory
+    tmpdir = "%s/bitcoin_test_runner_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
+    os.makedirs(tmpdir)
+
+    logging.debug("Temporary test directory at %s" % tmpdir)
+
     enable_wallet = config["components"].getboolean("ENABLE_WALLET")
     enable_utils = config["components"].getboolean("ENABLE_UTILS")
     enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")
@@ -239,9 +247,9 @@ def main():
     if not args.keepcache:
         shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)

-    run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], args.jobs, args.coverage, passon_args)
+    run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args)

-def run_tests(test_list, src_dir, build_dir, exeext, jobs=1, enable_coverage=False, args=[]):
+def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=[]):
     # Warn if bitcoind is already running (unix only)
     try:
         if subprocess.check_output(["pidof", "bitcoind"]) is not None:
@@ -272,10 +280,10 @@ def run_tests(test_list, src_dir, build_dir, exeext, jobs=1, enable_coverage=Fal
     if len(test_list) > 1 and jobs > 1:
         # Populate cache
-        subprocess.check_output([tests_dir + 'create_cache.py'] + flags)
+        subprocess.check_output([tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])

     #Run Tests
-    job_queue = TestHandler(jobs, tests_dir, test_list, flags)
+    job_queue = TestHandler(jobs, tests_dir, tmpdir, test_list, flags)
     time0 = time.time()
     test_results = []
@@ -302,6 +310,10 @@ def run_tests(test_list, src_dir, build_dir, exeext, jobs=1, enable_coverage=Fal
         logging.debug("Cleaning up coverage data")
         coverage.cleanup()

+    # Clear up the temp directory if all subdirectories are gone
+    if not os.listdir(tmpdir):
+        os.rmdir(tmpdir)
+
     all_passed = all(map(lambda test_result: test_result.was_successful, test_results))

     sys.exit(not all_passed)
@@ -329,10 +341,11 @@ class TestHandler:
     Trigger the testscrips passed in via the list.
     """

-    def __init__(self, num_tests_parallel, tests_dir, test_list=None, flags=None):
+    def __init__(self, num_tests_parallel, tests_dir, tmpdir, test_list=None, flags=None):
         assert(num_tests_parallel >= 1)
         self.num_jobs = num_tests_parallel
         self.tests_dir = tests_dir
+        self.tmpdir = tmpdir
         self.test_list = test_list
         self.flags = flags
         self.num_running = 0
@@ -347,13 +360,15 @@ class TestHandler:
             # Add tests
             self.num_running += 1
             t = self.test_list.pop(0)
-            port_seed = ["--portseed={}".format(len(self.test_list) + self.portseed_offset)]
+            portseed = len(self.test_list) + self.portseed_offset
+            portseed_arg = ["--portseed={}".format(portseed)]
             log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
             log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
             test_argv = t.split()
+            tmpdir = ["--tmpdir=%s/%s_%s" % (self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)]
             self.jobs.append((t,
                               time.time(),
-                              subprocess.Popen([self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + port_seed,
+                              subprocess.Popen([self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir,
                                                universal_newlines=True,
                                                stdout=log_stdout,
                                                stderr=log_stderr),
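
For reference, a minimal standalone sketch of the directory layout this patch produces: a run-level directory named with a timestamp, and one subdirectory per test script named after the script and its port seed. The script name and port seed below are illustrative values, not taken from the diff.

import datetime
import os
import re
import tempfile

# Run-level directory created in main(): <prefix>/bitcoin_test_runner_<timestamp>
prefix = tempfile.gettempdir()
run_tmpdir = "%s/bitcoin_test_runner_%s" % (prefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
os.makedirs(run_tmpdir)

# Per-test directory passed via --tmpdir by TestHandler:
# <run_tmpdir>/<script name without .py>_<portseed>
test_script = "example.py"  # illustrative script name
portseed = 42               # illustrative port seed
per_test_tmpdir = "%s/%s_%s" % (run_tmpdir, re.sub(".py$", "", test_script), portseed)
print(per_test_tmpdir)      # e.g. /tmp/bitcoin_test_runner_20170101_120000/example_42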