@@ -523,7 +523,7 @@ class Benchmarker:
except Exception:
pass
with open(os.path.join(logDir, 'out.txt'), 'w') as out, \
- open(os.path.join(logDir, 'err.txt'), 'w') as err:
+ open(os.path.join(logDir, 'verification.txt'), 'w') as verification:

if test.os.lower() != self.os.lower() or test.database_os.lower() != self.database_os.lower():
out.write("OS or Database OS specified in benchmark_config.json does not match the current environment. Skipping.\n")
@@ -554,7 +554,7 @@ class Benchmarker:
out.flush()
try:
if test.requires_database():
- p = subprocess.Popen(self.database_ssh_string, stdin=subprocess.PIPE, stdout=out, stderr=err, shell=True)
+ p = subprocess.Popen(self.database_ssh_string, stdin=subprocess.PIPE, stdout=out, stderr=out, shell=True)
p.communicate("""
sudo restart mysql
sudo restart mongod
@@ -577,24 +577,23 @@ class Benchmarker:

if self.__is_port_bound(test.port):
# This can happen sometimes - let's try again
- self.__stop_test(out, err)
+ self.__stop_test(out)
out.flush()
- err.flush()
time.sleep(15)
if self.__is_port_bound(test.port):
# We gave it our all
self.__write_intermediate_results(test.name, "port " + str(test.port) + " is not available before start")
- err.write(header("Error: Port %s is not available, cannot start %s" % (test.port, test.name)))
- err.flush()
+ out.write(header("Error: Port %s is not available, cannot start %s" % (test.port, test.name)))
+ out.flush()
print "Error: Unable to recover port, cannot start test"
return exit_with_code(1)

- result = test.start(out, err)
+ result = test.start(out)
if result != 0:
- self.__stop_test(out, err)
+ self.__stop_test(out)
time.sleep(5)
- err.write( "ERROR: Problem starting {name}\n".format(name=test.name) )
- err.flush()
+ out.write( "ERROR: Problem starting {name}\n".format(name=test.name) )
+ out.flush()
self.__write_intermediate_results(test.name,"<setup.py>#start() returned non-zero")
return exit_with_code(1)

@@ -605,9 +604,8 @@ class Benchmarker:
# Verify URLs
##########################
logging.info("Verifying framework URLs")
- passed_verify = test.verify_urls(out, err)
- out.flush()
- err.flush()
+ passed_verify = test.verify_urls(verification)
+ verification.flush()

##########################
# Benchmark this test
@@ -616,31 +614,28 @@ class Benchmarker:
logging.info("Benchmarking")
out.write(header("Benchmarking %s" % test.name))
out.flush()
- test.benchmark(out, err)
+ test.benchmark(out)
out.flush()
- err.flush()

##########################
# Stop this test
##########################
out.write(header("Stopping %s" % test.name))
out.flush()
- self.__stop_test(out, err)
+ self.__stop_test(out)
out.flush()
- err.flush()
time.sleep(15)

if self.__is_port_bound(test.port):
# This can happen sometimes - let's try again
- self.__stop_test(out, err)
+ self.__stop_test(out)
out.flush()
- err.flush()
time.sleep(15)
if self.__is_port_bound(test.port):
# We gave it our all
self.__write_intermediate_results(test.name, "port " + str(test.port) + " was not released by stop")
- err.write(header("Error: Port %s was not released by stop %s" % (test.port, test.name)))
- err.flush()
+ out.write(header("Error: Port %s was not released by stop %s" % (test.port, test.name)))
+ out.flush()
return exit_with_code(1)

out.write(header("Stopped %s" % test.name))
@@ -660,30 +655,28 @@ class Benchmarker:
return exit_with_code(1)
except (OSError, IOError, subprocess.CalledProcessError) as e:
self.__write_intermediate_results(test.name,"<setup.py> raised an exception")
- err.write(header("Subprocess Error %s" % test.name))
- traceback.print_exc(file=err)
- err.flush()
+ out.write(header("Subprocess Error %s" % test.name))
+ traceback.print_exc(file=out)
+ out.flush()
try:
- self.__stop_test(out, err)
+ self.__stop_test(out)
except (subprocess.CalledProcessError) as e:
self.__write_intermediate_results(test.name,"<setup.py>#stop() raised an error")
- err.write(header("Subprocess Error: Test .stop() raised exception %s" % test.name))
- traceback.print_exc(file=err)
- err.flush()
+ out.write(header("Subprocess Error: Test .stop() raised exception %s" % test.name))
+ traceback.print_exc(file=out)
+ out.flush()
out.close()
- err.close()
return exit_with_code(1)
# TODO - subprocess should not catch this exception!
# Parent process should catch it and cleanup/exit
except (KeyboardInterrupt) as e:
- self.__stop_test(out, err)
+ self.__stop_test(out)
out.write(header("Cleaning up..."))
out.flush()
self.__finish()
sys.exit(1)

out.close()
- err.close()
return exit_with_code(0)

############################################################
@@ -694,9 +687,9 @@ class Benchmarker:
# __stop_test(benchmarker)
# Stops all running tests
############################################################
- def __stop_test(self, out, err):
+ def __stop_test(self, out):
try:
- subprocess.check_call('sudo killall -s 9 -u %s' % self.runner_user, shell=True, stderr=err, stdout=out)
+ subprocess.check_call('sudo killall -s 9 -u %s' % self.runner_user, shell=True, stderr=out, stdout=out)
retcode = 0
except Exception:
retcode = 1
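
For context, the logging layout these changes converge on (a single out.txt that receives both the stdout and stderr of every subprocess, plus a separate verification.txt reserved for URL-verification output) can be sketched roughly as follows. This is an illustration only: the open_test_logs helper, the log_dir argument, and the example path are hypothetical stand-ins, not names from benchmarker.py.

import os
import subprocess

def open_test_logs(log_dir):
    # Hypothetical helper: one combined log for subprocess stdout *and* stderr,
    # and a dedicated file for verification results.
    try:
        os.makedirs(log_dir)
    except Exception:
        pass  # directory may already exist
    out = open(os.path.join(log_dir, 'out.txt'), 'w')
    verification = open(os.path.join(log_dir, 'verification.txt'), 'w')
    return out, verification

out, verification = open_test_logs('/tmp/example-logs')  # hypothetical path
try:
    # As in the diff above, stderr is routed into the same handle as stdout.
    subprocess.check_call('echo starting test', shell=True, stdout=out, stderr=out)
    verification.write('PASS: example URL check\n')
    verification.flush()
finally:
    out.close()
    verification.close()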