|
@@ -2,12 +2,13 @@ from setup.linux.installer import Installer
|
|
from setup.linux import setup_util
|
|
from setup.linux import setup_util
|
|
|
|
|
|
from benchmark import framework_test
|
|
from benchmark import framework_test
|
|
|
|
+from utils import Header
|
|
|
|
|
|
import os
|
|
import os
|
|
import json
|
|
import json
|
|
import subprocess
|
|
import subprocess
|
|
|
|
+import traceback
|
|
import time
|
|
import time
|
|
-import textwrap
|
|
|
|
import pprint
|
|
import pprint
|
|
import csv
|
|
import csv
|
|
import sys
|
|
import sys
|
|
@@ -104,11 +105,7 @@ class Benchmarker:
|
|
##########################
|
|
##########################
|
|
# Setup client/server
|
|
# Setup client/server
|
|
##########################
|
|
##########################
|
|
- print textwrap.dedent("""
|
|
|
|
- =====================================================
|
|
|
|
- Preparing Server, Database, and Client ...
|
|
|
|
- =====================================================
|
|
|
|
- """)
|
|
|
|
|
|
+ print Header("Preparing Server, Database, and Client ...", top='=', bottom='=')
|
|
self.__setup_server()
|
|
self.__setup_server()
|
|
self.__setup_database()
|
|
self.__setup_database()
|
|
self.__setup_client()
|
|
self.__setup_client()
|
|
@@ -120,22 +117,14 @@ class Benchmarker:
|
|
##########################
|
|
##########################
|
|
# Run tests
|
|
# Run tests
|
|
##########################
|
|
##########################
|
|
- print textwrap.dedent("""
|
|
|
|
- =====================================================
|
|
|
|
- Running Tests ...
|
|
|
|
- =====================================================
|
|
|
|
- """)
|
|
|
|
|
|
+ print Header("Running Tests...", top='=', bottom='=')
|
|
result = self.__run_tests(all_tests)
|
|
result = self.__run_tests(all_tests)
|
|
|
|
|
|
##########################
|
|
##########################
|
|
# Parse results
|
|
# Parse results
|
|
##########################
|
|
##########################
|
|
if self.mode == "benchmark":
|
|
if self.mode == "benchmark":
|
|
- print textwrap.dedent("""
|
|
|
|
- =====================================================
|
|
|
|
- Parsing Results ...
|
|
|
|
- =====================================================
|
|
|
|
- """)
|
|
|
|
|
|
+ print Header("Parsing Results ...", top='=', bottom='=')
|
|
self.__parse_results(all_tests)
|
|
self.__parse_results(all_tests)
|
|
|
|
|
|
self.__finish()
|
|
self.__finish()
|
|
@@ -528,14 +517,10 @@ class Benchmarker:
|
|
# These features do not work on Windows
|
|
# These features do not work on Windows
|
|
for test in tests:
|
|
for test in tests:
|
|
if __name__ == 'benchmark.benchmarker':
|
|
if __name__ == 'benchmark.benchmarker':
|
|
- print textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Running Test: {name} ...
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name))
|
|
|
|
|
|
+ print Header("Running Test: %s" % test.name)
|
|
with open('current_benchmark.txt', 'w') as benchmark_resume_file:
|
|
with open('current_benchmark.txt', 'w') as benchmark_resume_file:
|
|
benchmark_resume_file.write(test.name)
|
|
benchmark_resume_file.write(test.name)
|
|
- test_process = Process(target=self.__run_test, args=(test,))
|
|
|
|
|
|
+ test_process = Process(target=self.__run_test, name="Test Runner (%s)" % test.name, args=(test,))
|
|
test_process.start()
|
|
test_process.start()
|
|
test_process.join(self.run_test_timeout_seconds)
|
|
test_process.join(self.run_test_timeout_seconds)
|
|
self.__load_results() # Load intermediate result from child process
|
|
self.__load_results() # Load intermediate result from child process
|
|
@@ -580,6 +565,7 @@ class Benchmarker:
|
|
os.makedirs(os.path.join(self.latest_results_directory, 'logs', "{name}".format(name=test.name)))
|
|
os.makedirs(os.path.join(self.latest_results_directory, 'logs', "{name}".format(name=test.name)))
|
|
except:
|
|
except:
|
|
pass
|
|
pass
|
|
|
|
|
|
with open(os.path.join(self.latest_results_directory, 'logs', "{name}".format(name=test.name), 'out.txt'), 'w') as out, \
|
|
with open(os.path.join(self.latest_results_directory, 'logs', "{name}".format(name=test.name), 'out.txt'), 'w') as out, \
|
|
open(os.path.join(self.latest_results_directory, 'logs', "{name}".format(name=test.name), 'err.txt'), 'w') as err:
|
|
open(os.path.join(self.latest_results_directory, 'logs', "{name}".format(name=test.name), 'err.txt'), 'w') as err:
|
|
if hasattr(test, 'skip'):
|
|
if hasattr(test, 'skip'):
|
|
@@ -611,24 +597,15 @@ class Benchmarker:
|
|
if self.results['frameworks'] != None and test.name in self.results['completed']:
|
|
if self.results['frameworks'] != None and test.name in self.results['completed']:
|
|
out.write('Framework {name} found in latest saved data. Skipping.\n'.format(name=str(test.name)))
|
|
out.write('Framework {name} found in latest saved data. Skipping.\n'.format(name=str(test.name)))
|
|
return exit_with_code(1)
|
|
return exit_with_code(1)
|
|
-
|
|
|
|
out.flush()
|
|
out.flush()
|
|
|
|
|
|
- out.write( textwrap.dedent("""
|
|
|
|
- =====================================================
|
|
|
|
- Beginning {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
+ out.write(Header("Beginning %s" % test.name, top='='))
|
|
out.flush()
|
|
out.flush()
|
|
|
|
|
|
##########################
|
|
##########################
|
|
# Start this test
|
|
# Start this test
|
|
##########################
|
|
##########################
|
|
- out.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Starting {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
+ out.write(Header("Starting %s" % test.name))
|
|
out.flush()
|
|
out.flush()
|
|
try:
|
|
try:
|
|
if test.requires_database():
|
|
if test.requires_database():
|
|
@@ -643,11 +620,7 @@ class Benchmarker:
|
|
|
|
|
|
if self.__is_port_bound(test.port):
|
|
if self.__is_port_bound(test.port):
|
|
self.__write_intermediate_results(test.name, "port " + str(test.port) + " is not available before start")
|
|
self.__write_intermediate_results(test.name, "port " + str(test.port) + " is not available before start")
|
|
- err.write( textwrap.dedent("""
|
|
|
|
- ---------------------------------------------------------
|
|
|
|
- Error: Port {port} is not available before start {name}
|
|
|
|
- ---------------------------------------------------------
|
|
|
|
- """.format(name=test.name, port=str(test.port))) )
|
|
|
|
|
|
+ err.write(Header("Error: Port %s is not available, cannot start %s" % (test.port, test.name)))
|
|
err.flush()
|
|
err.flush()
|
|
return exit_with_code(1)
|
|
return exit_with_code(1)
|
|
|
|
|
|
@@ -656,11 +629,7 @@ class Benchmarker:
|
|
test.stop(out, err)
|
|
test.stop(out, err)
|
|
time.sleep(5)
|
|
time.sleep(5)
|
|
err.write( "ERROR: Problem starting {name}\n".format(name=test.name) )
|
|
err.write( "ERROR: Problem starting {name}\n".format(name=test.name) )
|
|
- err.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Stopped {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
+ err.write(Header("Stopped %s" % test.name))
|
|
err.flush()
|
|
err.flush()
|
|
self.__write_intermediate_results(test.name,"<setup.py>#start() returned non-zero")
|
|
self.__write_intermediate_results(test.name,"<setup.py>#start() returned non-zero")
|
|
return exit_with_code(1)
|
|
return exit_with_code(1)
|
|
@@ -678,11 +647,7 @@ class Benchmarker:
|
|
# Benchmark this test
|
|
# Benchmark this test
|
|
##########################
|
|
##########################
|
|
if self.mode == "benchmark":
|
|
if self.mode == "benchmark":
|
|
- out.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Benchmarking {name} ...
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
+ out.write(Header("Benchmarking %s" % test.name))
|
|
out.flush()
|
|
out.flush()
|
|
test.benchmark(out, err)
|
|
test.benchmark(out, err)
|
|
out.flush()
|
|
out.flush()
|
|
@@ -691,11 +656,7 @@ class Benchmarker:
|
|
##########################
|
|
##########################
|
|
# Stop this test
|
|
# Stop this test
|
|
##########################
|
|
##########################
|
|
- out.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Stopping {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
+ out.write(Header("Stopping %s" % test.name))
|
|
out.flush()
|
|
out.flush()
|
|
test.stop(out, err)
|
|
test.stop(out, err)
|
|
out.flush()
|
|
out.flush()
|
|
@@ -704,19 +665,11 @@ class Benchmarker:
|
|
|
|
|
|
if self.__is_port_bound(test.port):
|
|
if self.__is_port_bound(test.port):
|
|
self.__write_intermediate_results(test.name, "port " + str(test.port) + " was not released by stop")
|
|
self.__write_intermediate_results(test.name, "port " + str(test.port) + " was not released by stop")
|
|
- err.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Error: Port {port} was not released by stop {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name, port=str(test.port))) )
|
|
|
|
|
|
+ err.write(Header("Error: Port %s was not released by stop %s" % (test.port, test.name)))
|
|
err.flush()
|
|
err.flush()
|
|
return exit_with_code(1)
|
|
return exit_with_code(1)
|
|
|
|
|
|
- out.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Stopped {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
+ out.write(Header("Stopped %s" % test.name))
|
|
out.flush()
|
|
out.flush()
|
|
time.sleep(5)
|
|
time.sleep(5)
|
|
|
|
|
|
@@ -724,11 +677,8 @@ class Benchmarker:
|
|
# Save results thus far into toolset/benchmark/latest.json
|
|
# Save results thus far into toolset/benchmark/latest.json
|
|
##########################################################
|
|
##########################################################
|
|
|
|
|
|
- out.write( textwrap.dedent("""
|
|
|
|
- ----------------------------------------------------
|
|
|
|
- Saving results through {name}
|
|
|
|
- ----------------------------------------------------
|
|
|
|
- """.format(name=test.name)) )
|
|
|
|
|
|
|
|
|
|
+ out.write(Header("Saving results through %s" % test.name))
|
|
out.flush()
|
|
out.flush()
|
|
self.__write_intermediate_results(test.name,time.strftime("%Y%m%d%H%M%S", time.localtime()))
|
|
self.__write_intermediate_results(test.name,time.strftime("%Y%m%d%H%M%S", time.localtime()))
|
|
|
|
|
|
@@ -737,25 +687,15 @@ class Benchmarker:
|
|
return exit_with_code(1)
|
|
return exit_with_code(1)
|
|
except (OSError, IOError, subprocess.CalledProcessError) as e:
|
|
except (OSError, IOError, subprocess.CalledProcessError) as e:
|
|
self.__write_intermediate_results(test.name,"<setup.py> raised an exception")
|
|
self.__write_intermediate_results(test.name,"<setup.py> raised an exception")
|
|
- err.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Subprocess Error {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- {err}
|
|
|
|
- {trace}
|
|
|
|
- """.format(name=test.name, err=e, trace=sys.exc_info()[:2])) )
|
|
|
|
|
|
+ err.write(Header("Subprocess Error %s" % test.name))
|
|
|
|
+ traceback.print_exc(file=err)
|
|
err.flush()
|
|
err.flush()
|
|
try:
|
|
try:
|
|
test.stop(out, err)
|
|
test.stop(out, err)
|
|
except (subprocess.CalledProcessError) as e:
|
|
except (subprocess.CalledProcessError) as e:
|
|
self.__write_intermediate_results(test.name,"<setup.py>#stop() raised an error")
|
|
self.__write_intermediate_results(test.name,"<setup.py>#stop() raised an error")
|
|
- err.write( textwrap.dedent("""
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Subprocess Error: Test .stop() raised exception {name}
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- {err}
|
|
|
|
- {trace}
|
|
|
|
- """.format(name=test.name, err=e, trace=sys.exc_info()[:2])) )
|
|
|
|
|
|
+ err.write(Header("Subprocess Error: Test .stop() raised exception %s" % test.name))
|
|
|
|
+ traceback.print_exc(file=err)
|
|
err.flush()
|
|
err.flush()
|
|
out.close()
|
|
out.close()
|
|
err.close()
|
|
err.close()
|
|
@@ -764,11 +704,7 @@ class Benchmarker:
|
|
# Parent process should catch it and cleanup/exit
|
|
# Parent process should catch it and cleanup/exit
|
|
except (KeyboardInterrupt) as e:
|
|
except (KeyboardInterrupt) as e:
|
|
test.stop(out, err)
|
|
test.stop(out, err)
|
|
- out.write( """
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- Cleaning up....
|
|
|
|
- -----------------------------------------------------
|
|
|
|
- """)
|
|
|
|
|
|
+ out.write(Header("Cleaning up..."))
|
|
out.flush()
|
|
out.flush()
|
|
self.__finish()
|
|
self.__finish()
|
|
sys.exit(1)
|
|
sys.exit(1)
|