#! /usr/bin/env python3
"""Brute-force test script: test libpqxx against many compilers etc.

This script makes no changes in the source tree; all builds happen in
temporary directories.

To make this possible, you may need to run "make distclean" in the
source tree. The configure script will refuse to configure otherwise.
"""
# Without this, pocketlint does not yet understand the print function.
from __future__ import print_function

from abc import (
    ABCMeta,
    abstractmethod,
)
from argparse import ArgumentParser
from contextlib import contextmanager
from datetime import datetime
from functools import partial
import json
from multiprocessing import (
    JoinableQueue,
    Process,
    Queue,
)
from multiprocessing.pool import (
    Pool,
)
from os import (
    cpu_count,
    getcwd,
)
import os.path
from queue import Empty
from shutil import rmtree
from subprocess import (
    CalledProcessError,
    check_call,
    check_output,
    DEVNULL,
)
from sys import (
    stderr,
    stdout,
)
from tempfile import mkdtemp
from textwrap import dedent

CPUS = cpu_count()

GCC_VERSIONS = list(range(8, 14))
GCC = ['g++-%d' % ver for ver in GCC_VERSIONS]
CLANG_VERSIONS = list(range(7, 15))
CLANG = ['clang++-6.0'] + ['clang++-%d' % ver for ver in CLANG_VERSIONS]
CXX = GCC + CLANG

STDLIB = (
    '',
    '-stdlib=libc++',
)

OPT = ('-O0', '-O3')

LINK = {
    'static': ['--enable-static', '--disable-shared'],
    'dynamic': ['--disable-static', '--enable-shared'],
}

DEBUG = {
    'plain': [],
    'audit': ['--enable-audit'],
    'maintainer': ['--enable-maintainer-mode'],
    'full': ['--enable-audit', '--enable-maintainer-mode'],
}

# CMake "generators." Maps the name of a generator (as used with cmake's
# -G option) to the actual command line needed to do the build.
#
# I prefer Ninja if available, because it's fast. But hey, the default will
# work.
CMAKE_GENERATORS = {
    'Ninja': ['ninja'],
    'Unix Makefiles': ['make', '-j%d' % CPUS],
}

class Fail(Exception):
    """A known, well-handled exception. Doesn't need a traceback."""


class Skip(Exception):
    """We're not doing this build. It's not an error though."""

def run(cmd, output, cwd=None):
    """Run a command, write output to file-like object."""
    command_line = ' '.join(cmd)
    output.write("%s\n\n" % command_line)
    check_call(cmd, stdout=output, stderr=output, cwd=cwd)


def report(output, message):
    """Report a message to output, and standard output."""
    print(message, flush=True)
    output.write('\n\n')
    output.write(message)
    output.write('\n')


def file_contains(path, text):
    """Does the file at path contain text?"""
    with open(path) as stream:
        for line in stream:
            if text in line:
                return True
    return False


@contextmanager
def tmp_dir():
    """Create a temporary directory, and clean it up again."""
    tmp = mkdtemp()
    try:
        yield tmp
    finally:
        rmtree(tmp)

def write_check_code(work_dir):
    """Write a simple C++ program so we can test whether we can compile it.

    Returns the file's full path.
    """
    path = os.path.join(work_dir, "check.cxx")
    with open(path, 'w') as source:
        source.write(dedent("""\
            #include <iostream>
            int main()
            {
                std::cout << "Hello world." << std::endl;
            }
            """))
    return path

def check_compiler(work_dir, cxx, stdlib, check, verbose=False):
    """Is the given compiler combo available?"""
    err_file = os.path.join(work_dir, 'stderr.log')
    if verbose:
        err_output = open(err_file, 'w')
    else:
        err_output = DEVNULL
    try:
        command = [cxx, check]
        if stdlib != '':
            command.append(stdlib)
        check_call(command, cwd=work_dir, stderr=err_output)
    except (OSError, CalledProcessError):
        if verbose:
            with open(err_file) as errors:
                stdout.write(errors.read())
        print("Can't build with '%s %s'. Skipping." % (cxx, stdlib))
        return False
    else:
        return True

# TODO: Use Pool.
def check_compilers(compilers, stdlibs, verbose=False):
    """Check which compiler configurations are viable."""
    with tmp_dir() as work_dir:
        check = write_check_code(work_dir)
        return [
            (cxx, stdlib)
            for stdlib in stdlibs
            for cxx in compilers
            if check_compiler(
                work_dir, cxx, stdlib, check=check, verbose=verbose)
        ]

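# A possible Pool-based variant of check_compilers(), per the TODO above.
# This is only a sketch, not part of the script: `combos` and `results` are
# hypothetical local names, and it relies on Pool.starmap() returning results
# in input order.
#
#     combos = [(cxx, stdlib) for stdlib in stdlibs for cxx in compilers]
#     with Pool() as pool:
#         results = pool.starmap(
#             partial(check_compiler, work_dir, check=check, verbose=verbose),
#             combos)
#     return [combo for combo, ok in zip(combos, results) if ok]
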
def find_cmake_command():
    """Figure out a CMake generator we can use, or None."""
    try:
        caps = check_output(['cmake', '-E', 'capabilities'])
    except FileNotFoundError:
        return None
    names = {generator['name'] for generator in json.loads(caps)['generators']}
    for gen in CMAKE_GENERATORS.keys():
        if gen in names:
            return gen
    return None

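# For reference, `cmake -E capabilities` prints JSON roughly of this shape
# (an assumed sketch, trimmed to the one key find_cmake_command() reads):
#
#     {"generators": [{"name": "Ninja", ...}, {"name": "Unix Makefiles", ...}]}
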
class Config(metaclass=ABCMeta):
    """Configuration for a build.

    These classes must be suitable for pickling, so we can send their objects
    to worker processes.
    """
    @abstractmethod
    def name(self):
        """Return an identifier for this build configuration."""

    def make_log_name(self):
        """Compose log file name for this build."""
        return "build-%s.out" % self.name()

class Build(metaclass=ABCMeta):
    """A pending or ongoing build, in its own directory.

    Each step writes its output to the build's log file, and raises an
    exception on failure.

    These classes must be suitable for pickling, so we can send their objects
    to worker processes.
    """
    def __init__(self, logs_dir, config=None):
        self.config = config
        self.log = os.path.join(logs_dir, config.make_log_name())
        # Start a fresh log file.
        with open(self.log, 'w') as log:
            log.write("Starting %s.\n" % datetime.utcnow())
        self.work_dir = mkdtemp()

    def clean_up(self):
        """Delete the build tree."""
        rmtree(self.work_dir)

    @abstractmethod
    def configure(self, log):
        """Prepare for a build."""

    @abstractmethod
    def build(self, log):
        """Build the code, including the tests. Don't run tests though."""

    def test(self, log):
        """Run tests."""
        run(
            [os.path.join(os.path.curdir, 'test', 'runner')], log,
            cwd=self.work_dir)

    def logging(self, function):
        """Call function, passing it an open write handle for `self.log`."""
        # TODO: Should probably be a decorator.
        with open(self.log, 'a') as log:
            try:
                function(log)
            except Exception as error:
                log.write("%s\n" % error)
                raise

    def do_configure(self):
        """Call `configure`, writing output to `self.log`."""
        self.logging(self.configure)

    def do_build(self):
        """Call `build`, writing output to `self.log`."""
        self.logging(self.build)

    def do_test(self):
        """Call `test`, writing output to `self.log`."""
        self.logging(self.test)

class AutotoolsConfig(Config):
    """A combination of build options for the "configure" script."""
    def __init__(self, cxx, opt, stdlib, link, link_opts, debug, debug_opts):
        self.cxx = cxx
        self.opt = opt
        self.stdlib = stdlib
        self.link = link
        self.link_opts = link_opts
        self.debug = debug
        self.debug_opts = debug_opts

    def name(self):
        return '_'.join([
            self.cxx, self.opt, self.stdlib, self.link, self.debug])

class AutotoolsBuild(Build):
    """Build using the "configure" script."""
    def configure(self, log):
        configure = [
            os.path.join(getcwd(), "configure"),
            "CXX=%s" % self.config.cxx,
        ]
        if self.config.stdlib == '':
            configure += [
                "CXXFLAGS=%s" % self.config.opt,
            ]
        else:
            configure += [
                "CXXFLAGS=%s %s" % (self.config.opt, self.config.stdlib),
                "LDFLAGS=%s" % self.config.stdlib,
            ]
        configure += [
            "--disable-documentation",
        ] + self.config.link_opts + self.config.debug_opts
        run(configure, log, cwd=self.work_dir)

    def build(self, log):
        run(['make', '-j%d' % CPUS], log, cwd=self.work_dir)
        # Passing "TESTS=" like this will suppress the actual running of
        # the tests. We run them in the "test" stage.
        run(['make', '-j%d' % CPUS, 'check', 'TESTS='], log, cwd=self.work_dir)

class CMakeConfig(Config):
    """Configuration for a CMake build."""
    def __init__(self, generator):
        self.generator = generator
        self.builder = CMAKE_GENERATORS[generator]

    def name(self):
        return "cmake"

class CMakeBuild(Build):
    """Build using CMake.

    Uses only the generator from the config.
    """
    def configure(self, log):
        source_dir = getcwd()
        generator = self.config.generator
        run(
            ['cmake', '-G', generator, source_dir], output=log,
            cwd=self.work_dir)

    def build(self, log):
        run(self.config.builder, log, cwd=self.work_dir)

def parse_args():
    """Parse command-line arguments."""
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('--verbose', '-v', action='store_true')
    parser.add_argument(
        '--compilers', '-c', default=','.join(CXX),
        help="Compilers, separated by commas. Default is %(default)s.")
    parser.add_argument(
        '--optimize', '-O', default=','.join(OPT),
        help=(
            "Alternative optimisation options, separated by commas. "
            "Default is %(default)s."))
    parser.add_argument(
        '--stdlibs', '-L', default=','.join(STDLIB),
        help=(
            "Comma-separated options for choosing standard library. "
            "Defaults to %(default)s."))
    parser.add_argument(
        '--logs', '-l', default='.', metavar='DIRECTORY',
        help="Write build logs to DIRECTORY.")
    parser.add_argument(
        '--jobs', '-j', default=CPUS, metavar='CPUS',
        help=(
            "When running 'make', run up to CPUS concurrent processes. "
            "Defaults to %(default)s."))
    parser.add_argument(
        '--minimal', '-m', action='store_true',
        help="Make it as short a run as possible. For testing this script.")
    return parser.parse_args()

def soft_get(queue, block=True):
    """Get an item off `queue`, or `None` if the queue is empty."""
    try:
        return queue.get(block)
    except Empty:
        return None


def read_queue(queue, block=True):
    """Read entries off `queue`, terminating when it gets a `None`.

    Also terminates when the queue is empty.
    """
    entry = soft_get(queue, block)
    while entry is not None:
        yield entry
        entry = soft_get(queue, block)

def service_builds(in_queue, fail_queue, out_queue):
    """Worker process for "build" stage: process one job at a time.

    Sends successful builds to `out_queue`, and failed builds to `fail_queue`.

    Terminates when it receives a `None`, at which point it will send a `None`
    into `out_queue` in turn.
    """
    for build in read_queue(in_queue):
        try:
            build.do_build()
        except Exception as error:
            fail_queue.put((build, "%s" % error))
        else:
            out_queue.put(build)
        in_queue.task_done()
    # Mark the end of the queue.
    out_queue.put(None)


def service_tests(in_queue, fail_queue, out_queue):
    """Worker process for "test" stage: test one build at a time.

    Sends successful builds to `out_queue`, and failed builds to `fail_queue`.

    Terminates when it receives a final `None`. Does not send out a final
    `None` of its own.
    """
    for build in read_queue(in_queue):
        try:
            build.do_test()
        except Exception as error:
            fail_queue.put((build, "%s" % error))
        else:
            out_queue.put(build)
        in_queue.task_done()

def report_failures(queue, message):
    """Report failures from a failure queue. Return total number."""
    failures = 0
    for build, error in read_queue(queue, block=False):
        print("%s: %s - %s" % (message, build.config.name(), error))
        failures += 1
    return failures


def count_entries(queue):
    """Get and discard all entries from `queue`, return the total count."""
    total = 0
    for _ in read_queue(queue, block=False):
        total += 1
    return total

def gather_builds(args):
    """Produce the list of builds we want to perform."""
    if args.verbose:
        print("\nChecking available compilers.")
    compiler_candidates = args.compilers.split(',')
    compilers = check_compilers(
        compiler_candidates, args.stdlibs.split(','),
        verbose=args.verbose)
    if list(compilers) == []:
        raise Fail(
            "Did not find any viable compilers. Tried: %s."
            % ', '.join(compiler_candidates))
    opt_levels = args.optimize.split(',')
    link_types = LINK.items()
    debug_mixes = DEBUG.items()

    if args.minimal:
        compilers = compilers[:1]
        opt_levels = opt_levels[:1]
        link_types = list(link_types)[:1]
        debug_mixes = list(debug_mixes)[:1]

    builds = [
        AutotoolsBuild(
            args.logs,
            AutotoolsConfig(
                opt=opt, link=link, link_opts=link_opts, debug=debug,
                debug_opts=debug_opts, cxx=cxx, stdlib=stdlib))
        for opt in sorted(opt_levels)
        for link, link_opts in sorted(link_types)
        for debug, debug_opts in sorted(debug_mixes)
        for cxx, stdlib in compilers
    ]

    cmake = find_cmake_command()
    if cmake is not None:
        builds.append(CMakeBuild(args.logs, CMakeConfig(cmake)))

    return builds

def enqueue(queue, build, *args):
    """Put `build` on `queue`.

    Ignores additional arguments, so that it can be used as a callback for
    `Pool`.

    We do this instead of a lambda in order to get the closure right. We want
    the build for the current iteration, not the last one that was executed
    before the lambda runs.
    """
    queue.put(build)


def enqueue_error(queue, build, error):
    """Put the pair of `build` and `error` on `queue`."""
    queue.put((build, error))

def main(args):
    """Do it all."""
    if not os.path.isdir(args.logs):
        raise Fail("Logs location '%s' is not a directory." % args.logs)

    builds = gather_builds(args)
    if args.verbose:
        print("Lined up %d builds." % len(builds))

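    # Overview of the pipeline set up below (each stage also feeds its
    # failures into a non-blocking failure queue of its own):
    #
    #   builds -> configure_pool -> build_queue -> build_worker -> test_queue
    #          -> test_worker -> done_queue
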
    # The "configure" step is single-threaded. We can run many at the same
    # time, even when we're also running a "build" step at the same time.
    # This means we may run a lot more processes than we have CPUs, but
    # there's no law against that. There's also I/O time to be covered.
    configure_pool = Pool()

    # Builds which have failed the "configure" stage, with their errors. This
    # queue must never stall, so that we can let results pile up here while
    # the work continues.
    configure_fails = Queue(len(builds))

    # Waiting list for the "build" stage. It contains Build objects,
    # terminated by a final None to signify that there are no more builds to
    # be done.
    build_queue = JoinableQueue(10)

    # Builds that have failed the "build" stage.
    build_fails = Queue(len(builds))

    # Waiting list for the "test" stage. It contains Build objects, terminated
    # by a final None.
    test_queue = JoinableQueue(10)

    # The "build" step tries to utilise all CPUs, and it may use a fair bit of
    # memory. Run only one of these at a time, in a single worker process.
    build_worker = Process(
        target=service_builds, args=(build_queue, build_fails, test_queue))
    build_worker.start()

    # Builds that have failed the "test" stage.
    test_fails = Queue(len(builds))

    # Completed builds. This must never stall.
    done_queue = JoinableQueue(len(builds))

    # The "test" step can not run concurrently (yet). So, run tests serially
    # in a single worker process. It takes its jobs directly from the "build"
    # worker.
    test_worker = Process(
        target=service_tests, args=(test_queue, test_fails, done_queue))
    test_worker.start()

    # Feed all builds into the "configure" pool. Each build which passes this
    # stage goes into the "build" queue.
    for build in builds:
        configure_pool.apply_async(
            build.do_configure, callback=partial(enqueue, build_queue, build),
            error_callback=partial(enqueue_error, configure_fails, build))

    if args.verbose:
        print("All jobs are underway.")

    configure_pool.close()
    configure_pool.join()
    # TODO: Async reporting for faster feedback.
    configure_fail_count = report_failures(configure_fails, "CONFIGURE FAIL")
    if args.verbose:
        print("Configure stage done.")

    # Mark the end of the build queue for the build worker.
    build_queue.put(None)
    build_worker.join()
    # TODO: Async reporting for faster feedback.
    build_fail_count = report_failures(build_fails, "BUILD FAIL")
    if args.verbose:
        print("Build step done.")

    # Mark the end of the test queue for the test worker.
    test_queue.put(None)
    test_worker.join()
    # TODO: Async reporting for faster feedback.
    # TODO: Collate failures into meaningful output, e.g. "shared library
    # fails."
    test_fail_count = report_failures(test_fails, "TEST FAIL")
    if args.verbose:
        print("Test step done.")

    # All done. Clean up.
    for build in builds:
        build.clean_up()

    ok_count = count_entries(done_queue)
    if ok_count == len(builds):
        print("All tests OK.")
    else:
        print(
            "Failures during configure: %d - build: %d - test: %d. OK: %d."
            % (
                configure_fail_count,
                build_fail_count,
                test_fail_count,
                ok_count,
            ))

if __name__ == '__main__':
    try:
        exit(main(parse_args()))
    except Fail as failure:
        stderr.write("%s\n" % failure)
        exit(2)