docker_helper.py

import os
import socket
import fnmatch
import subprocess
import multiprocessing
import json
import docker
import time
import re
import traceback
from threading import Thread
from colorama import Fore, Style
from toolset.utils.output_helper import log, FNULL
from toolset.utils.metadata_helper import gather_tests
from toolset.utils.ordered_set import OrderedSet
from toolset.utils.database_helper import test_database


def clean(config):
    '''
    Cleans all the docker images from the system
    '''
    # Clean the app server images
    subprocess.check_call(["docker", "image", "prune", "-f"])

    docker_ids = subprocess.check_output(["docker", "images",
                                          "-q"]).splitlines()
    for docker_id in docker_ids:
        subprocess.check_call(["docker", "image", "rmi", "-f", docker_id])

    subprocess.check_call(["docker", "system", "prune", "-a", "-f"])

    # Clean the database server images
    command = list(config.database_ssh_command)
    command.extend(["docker", "image", "prune", "-f"])
    subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(["docker", "images", "-q"])
    docker_ids = subprocess.check_output(command).splitlines()
    for docker_id in docker_ids:
        command = list(config.database_ssh_command)
        command.extend(["docker", "image", "rmi", "-f", docker_id])
        subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(["docker", "system", "prune", "-a", "-f"])
    subprocess.check_call(command)


def build(benchmarker_config, test_names, build_log_dir=os.devnull):
    '''
    Builds the dependency chain as well as the test implementation docker
    images for the given tests.
    '''
    tests = gather_tests(test_names)

    for test in tests:
        log_prefix = "%s: " % test.name

        docker_buildargs = {
            'MAX_CONCURRENCY': str(max(benchmarker_config.concurrency_levels)),
            'TFB_DATABASE': str(benchmarker_config.database_host)
        }

        test_docker_files = ["%s.dockerfile" % test.name]
        if test.docker_files is not None:
            if type(test.docker_files) is list:
                test_docker_files.extend(test.docker_files)
            else:
                raise Exception(
                    "docker_files in benchmark_config.json must be an array")

        for test_docker_file in test_docker_files:
            deps = OrderedSet(
                list(
                    reversed(
                        __gather_dependencies(
                            os.path.join(test.directory, test_docker_file)))))

            docker_dir = os.path.join(
                os.getenv('FWROOT'), "toolset", "setup", "docker")
            for dependency in deps:
                build_log_file = build_log_dir
                if build_log_dir is not os.devnull:
                    build_log_file = os.path.join(
                        build_log_dir, "%s.log" % dependency.lower())
                with open(build_log_file, 'w') as build_log:
                    docker_file = os.path.join(test.directory,
                                               dependency + ".dockerfile")
                    if not docker_file or not os.path.exists(docker_file):
                        docker_file = find(docker_dir,
                                           dependency + ".dockerfile")
                    if not docker_file:
                        log("Docker build failed; %s could not be found; terminating"
                            % (dependency + ".dockerfile"),
                            prefix=log_prefix,
                            file=build_log,
                            color=Fore.RED)
                        return 1

                    # Build the dependency image
                    try:
                        for line in docker.APIClient(
                                base_url='unix://var/run/docker.sock').build(
                                    path=os.path.dirname(docker_file),
                                    dockerfile="%s.dockerfile" % dependency,
                                    tag="tfb/%s" % dependency,
                                    buildargs=docker_buildargs,
                                    forcerm=True):
                            if line.startswith('{"stream":'):
                                line = json.loads(line)
                                line = line[line.keys()[0]].encode('utf-8')
                                log(line,
                                    prefix=log_prefix,
                                    file=build_log,
                                    color=Fore.WHITE + Style.BRIGHT
                                    if re.match(r'^Step \d+\/\d+', line) else '')
                    except Exception:
                        tb = traceback.format_exc()
                        log("Docker dependency build failed; terminating",
                            prefix=log_prefix,
                            file=build_log,
                            color=Fore.RED)
                        log(tb, prefix=log_prefix, file=build_log)
                        return 1

        # Build the test images
        for test_docker_file in test_docker_files:
            build_log_file = build_log_dir
            if build_log_dir is not os.devnull:
                build_log_file = os.path.join(
                    build_log_dir, "%s.log" % test_docker_file.replace(
                        ".dockerfile", "").lower())
            with open(build_log_file, 'w') as build_log:
                try:
                    for line in docker.APIClient(
                            base_url='unix://var/run/docker.sock').build(
                                path=test.directory,
                                dockerfile=test_docker_file,
                                tag="tfb/test/%s" % test_docker_file.replace(
                                    ".dockerfile", ""),
                                buildargs=docker_buildargs,
                                forcerm=True):
                        if line.startswith('{"stream":'):
                            line = json.loads(line)
                            line = line[line.keys()[0]].encode('utf-8')
                            log(line,
                                prefix=log_prefix,
                                file=build_log,
                                color=Fore.WHITE + Style.BRIGHT
                                if re.match(r'^Step \d+\/\d+', line) else '')
                except Exception:
                    tb = traceback.format_exc()
                    log("Docker build failed; terminating",
                        prefix=log_prefix,
                        file=build_log,
                        color=Fore.RED)
                    log(tb, prefix=log_prefix, file=build_log)
                    return 1

    return 0
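
# Usage sketch for build() (illustrative only, not part of the original
# module). The test name and log directory below are hypothetical; the
# benchmarker config is assumed to expose concurrency_levels and
# database_host, as used above.
#
#   if build(benchmarker_config, ['gemini'], build_log_dir='/tmp/build-logs') != 0:
#       raise Exception("docker image build failed")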


def run(benchmarker_config, docker_files, run_log_dir):
    '''
    Run the given Docker container(s)
    '''
    client = docker.from_env()

    for docker_file in docker_files:
        log_prefix = "%s: " % docker_file.replace(".dockerfile", "")
        try:

            def watch_container(container, docker_file):
                with open(
                        os.path.join(
                            run_log_dir, "%s.log" % docker_file.replace(
                                ".dockerfile", "").lower()), 'w') as run_log:
                    for line in container.logs(stream=True):
                        log(line, prefix=log_prefix, file=run_log)

            extra_hosts = {
                socket.gethostname(): str(benchmarker_config.server_host),
                'TFB-SERVER': str(benchmarker_config.server_host),
                'TFB-DATABASE': str(benchmarker_config.database_host),
                'TFB-CLIENT': str(benchmarker_config.client_host)
            }

            container = client.containers.run(
                "tfb/test/%s" % docker_file.replace(".dockerfile", ""),
                network_mode="host",
                privileged=True,
                stderr=True,
                detach=True,
                init=True,
                extra_hosts=extra_hosts)

            watch_thread = Thread(
                target=watch_container, args=(
                    container,
                    docker_file,
                ))
            watch_thread.daemon = True
            watch_thread.start()

        except Exception:
            with open(
                    os.path.join(run_log_dir, "%s.log" % docker_file.replace(
                        ".dockerfile", "").lower()), 'w') as run_log:
                tb = traceback.format_exc()
                log("Running docker container: %s failed" % docker_file,
                    prefix=log_prefix,
                    file=run_log)
                log(tb, prefix=log_prefix, file=run_log)
                return 1

    return 0


def successfully_running_containers(docker_files, out):
    '''
    Returns whether all the expected containers for the given docker_files are
    running.
    '''
    client = docker.from_env()

    expected_running_container_images = []
    for docker_file in docker_files:
        # 'gemini.dockerfile' -> 'gemini'
        image_tag = docker_file.split('.')[0]
        expected_running_container_images.append(image_tag)

    running_container_images = []
    for container in client.containers.list():
        # 'tfb/test/gemini:latest' -> 'gemini'
        image_tag = container.image.tags[0].split(':')[0][9:]
        running_container_images.append(image_tag)

    for image_name in expected_running_container_images:
        if image_name not in running_container_images:
            log_prefix = "%s: " % image_name
            log("ERROR: Expected tfb/test/%s to be a running container" %
                image_name,
                prefix=log_prefix,
                file=out)
            return False
    return True


def stop(config=None, database_container_id=None, test=None):
    '''
    Stops the running test containers, removes the test image, stops the
    database container, and prunes unused Docker resources.
    '''
    client = docker.from_env()
    # Stop all the containers except the database container
    for container in client.containers.list():
        if container.status == "running" and container.id != database_container_id:
            container.stop()
    # Remove only the tfb/test image for this test
    try:
        client.images.remove("tfb/test/%s" % test.name, force=True)
    except:
        # This can be okay if the user hit ctrl+c before the image built/ran
        pass
    # Stop the database container
    if database_container_id:
        command = list(config.database_ssh_command)
        command.extend(['docker', 'stop', database_container_id])
        subprocess.check_call(command, stdout=FNULL, stderr=subprocess.STDOUT)
    client.images.prune()
    client.containers.prune()
    client.networks.prune()
    client.volumes.prune()
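
# Illustrative lifecycle for a single test (hypothetical caller, not part of
# the original module): start the database, run the test image, verify the
# container came up, then tear everything down. 'postgres', the run_log file
# handle, and the test object are placeholders for whatever the benchmarker
# actually passes.
#
#   db_id = start_database(benchmarker_config, 'postgres')
#   docker_files = ['gemini.dockerfile']
#   if run(benchmarker_config, docker_files, '/tmp/run-logs') == 0:
#       if successfully_running_containers(docker_files, run_log):
#           pass  # the benchmarker would drive the load generator here
#   stop(config=benchmarker_config, database_container_id=db_id, test=test)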


def find(path, pattern):
    '''
    Recursively searches the given path and returns the first file matching
    the given pattern, or None if no match is found.
    '''
    for root, dirs, files in os.walk(path):
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                return os.path.join(root, name)
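
# Example of find() (illustrative): searching the shared docker directory for
# a dependency's dockerfile, as build() does above. The dependency name
# 'java' is hypothetical.
#
#   docker_dir = os.path.join(os.getenv('FWROOT'), 'toolset', 'setup', 'docker')
#   java_dockerfile = find(docker_dir, 'java.dockerfile')  # full path or None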


def start_database(config, database):
    '''
    Sets up a Docker container for the given database on the database host and
    starts it.
    '''

    def __is_hex(s):
        try:
            int(s, 16)
        except ValueError:
            return False
        return len(s) % 2 == 0

    command = list(config.database_ssh_command)
    command.extend(['docker', 'images', '-q', database])

    out = subprocess.check_output(command)
    dbid = ''
    if len(out.splitlines()) > 0:
        dbid = out.splitlines()[-1]

    # If the database image exists, then dbid will look like
    # fe12ca519b47, and we do not want to rebuild if it exists
    if len(dbid) != 12 and not __is_hex(dbid):

        def __scp_command(files):
            scpstr = ["scp", "-i", config.database_identity_file]
            for file in files:
                scpstr.append(file)
            scpstr.append("%s@%s:~/%s/" % (config.database_user,
                                           config.database_host, database))
            return scpstr

        command = list(config.database_ssh_command)
        command.extend(['mkdir', '-p', database])
        subprocess.check_call(command)

        dbpath = os.path.join(config.fwroot, "toolset", "setup", "docker",
                              "databases", database)
        dbfiles = ""
        for dbfile in os.listdir(dbpath):
            dbfiles += "%s " % os.path.join(dbpath, dbfile)
        subprocess.check_call(__scp_command(dbfiles.split()))

        command = list(config.database_ssh_command)
        command.extend([
            'docker', 'build', '-f',
            '~/%s/%s.dockerfile' % (database, database), '-t', database,
            '~/%s' % database
        ])
        subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(
        ['docker', 'run', '-d', '--rm', '--init', '--network=host', database])

    docker_id = subprocess.check_output(command).strip()

    # Sleep until the database accepts connections
    slept = 0
    max_sleep = 60
    while not test_database(config, database) and slept < max_sleep:
        time.sleep(1)
        slept += 1

    return docker_id
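
# Usage sketch for start_database() (illustrative): 'postgres' is a
# hypothetical name and must match a directory and dockerfile under
# toolset/setup/docker/databases/. The returned value is the remote container
# id, suitable for passing to stop() as database_container_id.
#
#   database_container_id = start_database(benchmarker_config, 'postgres')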


def __gather_dependencies(docker_file):
    '''
    Gathers all the known docker dependencies for the given docker image.
    '''
    deps = []

    docker_dir = os.path.join(
        os.getenv('FWROOT'), "toolset", "setup", "docker")

    if os.path.exists(docker_file):
        with open(docker_file) as fp:
            for line in fp.readlines():
                tokens = line.strip().split(' ')
                if tokens[0] == "FROM":
                    # ubuntu:16.04 is the root of every dependency chain,
                    # so stop recursing when we reach it
                    if tokens[1] != "ubuntu:16.04":
                        dep_ref = tokens[1].strip().split(':')[0].strip()
                        if '/' not in dep_ref:
                            raise AttributeError(
                                "Could not find docker FROM dependency: %s" %
                                dep_ref)
                        depToken = dep_ref.split('/')[1]
                        deps.append(depToken)
                        dep_docker_file = os.path.join(
                            os.path.dirname(docker_file),
                            depToken + ".dockerfile")
                        if not os.path.exists(dep_docker_file):
                            dep_docker_file = find(docker_dir,
                                                   depToken + ".dockerfile")
                        deps.extend(__gather_dependencies(dep_docker_file))

    return deps
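
# Illustrative walk-through of __gather_dependencies() (hypothetical files):
# if gemini.dockerfile starts with 'FROM tfb/resin:latest',
# resin.dockerfile with 'FROM tfb/java:latest', and java.dockerfile with
# 'FROM ubuntu:16.04', then
#
#   __gather_dependencies('/path/to/gemini.dockerfile')
#
# returns ['resin', 'java'], and build() reverses the list so the deepest
# dependency (java) is built first.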