docker_helper.py

import os
import socket
import fnmatch
import subprocess
import multiprocessing
import json
import docker
import time
from threading import Thread
from toolset.utils import setup_util
from toolset.utils.output_helper import tee_output
from toolset.utils.metadata_helper import gather_tests
from toolset.utils.ordered_set import OrderedSet
from toolset.utils.database_helper import test_database


def clean():
    '''
    Cleans all the docker images from the system
    '''
    subprocess.check_call(["docker", "image", "prune", "-f"])

    docker_ids = subprocess.check_output(["docker", "images",
                                          "-q"]).splitlines()
    for docker_id in docker_ids:
        subprocess.check_call(["docker", "image", "rm", "-f", docker_id])

    subprocess.check_call(["docker", "system", "prune", "-a", "-f"])
def build(benchmarker_config, test_names, out):
    '''
    Builds the dependency chain as well as the test implementation docker images
    for the given tests.
    '''
    tests = gather_tests(test_names)

    for test in tests:
        docker_buildargs = {
            'CPU_COUNT': str(multiprocessing.cpu_count()),
            'MAX_CONCURRENCY': str(max(benchmarker_config.concurrency_levels)),
            'TFB_DATABASE': str(benchmarker_config.database_host)
        }

        test_docker_files = ["%s.dockerfile" % test.name]
        if test.docker_files is not None:
            if type(test.docker_files) is list:
                test_docker_files.extend(test.docker_files)
            else:
                raise Exception(
                    "docker_files in benchmark_config.json must be an array")

        for test_docker_file in test_docker_files:
            deps = OrderedSet(
                list(
                    reversed(
                        __gather_dependencies(
                            os.path.join(test.directory, test_docker_file)))))

            docker_dir = os.path.join(setup_util.get_fwroot(), "toolset",
                                      "setup", "docker")
            for dependency in deps:
                docker_file = os.path.join(test.directory,
                                           dependency + ".dockerfile")
                if not docker_file or not os.path.exists(docker_file):
                    docker_file = find(docker_dir, dependency + ".dockerfile")
                if not docker_file:
                    tee_output(
                        out,
                        "Docker build failed; %s could not be found; terminating\n"
                        % (dependency + ".dockerfile"))
                    return 1

                # Build the dependency image. The build API streams JSON
                # progress events; echo the "stream" text as it arrives.
                try:
                    for line in docker.APIClient(
                            base_url='unix://var/run/docker.sock').build(
                                path=os.path.dirname(docker_file),
                                dockerfile="%s.dockerfile" % dependency,
                                tag="tfb/%s" % dependency,
                                buildargs=docker_buildargs,
                                forcerm=True):
                        if line.startswith('{"stream":'):
                            line = json.loads(line)
                            line = line[list(line.keys())[0]].encode('utf-8')
                            tee_output(out, line)
                except Exception as e:
                    tee_output(out,
                               "Docker dependency build failed; terminating\n")
                    print(e)
                    return 1

        # Build the test images
        for test_docker_file in test_docker_files:
            try:
                for line in docker.APIClient(
                        base_url='unix://var/run/docker.sock').build(
                            path=test.directory,
                            dockerfile=test_docker_file,
                            tag="tfb/test/%s" % test_docker_file.replace(
                                ".dockerfile", ""),
                            buildargs=docker_buildargs,
                            forcerm=True):
                    if line.startswith('{"stream":'):
                        line = json.loads(line)
                        line = line[list(line.keys())[0]].encode('utf-8')
                        tee_output(out, line)
            except Exception as e:
                tee_output(out, "Docker build failed; terminating\n")
                print(e)
                return 1

    return 0
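
# A rough sketch of the image-naming convention that build() implements,
# using a hypothetical test named "gemini" (the name is illustrative only,
# not taken from this module):
#
#   gemini.dockerfile                 -> image "tfb/test/gemini"
#   FROM tfb/java (a dependency file) -> built first, as image "tfb/java"
#
# build() walks the FROM chain via __gather_dependencies(), builds each
# dependency with the "tfb/" prefix, then builds the test image itself with
# the "tfb/test/" prefix.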
def run(benchmarker_config, docker_files, out):
    '''
    Run the given Docker container(s)
    '''
    client = docker.from_env()

    for docker_file in docker_files:
        try:

            def watch_container(container):
                # Stream the container's logs back to the caller's output
                for line in container.logs(stream=True):
                    tee_output(out, line)

            extra_hosts = {
                socket.gethostname(): str(benchmarker_config.server_host),
                'TFB-SERVER': str(benchmarker_config.server_host),
                'TFB-DATABASE': str(benchmarker_config.database_host),
                'TFB-CLIENT': str(benchmarker_config.client_host)
            }

            container = client.containers.run(
                "tfb/test/%s" % docker_file.replace(".dockerfile", ""),
                network_mode="host",
                privileged=True,
                stderr=True,
                detach=True,
                init=True,
                extra_hosts=extra_hosts)

            watch_thread = Thread(target=watch_container, args=(container, ))
            watch_thread.daemon = True
            watch_thread.start()
        except Exception as e:
            tee_output(out,
                       "Running docker container: %s failed" % docker_file)
            print(e)
            return 1

    return 0
def successfully_running_containers(docker_files, database_container_id, out):
    '''
    Returns whether all the expected containers for the given docker_files are
    running.
    '''
    client = docker.from_env()
    running_container_length = len(
        client.containers.list(filters={'status': 'running'}))
    expected_length = len(docker_files)
    if database_container_id is not None:
        expected_length = expected_length + 1
    if running_container_length != expected_length:
        tee_output(out, "Running Containers (id, name):" + os.linesep)
        for running_container in client.containers.list():
            tee_output(out, "%s, %s%s" % (running_container.short_id,
                                          running_container.image, os.linesep))
        tee_output(out, "Expected %s running containers; saw %s%s" %
                   (expected_length, running_container_length, os.linesep))
        return False
    return True
def stop(config, database_container_id, test, out):
    '''
    Attempts to stop the running test container.
    '''
    client = docker.from_env()
    # Stop all the containers except the database container
    for container in client.containers.list():
        if container.status == "running" and container.id != database_container_id:
            container.stop()
    # Remove only the tfb/test image for this test
    try:
        client.images.remove("tfb/test/%s" % test.name, force=True)
    except:
        # This can be okay if the user hit ctrl+c before the image built/ran
        pass
    # Stop the database container
    if database_container_id:
        command = list(config.database_ssh_command)
        command.extend(['docker', 'stop', database_container_id])
        subprocess.check_call(command)
    client.images.prune()
    client.containers.prune()
    client.networks.prune()
    client.volumes.prune()
def find(path, pattern):
    '''
    Returns the path of the first file matching the given pattern, searching
    the given path recursively.
    '''
    for root, dirs, files in os.walk(path):
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                return os.path.join(root, name)
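
# For example (the path below is illustrative, not guaranteed to exist):
#
#   find(docker_dir, "postgres.dockerfile")
#   # -> ".../toolset/setup/docker/databases/postgres/postgres.dockerfile"
#
# find() returns None when nothing matches; build() checks for that falsy
# result before attempting a dependency build.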
def start_database(config, database):
    '''
    Sets up a container for the given database and port, and starts said docker
    container.
    '''

    def __is_hex(s):
        try:
            int(s, 16)
        except ValueError:
            return False
        return len(s) % 2 == 0

    command = list(config.database_ssh_command)
    command.extend(['docker', 'images', '-q', database])
    out = subprocess.check_output(command)
    dbid = ''
    if len(out.splitlines()) > 0:
        dbid = out.splitlines()[len(out.splitlines()) - 1]

    # If the database image exists, then dbid will look like
    # fe12ca519b47, and we do not want to rebuild if it exists
    if len(dbid) != 12 and not __is_hex(dbid):

        def __scp_command(files):
            scpstr = ["scp", "-i", config.database_identity_file]
            for file in files:
                scpstr.append(file)
            scpstr.append("%s@%s:~/%s/" % (config.database_user,
                                           config.database_host, database))
            return scpstr

        command = list(config.database_ssh_command)
        command.extend(['mkdir', '-p', database])
        subprocess.check_call(command)

        dbpath = os.path.join(config.fwroot, "toolset", "setup", "docker",
                              "databases", database)
        dbfiles = ""
        for dbfile in os.listdir(dbpath):
            dbfiles += "%s " % os.path.join(dbpath, dbfile)
        subprocess.check_call(__scp_command(dbfiles.split()))

        command = list(config.database_ssh_command)
        command.extend([
            'docker', 'build', '-f',
            '~/%s/%s.dockerfile' % (database, database), '-t', database,
            '~/%s' % database
        ])
        subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(
        ['docker', 'run', '-d', '--rm', '--init', '--network=host', database])
    docker_id = subprocess.check_output(command).strip()

    # Sleep until the database accepts connections
    slept = 0
    max_sleep = 60
    while not test_database(config, database) and slept < max_sleep:
        time.sleep(1)
        slept += 1

    return docker_id
def __gather_dependencies(docker_file):
    '''
    Gathers all the known docker dependencies for the given docker image.
    '''
    # Avoid setting up a circular import
    from toolset.utils import setup_util

    deps = []
    docker_dir = os.path.join(setup_util.get_fwroot(), "toolset", "setup",
                              "docker")
    if os.path.exists(docker_file):
        with open(docker_file) as fp:
            for line in fp.readlines():
                tokens = line.strip().split(' ')
                if tokens[0] == "FROM":
                    # ubuntu:16.04 is the base image; anything else named in a
                    # FROM line is expected to be one of our tfb/* images
                    if tokens[1] != "ubuntu:16.04":
                        dep_ref = tokens[1].strip().split(':')[0].strip()
                        if '/' not in dep_ref:
                            raise AttributeError(
                                "Could not find docker FROM dependency: %s" %
                                dep_ref)
                        depToken = dep_ref.split('/')[1]
                        deps.append(depToken)
                        dep_docker_file = os.path.join(
                            os.path.dirname(docker_file),
                            depToken + ".dockerfile")
                        if not os.path.exists(dep_docker_file):
                            dep_docker_file = find(docker_dir,
                                                   depToken + ".dockerfile")
                        deps.extend(__gather_dependencies(dep_docker_file))
    return deps
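
# A minimal sketch of how these helpers appear to fit together. The driver
# code lives elsewhere in the toolset; the names `config`, `out`, `test`, and
# the test/database names below are assumptions for illustration only:
#
#   database_container_id = start_database(config, "postgres")  # if needed
#   if build(config, ["some-test"], out) == 0:
#       if run(config, ["some-test.dockerfile"], out) == 0:
#           successfully_running_containers(["some-test.dockerfile"],
#                                           database_container_id, out)
#           # ... benchmark, then tear down ...
#           stop(config, database_container_id, test, out)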