docker_helper.py

import os
import socket
import fnmatch
import subprocess
import multiprocessing
import json
import docker
import time
from threading import Thread

from toolset.utils import setup_util
from toolset.utils.output_helper import tee_output
from toolset.utils.metadata_helper import gather_tests
from toolset.utils.ordered_set import OrderedSet
from toolset.utils.database_helper import test_database


def clean(config):
    '''
    Cleans all the docker images from the system.
    '''
    # Clean the app server images
    subprocess.check_call(["docker", "image", "prune", "-f"])

    docker_ids = subprocess.check_output(["docker", "images",
                                          "-q"]).splitlines()
    for docker_id in docker_ids:
        subprocess.check_call(["docker", "image", "rmi", "-f", docker_id])

    subprocess.check_call(["docker", "system", "prune", "-a", "-f"])

    # Clean the database server images
    command = list(config.database_ssh_command)
    command.extend(["docker", "image", "prune", "-f"])
    subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(["docker", "images", "-q"])
    docker_ids = subprocess.check_output(command).splitlines()
    for docker_id in docker_ids:
        command = list(config.database_ssh_command)
        command.extend(["docker", "image", "rmi", "-f", docker_id])
        subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(["docker", "system", "prune", "-a", "-f"])
    subprocess.check_call(command)
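
# Note on the config.database_ssh_command pattern used above and below: the
# list is copied before extending so every remote invocation starts from the
# same SSH prefix. As an illustration only (the exact prefix is supplied by
# the benchmarker config, not by this module), it is assumed to look roughly
# like:
#
#   config.database_ssh_command = [
#       'ssh', '-i', config.database_identity_file,
#       '%s@%s' % (config.database_user, config.database_host)
#   ]
#   command = list(config.database_ssh_command)
#   command.extend(['docker', 'image', 'prune', '-f'])
#   # -> ssh -i <identity_file> <user>@<host> docker image prune -f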


def build(benchmarker_config, test_names, out):
    '''
    Builds the dependency chain as well as the test implementation docker
    images for the given tests.
    '''
    tests = gather_tests(test_names)

    for test in tests:
        docker_buildargs = {
            'CPU_COUNT': str(multiprocessing.cpu_count()),
            'MAX_CONCURRENCY': str(max(benchmarker_config.concurrency_levels)),
            'TFB_DATABASE': str(benchmarker_config.database_host)
        }

        test_docker_files = ["%s.dockerfile" % test.name]
        if test.docker_files is not None:
            if type(test.docker_files) is list:
                test_docker_files.extend(test.docker_files)
            else:
                raise Exception(
                    "docker_files in benchmark_config.json must be an array")

        for test_docker_file in test_docker_files:
            deps = OrderedSet(
                list(
                    reversed(
                        __gather_dependencies(
                            os.path.join(test.directory, test_docker_file)))))

            docker_dir = os.path.join(setup_util.get_fwroot(), "toolset",
                                      "setup", "docker")
            for dependency in deps:
                docker_file = os.path.join(test.directory,
                                           dependency + ".dockerfile")
                if not docker_file or not os.path.exists(docker_file):
                    docker_file = find(docker_dir, dependency + ".dockerfile")
                if not docker_file:
                    tee_output(
                        out,
                        "Docker build failed; %s could not be found; terminating\n"
                        % (dependency + ".dockerfile"))
                    return 1

                # Build the dependency image
                try:
                    for line in docker.APIClient(
                            base_url='unix://var/run/docker.sock').build(
                                path=os.path.dirname(docker_file),
                                dockerfile="%s.dockerfile" % dependency,
                                tag="tfb/%s" % dependency,
                                buildargs=docker_buildargs,
                                forcerm=True):
                        prev_line = os.linesep
                        # Docker streams build output as JSON objects; unwrap
                        # the 'stream' payload before echoing it to the log
                        if line.startswith('{"stream":'):
                            line = json.loads(line)
                            line = line[line.keys()[0]].encode('utf-8')
                            if prev_line.endswith(os.linesep):
                                tee_output(out, line)
                            else:
                                tee_output(out, line)
                            prev_line = line
                except Exception as e:
                    tee_output(out,
                               "Docker dependency build failed; terminating\n")
                    print(e)
                    return 1

        # Build the test images
        for test_docker_file in test_docker_files:
            try:
                for line in docker.APIClient(
                        base_url='unix://var/run/docker.sock').build(
                            path=test.directory,
                            dockerfile=test_docker_file,
                            tag="tfb/test/%s" % test_docker_file.replace(
                                ".dockerfile", ""),
                            buildargs=docker_buildargs,
                            forcerm=True):
                    prev_line = os.linesep
                    if line.startswith('{"stream":'):
                        line = json.loads(line)
                        line = line[line.keys()[0]].encode('utf-8')
                        if prev_line.endswith(os.linesep):
                            tee_output(out, line)
                        else:
                            tee_output(out, line)
                        prev_line = line
            except Exception as e:
                tee_output(out, "Docker build failed; terminating\n")
                print(e)
                return 1

    return 0
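
# Illustrative usage (not part of this module): the benchmarker is assumed to
# call build() with its config, the selected test names, and an output file
# handle, treating a non-zero return value as a failed build.
#
#   with open('build.log', 'w') as out:
#       if build(benchmarker_config, ['gemini'], out) != 0:
#           raise Exception('Docker build failed')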


def run(benchmarker_config, docker_files, out):
    '''
    Run the given Docker container(s)
    '''
    client = docker.from_env()

    for docker_file in docker_files:
        try:

            def watch_container(container):
                for line in container.logs(stream=True):
                    tee_output(out, line)

            extra_hosts = {
                socket.gethostname(): str(benchmarker_config.server_host),
                'TFB-SERVER': str(benchmarker_config.server_host),
                'TFB-DATABASE': str(benchmarker_config.database_host),
                'TFB-CLIENT': str(benchmarker_config.client_host)
            }

            container = client.containers.run(
                "tfb/test/%s" % docker_file.replace(".dockerfile", ""),
                network_mode="host",
                privileged=True,
                stderr=True,
                detach=True,
                init=True,
                extra_hosts=extra_hosts)

            watch_thread = Thread(target=watch_container, args=(container, ))
            watch_thread.daemon = True
            watch_thread.start()
        except Exception as e:
            tee_output(out,
                       "Running docker container: %s failed" % docker_file)
            print(e)
            return 1

    return 0


def successfully_running_containers(docker_files, out):
    '''
    Returns whether all the expected containers for the given docker_files are
    running.
    '''
    client = docker.from_env()

    expected_running_container_images = []
    for docker_file in docker_files:
        # 'gemini.dockerfile' -> 'gemini'
        image_tag = docker_file.split('.')[0]
        expected_running_container_images.append(image_tag)

    running_container_images = []
    for container in client.containers.list():
        # 'tfb/test/gemini:latest' -> 'gemini'
        image_tag = container.image.tags[0].split(':')[0][9:]
        running_container_images.append(image_tag)

    for image_name in expected_running_container_images:
        if image_name not in running_container_images:
            tee_output(
                out, "ERROR: Expected tfb/test/%s to be a running container" %
                image_name)
            return False
    return True
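
# Illustrative usage (assumptions noted inline): after run() detaches the test
# containers, a caller might poll successfully_running_containers() before
# starting the load generator. The docker_files value and sleep interval here
# are made up for the example.
#
#   docker_files = ['gemini.dockerfile']
#   if run(benchmarker_config, docker_files, out) == 0:
#       time.sleep(5)  # give the containers a moment to come up
#       if not successfully_running_containers(docker_files, out):
#           stop(benchmarker_config, database_container_id, test)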


def stop(config=None, database_container_id=None, test=None):
    '''
    Attempts to stop the running test containers.
    '''
    client = docker.from_env()
    # Stop all local containers except the database container
    for container in client.containers.list():
        if container.status == "running" and container.id != database_container_id:
            container.stop()
    # Remove only the tfb/test image for this test
    try:
        client.images.remove("tfb/test/%s" % test.name, force=True)
    except:
        # This can be okay if the user hit ctrl+c before the image built/ran
        pass
    # Stop the database container
    if database_container_id:
        command = list(config.database_ssh_command)
        command.extend(['docker', 'stop', database_container_id])
        subprocess.check_call(command)
    client.images.prune()
    client.containers.prune()
    client.networks.prune()
    client.volumes.prune()


def find(path, pattern):
    '''
    Recursively searches the given path and returns the first file matching
    the given pattern.
    '''
    for root, dirs, files in os.walk(path):
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                return os.path.join(root, name)
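
# For example, build() above falls back to
# find(docker_dir, dependency + ".dockerfile") when a dependency's dockerfile
# is not next to the test; the first match under docker_dir is returned as a
# full path, and None is returned implicitly if nothing matches.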


def start_database(config, database):
    '''
    Sets up a docker container for the given database on the database host and
    starts it.
    '''

    def __is_hex(s):
        try:
            int(s, 16)
        except ValueError:
            return False
        return len(s) % 2 == 0

    command = list(config.database_ssh_command)
    command.extend(['docker', 'images', '-q', database])
    out = subprocess.check_output(command)
    dbid = ''
    if len(out.splitlines()) > 0:
        dbid = out.splitlines()[len(out.splitlines()) - 1]

    # If the database image exists, then dbid will look like
    # fe12ca519b47, and we do not want to rebuild if it exists
    if len(dbid) != 12 and not __is_hex(dbid):

        def __scp_command(files):
            scpstr = ["scp", "-i", config.database_identity_file]
            for file in files:
                scpstr.append(file)
            scpstr.append("%s@%s:~/%s/" % (config.database_user,
                                           config.database_host, database))
            return scpstr

        command = list(config.database_ssh_command)
        command.extend(['mkdir', '-p', database])
        subprocess.check_call(command)

        dbpath = os.path.join(config.fwroot, "toolset", "setup", "docker",
                              "databases", database)
        dbfiles = ""
        for dbfile in os.listdir(dbpath):
            dbfiles += "%s " % os.path.join(dbpath, dbfile)
        subprocess.check_call(__scp_command(dbfiles.split()))

        command = list(config.database_ssh_command)
        command.extend([
            'docker', 'build', '-f',
            '~/%s/%s.dockerfile' % (database, database), '-t', database,
            '~/%s' % database
        ])
        subprocess.check_call(command)

    command = list(config.database_ssh_command)
    command.extend(
        ['docker', 'run', '-d', '--rm', '--init', '--network=host', database])
    docker_id = subprocess.check_output(command).strip()

    # Sleep until the database accepts connections
    slept = 0
    max_sleep = 60
    while not test_database(config, database) and slept < max_sleep:
        time.sleep(1)
        slept += 1

    return docker_id
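
# Illustrative usage (the database name and surrounding control flow are
# assumptions, not part of this module): the returned container id is what
# stop() later receives so the database is shut down over SSH rather than
# through the local Docker client.
#
#   database_container_id = start_database(config, 'postgres')
#   try:
#       pass  # build and run the test, then execute the benchmark
#   finally:
#       stop(config, database_container_id, test)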


def __gather_dependencies(docker_file):
    '''
    Gathers all the known docker dependencies for the given docker image.
    '''
    # Avoid setting up a circular import
    from toolset.utils import setup_util

    deps = []
    docker_dir = os.path.join(setup_util.get_fwroot(), "toolset", "setup",
                              "docker")

    if os.path.exists(docker_file):
        with open(docker_file) as fp:
            for line in fp.readlines():
                tokens = line.strip().split(' ')
                if tokens[0] == "FROM":
                    # ubuntu:16.04 is the base image every dependency chain
                    # ultimately points to, so recursion stops there
                    if tokens[1] != "ubuntu:16.04":
                        dep_ref = tokens[1].strip().split(':')[0].strip()
                        if '/' not in dep_ref:
                            raise AttributeError(
                                "Could not find docker FROM dependency: %s" %
                                dep_ref)
                        depToken = dep_ref.split('/')[1]
                        deps.append(depToken)
                        dep_docker_file = os.path.join(
                            os.path.dirname(docker_file),
                            depToken + ".dockerfile")
                        if not os.path.exists(dep_docker_file):
                            dep_docker_file = find(docker_dir,
                                                   depToken + ".dockerfile")
                        deps.extend(__gather_dependencies(dep_docker_file))

    return deps
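
# For example, assuming a hypothetical dependency chain (image names here are
# illustrative, not taken from the repository):
#
#   gemini.dockerfile:  FROM tfb/resin:latest
#   resin.dockerfile:   FROM tfb/java:latest
#   java.dockerfile:    FROM ubuntu:16.04
#
# __gather_dependencies('gemini.dockerfile') would return ['resin', 'java'],
# and build() reverses that list so the deepest dependency ('java') is built
# first.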