utils.py

import subprocess
import time
import logging


class ShellUtils():

    def __init__(self, directory, outfile, errfile, logger=None):
        '''
        outfile: A file-like object to write command output to.
                 Must have a write(string) method. Common choices are
                 files, sys.stdout, or WrapLogger objects
        errfile: See outfile
        logger : If provided, used instead of outfile/errfile for
                 finer-grained logging
        '''
        # Advanced notes: outfile and errfile do *not* have to be
        # thread-safe objects. They are only ever written to from one
        # thread at a time *unless* someone calls sh_async twice with
        # the same ShellUtils
        self.directory = directory
        self.outfile = outfile
        self.errfile = errfile
        self.logger = logger

    def __write_out(self, message, level=logging.INFO, stream=None):
        if self.logger:
            self.logger.log(level, message)
        elif stream is None:
            self.outfile.write(message)
        else:
            stream.write(message)

    def __write_err(self, message, level=logging.ERROR):
        self.__write_out(message, level, stream=self.errfile)

    def sh(self, command, **kwargs):
        '''Run a shell command, sending output to outfile and errfile.
        Blocks until the command exits.'''
        kwargs.setdefault('cwd', self.directory)
        kwargs.setdefault('executable', '/bin/bash')
        self.__write_out("Running %s (cwd=%s)" % (command, kwargs.get('cwd')))
        try:
            output = subprocess.check_output(command, shell=True, stderr=self.errfile, **kwargs)
            if output and output.strip():
                self.__write_out("Output:")
                self.__write_out(output.rstrip('\n'))
            else:
                self.__write_out("No Output")
        except subprocess.CalledProcessError:
            self.__write_err("Command returned non-zero exit code: %s" % command)

    # TODO modify this to start the subcommand as a new process group, so that
    # we can automatically kill the entire group!
    def sh_async(self, command, initial_logs=True, **kwargs):
        '''Run a shell command, sending output to outfile and errfile.
        If initial_logs, prints out logs for a few seconds before returning.'''
        # TODO add this - '''Continues to send output until command completes'''
        kwargs.setdefault('cwd', self.directory)
        # Open in line-buffered mode (bufsize=1) because NonBlockingStreamReader uses readline anyway
        self.__write_out("Running %s (cwd=%s)" % (command, kwargs.get('cwd')))
        process = subprocess.Popen(command, bufsize=1, shell=True,
                                   stderr=subprocess.PIPE, stdout=subprocess.PIPE, **kwargs)
        nbsr = NonBlockingStreamReader(process.stdout)
        nbsr_err = NonBlockingStreamReader(process.stderr)
        if initial_logs:
            time.sleep(8)
            # TODO put this read into a tight loop to prevent deadlock due to
            # filling up OS buffers
            out = nbsr.read()
            if len(out) == 0:
                self.outfile.write("No output")
            else:
                self.outfile.write("Initial Output:")
                for line in out:
                    self.outfile.write(line.rstrip('\n'))
            err = nbsr_err.read()
            if len(err) != 0:
                self.errfile.write("Initial Error Logs:")
                for line in err:
                    self.errfile.write(line.rstrip('\n'))
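
# Illustrative usage sketch for ShellUtils, kept as a comment so it does not
# run on import. The directory and commands below are examples only, not part
# of the original module:
#
#   import sys
#   shell = ShellUtils('/tmp', sys.stdout, sys.stderr)
#   shell.sh('make test')          # blocks until the command exits
#   shell.sh_async('./server.sh')  # prints roughly 8 seconds of initial logs, then returns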

from threading import Thread
from Queue import Queue, Empty


# TODO - no need to use a daemon, kill this off in stop!
# NOTE - it is safe to use the logging module in a multi-threaded
# system, but not safe to log to the same file across multiple
# processes. Our system has two main processes (main and __run_test),
# and lots of minor ones from 'subprocess'. As long as we only use
# one logger inside TestRunner and NonBlockingFoo, we are good
# Credit: http://eyalarubas.com/python-subproc-nonblock.html
class NonBlockingStreamReader:

    def __init__(self, stream):
        self._s = stream
        self._q = Queue()

        def _populateQueue(stream, queue):
            for line in iter(stream.readline, b''):
                queue.put(line)

        self._t = Thread(target=_populateQueue,
                         args=(self._s, self._q))
        self._t.daemon = True
        self._t.start()  # start collecting lines from the stream

    # TODO - This is only returning one line, if it is available.
    def readline(self, timeout=None):
        try:
            return self._q.get(block=timeout is not None,
                               timeout=timeout)
        except Empty:
            return None

    def read(self):
        lines = []
        while True:
            line = self.readline(0.1)
            if not line:
                return lines
            lines.append(line)
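
# Illustrative sketch of using NonBlockingStreamReader on its own, kept as a
# comment so it does not run on import. The command and sleep interval are
# examples only:
#
#   proc = subprocess.Popen('ping -c 5 localhost', shell=True, bufsize=1,
#                           stdout=subprocess.PIPE)
#   reader = NonBlockingStreamReader(proc.stdout)
#   time.sleep(1)
#   for line in reader.read():  # returns whatever lines have arrived so far
#       print line.rstrip('\n')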

import tempfile


class WrapLogger():
    """
    Used to convert a Logger into a file-like object. Adds easy integration
    of a Logger into subprocess, which takes file parameters for stdout
    and stderr.

    Use:
        (out, err) = WrapLogger(logger, logging.INFO), WrapLogger(logger, logging.ERROR)
        subprocess.Popen(command, stdout=out, stderr=err)

    Note: When used with subprocess, this cannot guarantee that output will appear
    in real time. This is because subprocess tends to bypass the write() method and
    access the underlying file directly. This class will eventually collect any output
    that was sent directly to the file, but it cannot do so in real time.
    Practically, this limitation means that WrapLogger is safe to use with
    all synchronous subprocess calls, but it will lag heavily with
    subprocess.Popen calls.
    """
    # Note - Someone awesome with python could make this fully implement the file
    # interface, and remove the real-time limitation

    def __init__(self, logger, level):
        self.logger = logger
        self.level = level
        self.file = tempfile.TemporaryFile()

    def write(self, message):
        self.logger.log(self.level, message)

    def __getattr__(self, name):
        return getattr(self.file, name)

    def __del__(self):
        """Grabs any output that was written directly to the file (e.g. bypassing
        the write method). subprocess.call, Popen, etc. have a habit of accessing
        the file directly for faster writing. See http://bugs.python.org/issue1631
        """
        self.file.seek(0)
        for line in self.file.readlines():
            self.logger.log(self.level, line.rstrip('\n'))
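
# Illustrative sketch combining WrapLogger with ShellUtils, kept as a comment so
# it does not run on import. The logger name and command are examples only:
#
#   logger = logging.getLogger('shell')
#   shell = ShellUtils('/tmp',
#                      WrapLogger(logger, logging.INFO),
#                      WrapLogger(logger, logging.ERROR))
#   shell.sh('ls')  # command output is routed into the logging system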

class Header():
    """
    Formats a message between 80-character top and bottom rule lines.
    """

    def __init__(self, message, top='-', bottom='-'):
        self.message = message
        self.top = top
        self.bottom = bottom

    def __str__(self):
        topheader = self.top * 80
        topheader = topheader[:80]
        bottomheader = self.bottom * 80
        bottomheader = bottomheader[:80]
        return "\n%s\n %s\n%s" % (topheader, self.message, bottomheader)
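
# Minimal, runnable demo, guarded so it only executes when this module is run
# directly. The header text and echo command are examples only, not part of the
# original module:
if __name__ == '__main__':
    import sys
    # Frame the demo output with an 80-character '=' rule above and below.
    sys.stdout.write(str(Header("ShellUtils demo", top='=', bottom='=')) + '\n')
    demo = ShellUtils('.', sys.stdout, sys.stderr)
    demo.sh('echo hello from ShellUtils')  # blocks until the command exits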