Procházet zdrojové kódy

Create ShellUtils class, use inside of TestRunner

The only reason to separate out ShellUtils is so setup.py
files can quickly use this new utility code without having to transition
over to being a subclass of TestRunner first. I don't think that transition
is hard at all, but I haven't looked at every setup.py, so I want to make
the process as easy as possible
Hamilton Turner před 11 roky
rodič
revize
59710637e4
2 změnil soubory, kde provedl 112 přidání a 70 odebrání
  1. 9 70
      toolset/benchmark/test_runner.py
  2. 103 0
      toolset/benchmark/utils.py

+ 9 - 70
toolset/benchmark/test_runner.py

@@ -1,102 +1,41 @@
 import time
 import logging
+from utils import WrapLogger
+from utils import ShellUtils
 
 class TestRunner:
   iAmTestRunnerClass = True
 
-  def sh(self, command, **kwargs):
-    kwargs.setdefault('cwd', self.dir)
-    self.stdout.write("Running %s (cwd=%s)" % (command, kwargs.get('cwd')))
-    try:
-      output = subprocess.check_output(command, shell=True, **kwargs)
-      if output and output.strip():
-        self.stdout.write("Output:")
-        self.stdout.write(output.rstrip('\n'))
-      else:
-        self.stdout.write("No Output")
-    except subprocess.CalledProcessError:
-      self.stdout.write("Process Returned non-zero exit code")
   def __init__(self, test, target, logger):
     self.test = test
     self.target = target
     self.logger = logger
 
-  def sh_async(self, command, initial_logs=True, **kwargs):
-    kwargs.setdefault('cwd', self.dir)
-    self.stdout.write("Running %s (cwd=%s)" % (command, kwargs.get('cwd')))
-    # Open in line-buffered mode, as NonBlockingStreamReader uses readline anyways
-    process = subprocess.Popen(command, bufsize=1, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE, **kwargs)
-    nbsr = NonBlockingStreamReader(process.stdout)
-    nbsr_err = NonBlockingStreamReader(process.stderr) 
-    if initial_logs:
-      time.sleep(8)
-      # TODO put this read into a tight loop to prevent deadlock due to 
-      # filling up OS buffers
-      out = nbsr.read()
-      err = nbsr_err.read()
-      if len(out) != 0:
-        self.stdout.write("Initial Output:")
-	for line in out:
-          self.stdout.write(line.rstrip('\n'))
-      else:
-        self.stdout.write("No output")
-      if len(err) != 0:
-        self.stdout.write("Initial Error Logs")
-	for line in err:
-          self.stdout.write(line.rstrip('\n'))
     # Create convenience variables to decouple the 
     # setup.py scripts from the internals of TFB
     self.benchmarker = test.benchmarker
     self.database_host = self.benchmarker.database_host
     self.dir = test.directory
+    
+    (out, err) = WrapLogger(logger, logging.INFO), WrapLogger(logger, logging.ERROR)
+    self.utils = ShellUtils(self.dir, out, err)
 
   def start(self):
     raise NotImplementedError()
 
-from threading import Thread
-from Queue import Queue, Empty
-
-# TODO - no need to use a daemon, kill this off in stop!
-# NOTE - it is safe to use logging module in a multi-threaded
-# system, but not safe to log to the same file across multiple 
-# processes. Our system has two main processes (main and __run_test), 
-# and lots of minor ones from 'subprocess'. As long as we only use
-# one logger inside TestRunner and NonBlockingFoo, sd are good
-# Add credit for http://eyalarubas.com/python-subproc-nonblock.html
-class NonBlockingStreamReader:
-  def __init__(self, stream):
-    self._s = stream
-    self._q = Queue()
   def stop(self):
     raise NotImplementedError()
 
-    def _populateQueue(stream, queue):
-      for line in iter(stream.readline, b''):
-        queue.put(line)
+  def sh(self, command, **kwargs):
+    self.utils.sh(command, **kwargs)
 
-    self._t = Thread(target = _populateQueue,
-                args = (self._s, self._q))
-    self._t.daemon = True
-    self._t.start() #start collecting lines from the stream
+  def sh_async(self, command, **kwargs):
+    self.utils.sh_async(command, **kwargs)
 
-  # TODO  - This is only returning one line, if it is available. 
-  def readline(self, timeout = None):
   @staticmethod
   def is_parent_of(target_class):
     ''' Checks if provided class object is a subclass of TestRunner '''
     try:
-      return self._q.get(block = timeout is not None,
-                 timeout = timeout)
-    except Empty:
-      return None
-  
-  def read(self):
-    lines = []
-    while True:
-      line = self.readline(0.1)
-      if not line:
-        return lines
-      lines.append(line)
       # issubclass will not work, as setup_module is loaded in different 
       # global context and therefore has a different copy of the module 
       # test_runner. A cheap trick is just to check for this attribute

+ 103 - 0
toolset/benchmark/utils.py

@@ -1,6 +1,109 @@
 import tempfile
 
+import subprocess
+import time
+class ShellUtils():
+  def __init__(self, directory, outfile, errfile):
+    '''
+    outfile: A file-like object to write command output to. 
+             Must have write(string) method. Common choices are 
+             files, sys.stdout, or WrapLogger objects
+    errfile: See outfile 
+    '''
+    # Advanced notes: outfile and errfile do *not* have to be 
+    # thread-safe objects. They are only ever written to from one 
+    # thread at a time *unless* someone calls sh_async twice with 
+    # the same ShellUtils
+    self.directory = directory
+    self.outfile = outfile
+    self.errfile = errfile
+  
+  def sh(self, command, **kwargs):
+    '''Run a shell command, sending output to outfile and errfile.
+    Blocks until command exits'''
+    kwargs.setdefault('cwd', self.directory)
+    kwargs.setdefault('executable', '/bin/bash')
+    self.outfile.write("Running %s (cwd=%s)" % (command, kwargs.get('cwd')))
+    try:
+      output = subprocess.check_output(command, shell=True, stderr=self.errfile, **kwargs)
+      if output and output.strip():
+        self.outfile.write("Output:")
+        self.outfile.write(output.rstrip('\n'))
+      else:
+        self.outfile.write("No Output")
+    except subprocess.CalledProcessError:
+      self.errfile.write("Command returned non-zero exit code: %s" % command)
 
+  # TODO modify this to start the subcommand as a new process group, so that 
+  # we can automatically kill the entire group!
+  def sh_async(self, command, initial_logs=True, **kwargs):
+    '''Run a shell command, sending output to outfile and errfile.
+    If initial_logs, prints out logs for a few seconds before returning. '''
+    # TODO add this - '''Continues to send output until command completes'''
+    kwargs.setdefault('cwd', self.directory)
+    self.outfile.write("Running %s (cwd=%s)" % (command, kwargs.get('cwd')))
+    
+    # Open in line-buffered mode (bufsize=1) because NonBlockingStreamReader uses readline anyway
+    process = subprocess.Popen(command, bufsize=1, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE, **kwargs)
+    nbsr = NonBlockingStreamReader(process.stdout)
+    nbsr_err = NonBlockingStreamReader(process.stderr) 
+    if initial_logs:
+      time.sleep(8)
+      # TODO put this read into a tight loop to prevent deadlock due to 
+      # filling up OS buffers
+      out = nbsr.read()
+      if len(out) == 0:
+        self.outfile.write("No output")
+      else: 
+        self.outfile.write("Initial Output:")
+        for line in out:
+            self.outfile.write(line.rstrip('\n'))
+        
+      err = nbsr_err.read()
+      if len(err) != 0:
+        self.errfile.write("Initial Error Logs:")
+        for line in err:
+          self.errfile.write(line.rstrip('\n'))
+
+from threading import Thread
+from Queue import Queue, Empty
+
+# TODO - no need to use a daemon, kill this off in stop!
+# NOTE - it is safe to use logging module in a multi-threaded
+# system, but not safe to log to the same file across multiple 
+# processes. Our system has two main processes (main and __run_test), 
+# and lots of minor ones from 'subprocess'. As long as we only use
+# one logger inside TestRunner and NonBlockingFoo, we are good
+# Add credit for http://eyalarubas.com/python-subproc-nonblock.html
+class NonBlockingStreamReader:
+  def __init__(self, stream):
+    self._s = stream
+    self._q = Queue()
+
+    def _populateQueue(stream, queue):
+      for line in iter(stream.readline, b''):
+        queue.put(line)
+
+    self._t = Thread(target = _populateQueue,
+                args = (self._s, self._q))
+    self._t.daemon = True
+    self._t.start() #start collecting lines from the stream
+
+  # TODO  - This is only returning one line, if it is available. 
+  def readline(self, timeout = None):
+    try:
+      return self._q.get(block = timeout is not None,
+                 timeout = timeout)
+    except Empty:
+      return None
+  
+  def read(self):
+    lines = []
+    while True:
+      line = self.readline(0.1)
+      if not line:
+        return lines
+      lines.append(line)
 class WrapLogger():
   """
   Used to convert a Logger into file streams. Adds easy integration