Commit 4ed34189 authored by maruel@chromium.org

Directly hook sys.stdout for thread-annotated output.

In a follow-up change, we can remove all the options.stdout bookkeeping since
it is no longer necessary.

TEST=none
BUG=none

Review URL: http://codereview.chromium.org/3398008

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@59795 0039d316-1c4b-4281-b951-d872f2087c98
parent e0de9cbe
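
The technique, in miniature: replace sys.stdout with an object whose write()
buffers partial output per worker thread and emits each completed line with an
'NN>' prefix, where NN is the thread's index. A rough, self-contained sketch of
that idea (the AnnotatedStdout wrapper below is illustrative only; the actual
change patches a SoftClone of the file object in place, as shown in the diff,
so existing references to sys.stdout keep working):

  import sys
  import threading

  class AnnotatedStdout(object):
    """Buffers per-thread output and prefixes each completed line with the
    thread's index."""
    def __init__(self, out):
      self._out = out
      self._lock = threading.Lock()
      self._buffers = {}                  # thread index -> pending partial line

    def write(self, data):
      index = getattr(threading.currentThread(), 'index', None)
      if index is None:
        self._out.write(data)             # main/unindexed threads pass through
        return
      with self._lock:
        buf = self._buffers.get(index, '') + data
        while '\n' in buf:
          line, buf = buf.split('\n', 1)
          self._out.write('%d>%s\n' % (index, line))
        self._buffers[index] = buf

    def flush(self):
      self._out.flush()

  sys.stdout = AnnotatedStdout(sys.stdout)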
@@ -1173,7 +1173,8 @@ def Main(argv):
   # Make stdout auto-flush so buildbot doesn't kill us during lengthy
   # operations. Python has a strong tendency to buffer sys.stdout.
   sys.stdout = gclient_utils.MakeFileAutoFlush(sys.stdout)
+  # Make stdout annotated with the thread ids.
+  sys.stdout = gclient_utils.MakeFileAnnotated(sys.stdout)
   # Do it late so all commands are listed.
   CMDhelp.usage = ('\n\nCommands are:\n' + '\n'.join([
       ' %-10s %s' % (fn[3:], Command(fn[3:]).__doc__.split('\n')[0].strip())
...
@@ -297,8 +297,10 @@ def CheckCallAndFilterAndHeader(args, always=False, **kwargs):
 def SoftClone(obj):
   """Clones an object. copy.copy() doesn't work on 'file' objects."""
-  class NewObject(object): pass
-  new_obj = NewObject()
+  if obj.__class__.__name__ == 'SoftCloned':
+    return obj
+  class SoftCloned(object): pass
+  new_obj = SoftCloned()
   for member in dir(obj):
     if member.startswith('_'):
       continue
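
The class-name check makes SoftClone effectively idempotent: cloning an
already-cloned object hands it back unchanged, which is what lets
MakeFileAutoFlush and MakeFileAnnotated later share a single clone and a single
lock. A rough standalone rendering of the same pattern (the lowercase
soft_clone name is illustrative, not the gclient_utils spelling):

  import sys

  def soft_clone(obj):
    # If the object already is a soft clone, hand it back unchanged.
    if obj.__class__.__name__ == 'SoftCloned':
      return obj
    class SoftCloned(object): pass
    clone = SoftCloned()
    # Shallow-copy the public attributes by reference, as gclient_utils does.
    for name in dir(obj):
      if not name.startswith('_'):
        setattr(clone, name, getattr(obj, name))
    return clone

  first = soft_clone(sys.stdout)
  second = soft_clone(first)
  assert first is second    # cloning a clone is a no-op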
@@ -314,10 +316,11 @@ def MakeFileAutoFlush(fileobj, delay=10):
     return fileobj
   new_fileobj = SoftClone(fileobj)
-  new_fileobj.lock = threading.Lock()
+  if not hasattr(new_fileobj, 'lock'):
+    new_fileobj.lock = threading.Lock()
   new_fileobj.last_flushed_at = time.time()
   new_fileobj.delay = delay
-  new_fileobj.old_auto_flush_write = fileobj.write
+  new_fileobj.old_auto_flush_write = new_fileobj.write
   # Silence pylint.
   new_fileobj.flush = fileobj.flush
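
Saving new_fileobj.write rather than fileobj.write matters once the two patches
are stacked: each wrapper chains to whatever write() the clone currently has
instead of reaching back to the raw file. A rough sketch of the composition,
mirroring what Main() does in this change:

  import sys
  import gclient_utils

  # The auto-flush patch installs a flushing write(); the annotation patch then
  # saves that write() (new_fileobj.write, not the raw fileobj.write) as its
  # downstream. The annotated write() therefore delegates to the auto-flushing
  # write(), which delegates to the real file.
  out = gclient_utils.MakeFileAutoFlush(sys.stdout)
  out = gclient_utils.MakeFileAnnotated(out)
  sys.stdout = out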
@@ -339,27 +342,68 @@ def MakeFileAutoFlush(fileobj, delay=10):
   return new_fileobj


-class StdoutAnnotated(object):
-  """Prepends every line with a string."""
-  def __init__(self, prepend, stdout):
-    self.prepend = prepend
-    self.buf = ''
-    self.stdout = stdout
-
-  def write(self, out):
-    self.buf += out
-    while '\n' in self.buf:
-      line, self.buf = self.buf.split('\n', 1)
-      self.stdout.write(self.prepend + line + '\n')
-
-  def flush(self):
-    pass
-
-  def full_flush(self):
-    if self.buf:
-      self.stdout.write(self.prepend + self.buf)
-      self.stdout.flush()
-      self.buf = ''
+def MakeFileAnnotated(fileobj):
+  """Creates a file object clone to automatically prepend every line written
+  from a worker thread with a 'NN>' prefix."""
+  if hasattr(fileobj, 'output_buffers'):
+    # Already patched.
+    return fileobj
+
+  new_fileobj = SoftClone(fileobj)
+  if not hasattr(new_fileobj, 'lock'):
+    new_fileobj.lock = threading.Lock()
+  new_fileobj.output_buffers = {}
+  new_fileobj.old_annotated_write = new_fileobj.write
+
+  def annotated_write(out):
+    index = getattr(threading.currentThread(), 'index', None)
+    if index is None:
+      # Unindexed threads aren't buffered.
+      new_fileobj.old_annotated_write(out)
+      return
+
+    new_fileobj.lock.acquire()
+    try:
+      # Use a single-item list to hold the buffer so the rest of the code can
+      # be lockless. Strings are immutable, which would otherwise require
+      # holding the lock for the whole dictionary; a list is also faster than
+      # a dummy object.
+      if not index in new_fileobj.output_buffers:
+        obj = new_fileobj.output_buffers[index] = ['']
+      else:
+        obj = new_fileobj.output_buffers[index]
+    finally:
+      new_fileobj.lock.release()
+
+    # Continue lockless.
+    obj[0] += out
+    while '\n' in obj[0]:
+      line, remaining = obj[0].split('\n', 1)
+      new_fileobj.old_annotated_write('%d>%s\n' % (index, line))
+      obj[0] = remaining
+
+  def full_flush():
+    """Flushes the buffered output of threads that no longer exist."""
+    orphans = []
+    new_fileobj.lock.acquire()
+    try:
+      # Detect threads that no longer exist.
+      indexes = (getattr(t, 'index', None) for t in threading.enumerate())
+      indexed = filter(None, indexes)
+      for index in new_fileobj.output_buffers:
+        if not index in indexed:
+          orphans.append((index, new_fileobj.output_buffers[index][0]))
+      for orphan in orphans:
+        del new_fileobj.output_buffers[orphan[0]]
+    finally:
+      new_fileobj.lock.release()
+    # Don't keep the lock while writing; note this appends a '\n' even when the
+    # orphaned buffer didn't end with one.
+    for orphan in orphans:
+      new_fileobj.old_annotated_write('%d>%s\n' % (orphan[0], orphan[1]))
+
+  new_fileobj.write = annotated_write
+  new_fileobj.full_flush = full_flush
+  return new_fileobj


 def CheckCallAndFilter(args, stdout=None, filter_fn=None,
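
To get annotated output, a thread only needs an integer index attribute;
everything it writes is then buffered and emitted line by line with its prefix.
A rough usage sketch (not code from the repository):

  import sys
  import threading
  import gclient_utils

  sys.stdout = gclient_utils.MakeFileAnnotated(sys.stdout)

  def work(n):
    # Each completed line comes out as e.g. '2>working on task 2'.
    sys.stdout.write('working on task %d\n' % n)

  threads = []
  for n in (1, 2, 3):
    t = threading.Thread(target=work, args=(n,))
    t.index = n          # MakeFileAnnotated keys the prefix off this attribute
    threads.append(t)
    t.start()
  for t in threads:
    t.join()
  sys.stdout.full_flush()  # flush anything the finished threads left buffered mid-line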
@@ -628,12 +672,10 @@ class ExecutionQueue(object):
     if self.jobs > 1:
       # Start the thread.
       index = len(self.ran) + len(self.running) + 1
-      # Copy 'options' and add annotated stdout.
+      # Copy 'options'.
       task_kwargs = kwargs.copy()
       task_kwargs['options'] = copy.copy(task_kwargs['options'])
-      task_kwargs['options'].stdout = StdoutAnnotated(
-          '%d>' % index, task_kwargs['options'].stdout)
-      new_thread = self._Worker(task_item, args, task_kwargs)
+      new_thread = self._Worker(task_item, index, args, task_kwargs)
       self.running.append(new_thread)
       new_thread.start()
     else:
@@ -646,10 +688,11 @@ class ExecutionQueue(object):
   class _Worker(threading.Thread):
     """One thread to execute one WorkItem."""
-    def __init__(self, item, args, kwargs):
+    def __init__(self, item, index, args, kwargs):
       threading.Thread.__init__(self, name=item.name or 'Worker')
       logging.info(item.name)
       self.item = item
+      self.index = index
       self.args = args
       self.kwargs = kwargs
...
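
Storing the index on the thread object itself is what removes the need to pass
an annotated stream around through options: the globally patched sys.stdout
looks the index up on the current thread. A minimal illustration (the Worker
class below is a stand-in, not ExecutionQueue._Worker):

  import sys
  import threading

  class Worker(threading.Thread):
    def __init__(self, index):
      threading.Thread.__init__(self, name='Worker%d' % index)
      self.index = index    # read back via threading.currentThread()

    def run(self):
      # Nothing annotation-specific is passed in; the patched sys.stdout
      # finds self.index on the current thread and adds the 'NN>' prefix.
      sys.stdout.write('task finished\n')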
@@ -26,9 +26,10 @@ class GclientUtilsUnittest(GclientUtilBase):
         'CheckCallAndFilterAndHeader', 'Error', 'ExecutionQueue', 'FileRead',
         'FileWrite', 'FindFileUpwards', 'FindGclientRoot',
         'GetGClientRootAndEntries', 'GetNamedNodeText', 'MakeFileAutoFlush',
-        'GetNodeNamedAttributeText', 'PathDifference', 'ParseXML', 'Popen',
+        'GetNodeNamedAttributeText', 'MakeFileAnnotated', 'PathDifference',
+        'ParseXML', 'Popen',
         'PrintableObject', 'RemoveDirectory', 'SoftClone', 'SplitUrlRevision',
-        'StdoutAnnotated', 'SyntaxErrorToError', 'WorkItem',
+        'SyntaxErrorToError', 'WorkItem',
         'copy', 'errno', 'logging', 'os', 'Queue', 're', 'stat', 'subprocess',
         'sys','threading', 'time', 'xml',
     ]
...
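
This test pins the public surface of gclient_utils, so swapping StdoutAnnotated
for MakeFileAnnotated has to be reflected here. The check amounts to comparing
the module's members against the expected list, roughly:

  import gclient_utils

  def public_members(module):
    return sorted(m for m in dir(module) if not m.startswith('_'))

  # After this change the module exports MakeFileAnnotated and no longer
  # exports StdoutAnnotated.
  assert 'MakeFileAnnotated' in public_members(gclient_utils)
  assert 'StdoutAnnotated' not in public_members(gclient_utils)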