Commit 0bfc1b25 authored by Michal Majewski, committed by Commit Bot

[test] Do not pass unnecessary results

Bug: v8:6917
Change-Id: I1a355bdfe3f873091a7d7c32a937a533a7d8b3d4
Cq-Include-Trybots: luci.v8.try:v8_linux64_fyi_rel_ng
Reviewed-on: https://chromium-review.googlesource.com/867053
Commit-Queue: Michał Majewski <majeski@google.com>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50650}
parent 96d06a63
......@@ -57,17 +57,20 @@ class TestCase(object):
self.origin = None # Test that this test is subtest of.
self.processor = None # Processor that created this subtest.
self.procid = '%s/%s' % (self.suite.name, self.name) # unique id
self.keep_output = False # Whether the output must be kept even if processors would drop it
self._statusfile_outcomes = None
self._expected_outcomes = None # optimization: None == [statusfile.PASS]
self._statusfile_flags = None
self._prepare_outcomes()
def create_subtest(self, processor, subtest_id, variant=None, flags=None):
def create_subtest(self, processor, subtest_id, variant=None, flags=None,
keep_output=False):
subtest = copy.copy(self)
subtest.origin = self
subtest.processor = processor
subtest.procid += '.%s' % subtest_id
subtest.keep_output = keep_output
if variant is not None:
assert self.variant is None
subtest.variant = variant
......
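For orientation, here is a self-contained toy version of the subtest change above. MiniTest is a hypothetical stand-in, not the real TestCase; it only shows how create_subtest copies the test and how the new keep_output flag travels with the copy.

import copy

class MiniTest(object):
  # Stripped-down stand-in for TestCase: just enough state to show the flag.
  def __init__(self, procid):
    self.procid = procid
    self.origin = None
    self.processor = None
    self.keep_output = False

  def create_subtest(self, processor, subtest_id, keep_output=False):
    subtest = copy.copy(self)
    subtest.origin = self
    subtest.processor = processor
    subtest.procid += '.%s' % subtest_id
    subtest.keep_output = keep_output
    return subtest

test = MiniTest('mjsunit/regress/foo')
rerun = test.create_subtest(processor=object(), subtest_id='2', keep_output=True)
assert rerun.procid == 'mjsunit/regress/foo.2'
assert rerun.keep_output and not test.keep_output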
......@@ -613,6 +613,8 @@ class StandardTestRunner(base_runner.BaseTestRunner):
tests = [t for s in suites for t in s.tests]
tests.sort(key=lambda t: t.is_slow, reverse=True)
loader.setup()
loader.load_tests(tests)
print '>>> Running %d base tests' % tests_counter.total
......
......@@ -32,10 +32,44 @@ Each subtest has:
"""
DROP_RESULT = 0
DROP_OUTPUT = 1
DROP_PASS_OUTPUT = 2
DROP_PASS_STDOUT = 3
def get_reduce_result_function(requirement):
if requirement == DROP_RESULT:
return lambda _: None
if requirement == DROP_OUTPUT:
def f(result):
result.output = None
return result
return f
if requirement == DROP_PASS_OUTPUT:
def f(result):
if not result.has_unexpected_output:
result.output = None
return result
return f
if requirement == DROP_PASS_STDOUT:
def f(result):
if not result.has_unexpected_output:
result.output.stdout = None
result.output.stderr = None
return result
return f
class TestProc(object):
def __init__(self):
self._prev_proc = None
self._next_proc = None
self._requirement = DROP_RESULT
self._prev_requirement = None
self._reduce_result = lambda result: result
def connect_to(self, next_proc):
"""Puts `next_proc` after itself in the chain."""
......@@ -48,6 +82,17 @@ class TestProc(object):
if self._next_proc:
self._next_proc._prev_proc = self._prev_proc
def setup(self, requirement=DROP_RESULT):
"""
Method called by the previous processor or the pipeline creator to let
this processor know which part of the result can be ignored.
"""
self._prev_requirement = requirement
if self._next_proc:
self._next_proc.setup(max(requirement, self._requirement))
if self._prev_requirement < self._requirement:
self._reduce_result = get_reduce_result_function(self._prev_requirement)
def next_test(self, test):
"""
Method called by the previous processor whenever it produces a new test.
......@@ -74,12 +119,15 @@ class TestProc(object):
def _send_result(self, test, result):
"""Helper method for sending result to the previous processor."""
result = self._reduce_result(result)
self._prev_proc.result_for(test, result)
class TestProcObserver(TestProc):
"""Processor used for observing the data."""
def __init__(self):
super(TestProcObserver, self).__init__()
def next_test(self, test):
self._on_next_test(test)
......
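The requirement plumbing above is easier to follow on a toy chain. This is a minimal sketch with hypothetical MiniProc objects, not the real processors: each processor declares in _requirement how much of a result it needs for itself, and setup() forwards the maximum of everything needed so far; in the real code, a processor whose predecessor needs less than it keeps then installs a reduction via get_reduce_result_function(self._prev_requirement) before handing results back.

DROP_RESULT, DROP_OUTPUT, DROP_PASS_OUTPUT, DROP_PASS_STDOUT = range(4)

class MiniProc(object):
  # Hypothetical, stripped-down processor: only the requirement plumbing.
  def __init__(self, requirement=DROP_RESULT):
    self._requirement = requirement   # what this processor itself needs
    self._prev_requirement = None     # what everything before it needs
    self._next = None

  def connect_to(self, next_proc):
    self._next = next_proc

  def setup(self, requirement=DROP_RESULT):
    self._prev_requirement = requirement
    if self._next:
      # Forward the maximum, i.e. the union of everything needed so far.
      self._next.setup(max(requirement, self._requirement))

loader = MiniProc(DROP_RESULT)         # needs nothing back
tracker = MiniProc(DROP_OUTPUT)        # pass/fail only
progress = MiniProc(DROP_PASS_OUTPUT)  # full output, but only for failures
execution = MiniProc(DROP_RESULT)      # produces results, consumes none

loader.connect_to(tracker)
tracker.connect_to(progress)
progress.connect_to(execution)
loader.setup()

# The execution end learns the strictest upstream need, so a passing test's
# output can already be stripped before the result travels back up the chain.
assert execution._prev_requirement == DROP_PASS_OUTPUT
assert tracker._prev_requirement == DROP_RESULT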
......@@ -11,22 +11,31 @@ from ..local import pool
# Global function for multiprocessing, because pickling a static method doesn't
# work on Windows.
def run_job(job):
return job.run()
def run_job(job, process_context):
return job.run(process_context)
def create_process_context(requirement):
return ProcessContext(base.get_reduce_result_function(requirement))
JobResult = collections.namedtuple('JobResult', ['id', 'result'])
ProcessContext = collections.namedtuple('ProcessContext', ['reduce_result_f'])
class Job(object):
def __init__(self, test_id, cmd, outproc):
def __init__(self, test_id, cmd, outproc, keep_output):
self.test_id = test_id
self.cmd = cmd
self.outproc = outproc
self.keep_output = keep_output
def run(self):
def run(self, process_ctx):
output = self.cmd.execute()
return JobResult(self.test_id, self.outproc.process(output))
result = self.outproc.process(output)
if not self.keep_output:
result = process_ctx.reduce_result_f(result)
return JobResult(self.test_id, result)
class ExecutionProc(base.TestProc):
......@@ -49,8 +58,8 @@ class ExecutionProc(base.TestProc):
it = self._pool.imap_unordered(
fn=run_job,
gen=[],
process_context_fn=None,
process_context_args=None,
process_context_fn=create_process_context,
process_context_args=[self._prev_requirement],
)
for pool_result in it:
if pool_result.heartbeat:
......@@ -80,7 +89,7 @@ class ExecutionProc(base.TestProc):
# TODO(majeski): Needs factory for outproc as in local/execution.py
outproc = test.output_proc
self._pool.add([Job(test_id, test.cmd, outproc)])
self._pool.add([Job(test_id, test.cmd, outproc, test.keep_output)])
def result_for(self, test, result):
assert False, 'ExecutionProc cannot receive results'
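A rough sketch of what the new ProcessContext buys, using hypothetical FakeResult/drop_output stand-ins rather than the real outproc results: the reduction runs inside the worker process (mirroring Job.run above), presumably so that large stdout/stderr never has to be pickled back to the main process when nothing upstream needs it.

import collections

JobResult = collections.namedtuple('JobResult', ['id', 'result'])
ProcessContext = collections.namedtuple('ProcessContext', ['reduce_result_f'])

class FakeResult(object):
  # Hypothetical minimal result: just an output attribute.
  def __init__(self, output):
    self.output = output

def drop_output(result):
  # Stand-in for the function create_process_context() would pick from the
  # pipeline's requirement.
  result.output = None
  return result

def run_job(test_id, output_text, keep_output, ctx):
  # Mirrors Job.run: reduce inside the worker, before the JobResult is
  # pickled and shipped back to the main process.
  result = FakeResult(output_text)
  if not keep_output:
    result = ctx.reduce_result_f(result)
  return JobResult(test_id, result)

ctx = ProcessContext(drop_output)
assert run_job(1, 'big stdout', False, ctx).result.output is None
assert run_job(2, 'big stdout', True, ctx).result.output == 'big stdout'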
......@@ -34,6 +34,8 @@ class TestsCounter(base.TestProcObserver):
class ResultsTracker(base.TestProcObserver):
def __init__(self):
super(ResultsTracker, self).__init__()
self._requirement = base.DROP_OUTPUT
self.failed = 0
self.remaining = 0
self.total = 0
......@@ -56,6 +58,7 @@ class ProgressIndicator(base.TestProcObserver):
class SimpleProgressIndicator(ProgressIndicator):
def __init__(self):
super(SimpleProgressIndicator, self).__init__()
self._requirement = base.DROP_PASS_OUTPUT
self._failed = []
self._total = 0
......@@ -146,6 +149,8 @@ class DotsProgressIndicator(SimpleProgressIndicator):
class CompactProgressIndicator(ProgressIndicator):
def __init__(self, templates):
super(CompactProgressIndicator, self).__init__()
self._requirement = base.DROP_PASS_OUTPUT
self._templates = templates
self._last_status_length = 0
self._start_time = time.time()
......@@ -250,6 +255,8 @@ class MonochromeProgressIndicator(CompactProgressIndicator):
class JUnitTestProgressIndicator(ProgressIndicator):
def __init__(self, junitout, junittestsuite):
super(JUnitTestProgressIndicator, self).__init__()
self._requirement = base.DROP_PASS_STDOUT
self.outputter = junit_output.JUnitTestOutput(junittestsuite)
if junitout:
self.outfile = open(junitout, "w")
......@@ -287,6 +294,12 @@ class JUnitTestProgressIndicator(ProgressIndicator):
class JsonTestProgressIndicator(ProgressIndicator):
def __init__(self, json_test_results, arch, mode, random_seed):
super(JsonTestProgressIndicator, self).__init__()
# We want to drop stdout/err for all tests that pass on the first run, but we
# need the outputs of every run after the first one. To accommodate that,
# reruns keep the result regardless of what the requirement says, i.e.
# keep_output is set to True in the RerunProc.
self._requirement = base.DROP_PASS_STDOUT
self.json_test_results = json_test_results
self.arch = arch
self.mode = mode
......
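To see why the JUnit and JSON indicators declare DROP_PASS_STDOUT while the simple and compact indicators can use the stronger DROP_PASS_OUTPUT, here is a toy version of the two reductions (hypothetical minimal Output/Result classes, not the real ones). The JUnit/JSON indicators presumably still need per-test data such as exit codes or durations for passing runs, so only the captured text is dropped there.

class Output(object):
  # Hypothetical minimal output record.
  def __init__(self, stdout, stderr, exit_code):
    self.stdout, self.stderr, self.exit_code = stdout, stderr, exit_code

class Result(object):
  def __init__(self, output, has_unexpected_output):
    self.output = output
    self.has_unexpected_output = has_unexpected_output

def drop_pass_output(result):
  # DROP_PASS_OUTPUT: a passing test loses its whole Output object.
  if not result.has_unexpected_output:
    result.output = None
  return result

def drop_pass_stdout(result):
  # DROP_PASS_STDOUT: a passing test keeps the Output (exit code etc.) but
  # loses the captured text.
  if not result.has_unexpected_output:
    result.output.stdout = None
    result.output.stderr = None
  return result

passed = Result(Output('text', '', 0), has_unexpected_output=False)
drop_pass_stdout(passed)
assert passed.output is not None and passed.output.stdout is None

passed2 = Result(Output('text', '', 0), has_unexpected_output=False)
drop_pass_output(passed2)
assert passed2.output is None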
......@@ -11,6 +11,8 @@ from .result import RerunResult
class RerunProc(base.TestProcProducer):
def __init__(self, rerun_max, rerun_max_total=None):
super(RerunProc, self).__init__('Rerun')
self._requirement = base.DROP_OUTPUT
self._rerun = {}
self._results = collections.defaultdict(list)
self._rerun_max = rerun_max
......@@ -49,7 +51,7 @@ class RerunProc(base.TestProcProducer):
result.has_unexpected_output)
def _send_next_subtest(self, test, run=0):
subtest = self._create_subtest(test, str(run + 1))
subtest = self._create_subtest(test, str(run + 1), keep_output=(run != 0))
self._send_test(subtest)
def _finalize_test(self, test):
......
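As the JsonTestProgressIndicator comment notes, only a test's first run may have its output stripped. A minimal sketch of the keep_output decision made in _send_next_subtest above (the helper name is hypothetical; the real processor also only schedules another run when the previous one had unexpected output):

def rerun_subtest_params(run):
  # Subtest id is the 1-based run number; only reruns (run != 0) pin their
  # output so it survives any reduction chosen by the pipeline.
  return str(run + 1), run != 0

assert rerun_subtest_params(0) == ('1', False)  # first execution: reducible
assert rerun_subtest_params(1) == ('2', True)   # rerun: output always kept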
......@@ -25,31 +25,35 @@ class VariantProc(base.TestProcProducer):
def __init__(self, variants):
super(VariantProc, self).__init__('VariantProc')
self._test_data = {} # procid: (generator, results)
self._next_variant = {}
self._variant_gens = {}
self._variants = variants
def setup(self, requirement=base.DROP_RESULT):
super(VariantProc, self).setup(requirement)
# VariantProc is optimized for dropping the result, so it should be placed
# at a point in the chain where that is possible.
assert requirement == base.DROP_RESULT
def _next_test(self, test):
test_data = gen, results = self._variants_gen(test), []
self._test_data[test.procid] = test_data
self._try_send_new_subtest(test, gen, results)
gen = self._variants_gen(test)
self._next_variant[test.procid] = gen
self._try_send_new_subtest(test, gen)
def _result_for(self, test, subtest, result):
gen, results = self._test_data[test.procid]
results.append((subtest, result))
self._try_send_new_subtest(test, gen, results)
gen = self._next_variant[test.procid]
self._try_send_new_subtest(test, gen)
def _try_send_new_subtest(self, test, variants_gen, results):
def _try_send_new_subtest(self, test, variants_gen):
for variant, flags, suffix in variants_gen:
subtest = self._create_subtest(test, '%s-%s' % (variant, suffix),
variant=variant, flags=flags)
self._send_test(subtest)
return
del self._test_data[test.procid]
# TODO(majeski): Don't group tests if previous processors don't need them.
result = GroupedResult.create(results)
self._send_result(test, result)
del self._next_variant[test.procid]
self._send_result(test, None)
def _variants_gen(self, test):
"""Generator producing (variant, flags, procid suffix) tuples."""
......
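Since VariantProc now asserts DROP_RESULT, it no longer buffers (subtest, result) pairs or builds a GroupedResult; per test it only remembers where it is in the variant generator. A small self-contained sketch of that bookkeeping, with a hypothetical variants_gen in place of the real _variants_gen:

def variants_gen(test_procid):
  # Stand-in yielding (variant, flags, procid suffix) tuples.
  for variant in ('default', 'stress'):
    yield variant, ['--%s-flag' % variant], variant

next_variant = {}
next_variant['mjsunit/foo'] = variants_gen('mjsunit/foo')

# Each incoming result just advances the generator; once it is exhausted the
# entry is deleted and None is reported upstream instead of a grouped result.
assert next(next_variant['mjsunit/foo'])[0] == 'default'
assert next(next_variant['mjsunit/foo'])[0] == 'stress'
del next_variant['mjsunit/foo']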