
test: remove flaky test functionality

Reverts https://github.com/joyent/node/pull/8689

PR-URL: https://github.com/iojs/io.js/pull/812
Reviewed-By: Jeremiah Senkpiel <fishrock123@rocketmail.com>
Reviewed-By: Johan Bergström <bugs@bergstroem.nu>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
v1.8.0-commit
Rod Vagg 10 years ago
parent commit 20f8e7f17a
Changed files (lines changed):
1. test/internet/internet.status (1)
2. test/parallel/simple.status (4)
3. test/pummel/pummel.status (1)
4. tools/test.py (48)

test/internet/internet.status

@@ -1 +0,0 @@
-prefix internet

test/parallel/simple.status

@@ -1,4 +0,0 @@
-prefix simple
-
-[$system==linux]
-test-net-GH-5504 : PASS,FLAKY

test/pummel/pummel.status

@@ -1 +0,0 @@
-prefix pummel
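
Note: the deleted .status files follow the v8-style expectations syntax that the removed flaky-tests support consumed. A rough sketch of the shape, reconstructed only from the lines removed above (angle brackets are placeholders; the exact grammar lived in the parser in tools/test.py):

prefix <suite>

[$system==linux]
<test-name> : PASS,FLAKY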

tools/test.py

@@ -57,9 +57,8 @@ VERBOSE = False
 class ProgressIndicator(object):
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     self.cases = cases
-    self.flaky_tests_mode = flaky_tests_mode
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -248,19 +247,13 @@ class TapProgressIndicator(SimpleProgressIndicator):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      status_line = 'not ok %i - %s' % (self._done, command)
-      if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
-        status_line = status_line + " # TODO : Fix flaky test"
-      print status_line
+      print 'not ok %i - %s' % (self._done, command)
       for l in output.output.stderr.splitlines():
         print '#' + l
       for l in output.output.stdout.splitlines():
         print '#' + l
     else:
-      status_line = 'ok %i - %s' % (self._done, command)
-      if FLAKY in output.test.outcomes:
-        status_line = status_line + " # TODO : Fix flaky test"
-      print status_line
+      print 'ok %i - %s' % (self._done, command)

     duration = output.test.duration
@@ -278,8 +271,8 @@ class TapProgressIndicator(SimpleProgressIndicator):
 class CompactProgressIndicator(ProgressIndicator):
-  def __init__(self, cases, flaky_tests_mode, templates):
-    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
+  def __init__(self, cases, templates):
+    super(CompactProgressIndicator, self).__init__(cases)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -334,13 +327,13 @@ class CompactProgressIndicator(ProgressIndicator):
 class ColorProgressIndicator(CompactProgressIndicator):
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(ColorProgressIndicator, self).__init__(cases, templates)

   def ClearLine(self, last_line_length):
     print "\033[1K\r",
@@ -348,7 +341,7 @@ class ColorProgressIndicator(CompactProgressIndicator):
 class MonochromeProgressIndicator(CompactProgressIndicator):
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
@@ -356,7 +349,7 @@ class MonochromeProgressIndicator(CompactProgressIndicator):
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, templates)

   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -776,8 +769,8 @@ class Context(object):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[mode]

-def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
+def RunTestCases(cases_to_run, progress, tasks):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run)
   return progress.Run(tasks)
@@ -801,7 +794,6 @@ OKAY = 'okay'
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
-FLAKY = 'flaky'

 class Expression(object):
@@ -1248,9 +1240,6 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
-  result.add_option("--flaky-tests",
-      help="Regard tests marked as flaky (run|skip|dontcare)",
-      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
@@ -1280,35 +1269,24 @@ def ProcessOptions(options):
     options.mode = options.mode.split(',')
   if options.J:
     options.j = multiprocessing.cpu_count()
-  def CheckTestMode(name, option):
-    if not option in ["run", "skip", "dontcare"]:
-      print "Unknown %s mode %s" % (name, option)
-      return False
-    return True
-  if not CheckTestMode("--flaky-tests", options.flaky_tests):
-    return False
   return True

 REPORT_TEMPLATE = """\
 Total: %(total)i tests
  * %(skipped)4d tests will be skipped
- * %(nocrash)4d tests are expected to be flaky but not crash
  * %(pass)4d tests are expected to pass
  * %(fail_ok)4d tests are expected to fail that we won't fix
  * %(fail)4d tests are expected to fail that we should fix\
 """

 def PrintReport(cases):
-  def IsFlaky(o):
-    return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o)
   def IsFailOk(o):
     return (len(o) == 2) and (FAIL in o) and (OKAY in o)
   unskipped = [c for c in cases if not SKIP in c.outcomes]
   print REPORT_TEMPLATE % {
     'total': len(cases),
     'skipped': len(cases) - len(unskipped),
-    'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]),
     'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]),
     'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]),
     'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]])
@@ -1486,7 +1464,7 @@ def Main():
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes or (FLAKY in case.outcomes and options.flaky_tests == "skip")
+    return SKIP in case.outcomes or SLOW in case.outcomes
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if len(cases_to_run) == 0:
     print "No tests to run."
@@ -1494,7 +1472,7 @@ def Main():
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
+      if RunTestCases(cases_to_run, options.progress, options.j):
         result = 0
       else:
         result = 1
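
Note: before this revert, the runner read flaky-test expectations through the removed --flaky-tests option (values run|skip|dontcare, default run, per the deleted add_option call above). A sketch of the old invocation, assuming tools/test.py is run directly with Python:

  python tools/test.py --flaky-tests=dontcare

With dontcare, a failing test marked FLAKY still printed "not ok" but with a "# TODO : Fix flaky test" suffix; with skip, FLAKY tests were excluded in DoSkip; after this commit both the option and the FLAKY outcome are gone.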
