[Piglit] [PATCH v3] Filter expected failures from JUnit output

2014-09-23 Thread Mark Janes
Piglit test results are difficult to analyze with simpler JUnit
visualizers like those provided by Jenkins.  There are hundreds of
failures, but the engineer is typically interested only in NEW
failures.  Jenkins JUnit rules typically expect a 100% pass rate or
some static threshold of failures.

Specifying test names in the [expected-failures] or [expected-crashes]
sections of the config file enables JUnit reporting of just the test
failures which represent new regressions.

Test names are expected in the dotted JUnit format (e.g.
piglit.spec.ARB_fragment_program.fp-formats) and are case
insensitive.
---

Thanks to Dylan Baker for review and feedback.  This patch
incorporates his suggestions.
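
As an illustration (not part of the patch), the piglit.conf entries look
something like the following; the test names below are made-up examples,
and depending on how PIGLIT_CONFIG is constructed a trailing '=' may be
required for value-less options:

[expected-failures]
piglit.spec.arb_fragment_program.fp-formats
piglit.spec.glsl-1_30.execution.some-known-regression

[expected-crashes]
piglit.spec.ext_framebuffer_object.some-crashing-test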

 framework/results.py | 46 ++++++++++++++++++++++++++++++++++++++++++++++--
 piglit.conf.example  | 12 ++++++++++++
 2 files changed, 56 insertions(+), 2 deletions(-)

diff --git a/framework/results.py b/framework/results.py
index b567553..fa2a9e5 100644
--- a/framework/results.py
+++ b/framework/results.py
@@ -29,6 +29,7 @@ import abc
 import threading
 import posixpath
 from cStringIO import StringIO
+
 try:
 import simplejson as json
 except ImportError:
@@ -38,6 +39,7 @@ try:
 except ImportError:
 import xml.etree.cElementTree as etree
 
+from framework.core import PIGLIT_CONFIG
 import framework.status as status
 
 __all__ = [
@@ -385,6 +387,18 @@ class JUnitBackend(FSyncMixin, Backend):
         self._file = open(os.path.join(dest, 'results.xml'), 'w')
         FSyncMixin.__init__(self, **options)
 
+        # make dictionaries of all test names expected to crash/fail
+        # for quick lookup when writing results.  Use lower-case to
+        # provide case insensitive matches.
+        self._expected_failures = {}
+        if PIGLIT_CONFIG.has_section("expected-failures"):
+            for (fail, _) in PIGLIT_CONFIG.items("expected-failures"):
+                self._expected_failures[fail.lower()] = True
+        self._expected_crashes = {}
+        if PIGLIT_CONFIG.has_section("expected-crashes"):
+            for (fail, _) in PIGLIT_CONFIG.items("expected-crashes"):
+                self._expected_crashes[fail.lower()] = True
+
         # Write initial headers and other data that etree cannot write for us
         self._file.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
         self._file.write('<testsuites>\n')
@@ -414,6 +428,21 @@ class JUnitBackend(FSyncMixin, Backend):
         # set different root names.
         classname = 'piglit.' + classname
 
+        expected_result = "pass"
+
+        # replace special characters and make case insensitive
+        lname = (classname + "." + testname).lower()
+        lname = lname.replace("=", ".")
+        lname = lname.replace(":", ".")
+
+        if lname in self._expected_failures:
+            expected_result = "failure"
+            # a test can either fail or crash, but not both
+            assert lname not in self._expected_crashes
+
+        if lname in self._expected_crashes:
+            expected_result = "error"
+
         # Create the root element
         element = etree.Element('testcase', name=testname + self._test_suffix,
                                 classname=classname,
@@ -432,10 +461,23 @@ class JUnitBackend(FSyncMixin, Backend):
         # one of these statuses
         if data['result'] == 'skip':
             etree.SubElement(element, 'skipped')
+
         elif data['result'] in ['warn', 'fail', 'dmesg-warn', 'dmesg-fail']:
-            etree.SubElement(element, 'failure')
+            if expected_result == "failure":
+                err.text += "\n\nWARN: passing test as an expected failure"
+            else:
+                etree.SubElement(element, 'failure')
+
         elif data['result'] == 'crash':
-            etree.SubElement(element, 'error')
+            if expected_result == "error":
+                err.text += "\n\nWARN: passing test as an expected crash"
+            else:
+                etree.SubElement(element, 'error')
+
+        elif expected_result != "pass":
+            err.text += "\n\nERROR: This test passed when it "\
+                        "expected {0}".format(expected_result)
+            etree.SubElement(element, 'failure')
 
         self._file.write(etree.tostring(element))
         self._file.write('\n')
diff --git a/piglit.conf.example b/piglit.conf.example
index 9cb7dd1..a864c01 100644
--- a/piglit.conf.example
+++ b/piglit.conf.example
@@ -71,3 +71,15 @@ run_test=./%(test_name)s
 ; Options can be found running piglit run -h and reading the section for
 ; -b/--backend
 ;backend=json
+
+[expected-failures]
+; Provide a list of test names that are expected to fail.  These tests
+; will be listed as passing in JUnit output when they fail.  Any
+; unexpected result (pass/crash) will be listed as a failure.  The
+; test name must be written in the JUnit format ('.' instead of '/').
+; Special characters for config file format ('=' and ':') must be
+; replaced with '.'
+
+[expected-crashes]
+; Like expected-failures, but specifies that the test is expected to
+; crash.
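
For readers skimming the diff, here is a minimal standalone sketch (not
part of the patch) of the normalization and lookup that write_test now
performs.  PIGLIT_CONFIG and the XML handling are omitted, and plain
dicts stand in for the parsed [expected-failures] / [expected-crashes]
sections:

def classify(classname, testname, expected_failures, expected_crashes):
    """Return 'pass', 'failure' or 'error' depending on expectations."""
    # dotted JUnit-style name, lower-cased for case-insensitive matching
    lname = (classname + "." + testname).lower()
    # '=' and ':' are special to the config file format, so expected
    # names carry '.' in their place
    lname = lname.replace("=", ".").replace(":", ".")

    if lname in expected_failures:
        # a test can be expected to fail or to crash, but not both
        assert lname not in expected_crashes
        return "failure"
    if lname in expected_crashes:
        return "error"
    return "pass"

# e.g. a test listed under [expected-failures]:
print(classify("piglit.spec.ARB_fragment_program", "fp-formats",
               {"piglit.spec.arb_fragment_program.fp-formats": True}, {}))
# -> failure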

Re: [Piglit] [PATCH v3] Filter expected failures from JUnit output

2014-09-23 Thread Dylan Baker
Looks good to me. Thanks for making these changes.

Reviewed-by: Dylan Baker baker.dyla...@gmail.com
