Brian Daniels / mbed-tools

Fork of mbed-tools by Morpheus


test_exporters.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2014 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 
00017 Author: Przemyslaw Wirkus <Przemyslaw.wirkus@arm.com>
00018 """
00019 
00020 from tools.utils import construct_enum
00021 
00022 
00023 ResultExporterType = construct_enum(HTML='Html_Exporter',
00024                                     JUNIT='JUnit_Exporter',
00025                                     JUNIT_OPER='JUnit_Exporter_Interoperability',
00026                                     BUILD='Build_Exporter',
00027                                     PRINT='Print_Exporter')
00028 
00029 
class ReportExporter():
    """ Exports the extended test result Python data structure to
        different formats such as HTML and JUnit XML.

    Parameter 'test_result_ext' format:

    u'uARM': {   u'LPC1768': {   'MBED_2': {   0: {   'copy_method': 'shutils.copy()',
                                                      'duration': 20,
                                                      'elapsed_time': 1.7929999828338623,
                                                      'output': 'Host test instrumentation on ...\r\n',
                                                      'result': 'OK',
                                                      'target_name': u'LPC1768',
                                                      'description': 'stdio',
                                                      'id': u'MBED_2',
                                                      'toolchain_name': u'uARM'}},
    """
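    # Note: the exporter methods below walk this structure as
    # target -> toolchain -> test id -> list of per-run dictionaries keyed by run number.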
    CSS_STYLE = """<style>
                   .name{
                    border: 1px solid;
                    border-radius: 25px;
                    width: 100px;
                   }
                   .tooltip{
                       position:absolute;
                       background-color: #F5DA81;
                       display:none;
                   }
                   </style>
                """

    JAVASCRIPT = """
                 <script type="text/javascript">
                 function show (elem) {
                     elem.style.display = "block";
                 }
                 function hide (elem) {
                     elem.style.display = "";
                 }
                 </script>
                 """

    def __init__(self, result_exporter_type, package="test"):
        self.result_exporter_type = result_exporter_type
        self.package = package

    def report(self, test_summary_ext, test_suite_properties=None):
        """ Invokes the exporter matching the exporter type set in the constructor
        """
        if self.result_exporter_type == ResultExporterType.HTML:
            # HTML exporter
            return self.exporter_html(test_summary_ext, test_suite_properties)
        elif self.result_exporter_type == ResultExporterType.JUNIT:
            # JUnit exporter for results from the test suite
            return self.exporter_junit(test_summary_ext, test_suite_properties)
        elif self.result_exporter_type == ResultExporterType.JUNIT_OPER:
            # JUnit exporter for interoperability tests
            return self.exporter_junit_ioper(test_summary_ext, test_suite_properties)
        elif self.result_exporter_type == ResultExporterType.PRINT:
            # Plain text (print) exporter
            return self.exporter_print(test_summary_ext)
        return None

    def report_to_file(self, test_summary_ext, file_name, test_suite_properties=None):
        """ Generates the report and writes it to the specified file
        """
        report = self.report(test_summary_ext, test_suite_properties=test_suite_properties)
        self.write_to_file(report, file_name)

    def write_to_file(self, report, file_name):
        if report is not None:
            with open(file_name, 'w') as f:
                f.write(report)
    def get_tooltip_name(self, toolchain, target, test_id, loop_no):
        """ Generates a simple unique tooltip name which can be used,
            for example, as an HTML <div> section id attribute.
        """
        return "target_test_%s_%s_%s_%s"% (toolchain.lower(), target.lower(), test_id.lower(), loop_no)

    def get_result_div_sections(self, test, test_no):
        """ Generates a separate <DIV> section containing the test result output.
        """

        RESULT_COLORS = {'OK': 'LimeGreen',
                         'FAIL': 'Orange',
                         'ERROR': 'LightCoral',
                         'OTHER': 'LightGray',
                        }

        tooltip_name = self.get_tooltip_name(test['toolchain_name'], test['target_name'], test['id'], test_no)
        background_color = RESULT_COLORS[test['result'] if test['result'] in RESULT_COLORS else 'OTHER']
        result_div_style = "background-color: %s"% background_color

        result = """<div class="name" style="%s" onmouseover="show(%s)" onmouseout="hide(%s)">
                       <center>%s</center>
                       <div class = "tooltip" id= "%s">
                       <b>%s</b><br />
                       <hr />
                       <b>%s</b> in <b>%.2f sec</b><br />
                       <hr />
                       <small>
                       %s
                       </small>
                       </div>
                    </div>
                 """% (result_div_style,
                       tooltip_name,
                       tooltip_name,
                       test['result'],
                       tooltip_name,
                       test['target_name_unique'],
                       test['description'],
                       test['elapsed_time'],
                       test['output'].replace('\n', '<br />'))
        return result

    def get_result_tree(self, test_results):
        """ If a test was run in a loop (we have several results for the same test),
            show them in a column so that all results are visible.
            This function produces an HTML table with the corresponding results.
        """
        result = ''
        for i, test_result in enumerate(test_results):
            result += '<table>'
            test_ids = sorted(test_result.keys())
            for test_no in test_ids:
                test = test_result[test_no]
                result += """<tr>
                                 <td valign="top">%s</td>
                             </tr>"""% self.get_result_div_sections(test, "%d_%d" % (test_no, i))
            result += '</table>'
        return result

    def get_all_unique_test_ids(self, test_result_ext):
        """ Gets all unique test IDs from all tests that were run.
            We need this to build a complete list of all tests run.
        """
        result = []
        targets = test_result_ext.keys()
        for target in targets:
            toolchains = test_result_ext[target].keys()
            for toolchain in toolchains:
                tests = test_result_ext[target][toolchain].keys()
                result.extend(tests)
        return sorted(list(set(result)))

    #
    # Exporter functions
    #

    def exporter_html(self, test_result_ext, test_suite_properties=None):
        """ Exports test results in a proprietary HTML format.
        """
        result = """<html>
                    <head>
                        <title>mbed SDK test suite test result report</title>
                        %s
                        %s
                    </head>
                    <body>
                 """% (self.CSS_STYLE, self.JAVASCRIPT)

        unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
        targets = sorted(test_result_ext.keys())
        result += '<table><tr>'
        for target in targets:
            toolchains = sorted(test_result_ext[target].keys())
            for toolchain in toolchains:
                result += '<td></td>'
                result += '<td></td>'

                tests = sorted(test_result_ext[target][toolchain].keys())
                for test in unique_test_ids:
                    result += """<td align="center">%s</td>"""% test
                result += """</tr>
                              <tr>
                              <td valign="center">%s</td>
                              <td valign="center"><b>%s</b></td>
                          """% (toolchain, target)

                for test in unique_test_ids:
                    test_result = self.get_result_tree(test_result_ext[target][toolchain][test]) if test in tests else ''
                    result += '<td>%s</td>'% (test_result)

                result += '</tr>'
        result += '</table>'
        result += '</body></html>'
        return result

    def exporter_junit_ioper(self, test_result_ext, test_suite_properties=None):
        """ Exports interoperability test results in JUnit XML compliant format
        """
        from junit_xml import TestSuite, TestCase
        test_suites = []
        test_cases = []

        for platform in sorted(test_result_ext.keys()):
            # test_result_ext: {platform: [(result, name, scope, description), ...]}
            test_cases = []
            for tr_result in test_result_ext[platform]:
                result, name, scope, description = tr_result

                classname = 'test.ioper.%s.%s.%s' % (platform, name, scope)
                elapsed_sec = 0
                _stdout = description
                _stderr = ''
                # Test case
                tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                # Test case extra failure / error info
                if result == 'FAIL':
                    tc.add_failure_info(description, _stdout)
                elif result == 'ERROR':
                    tc.add_error_info(description, _stdout)
                elif result == 'SKIP' or result == 'NOT_SUPPORTED':
                    tc.add_skipped_info(description, _stdout)

                test_cases.append(tc)
            ts = TestSuite("test.suite.ioper.%s" % (platform), test_cases)
            test_suites.append(ts)
        return TestSuite.to_xml_string(test_suites)

    def exporter_junit(self, test_result_ext, test_suite_properties=None):
        """ Export test results in JUnit XML compliant format
        """
        from junit_xml import TestSuite, TestCase
        test_suites = []
        test_cases = []

        targets = sorted(test_result_ext.keys())
        for target in targets:
            toolchains = sorted(test_result_ext[target].keys())
            for toolchain in toolchains:
                test_cases = []
                tests = sorted(test_result_ext[target][toolchain].keys())
                for test in tests:
                    test_results = test_result_ext[target][toolchain][test]
                    for test_res in test_results:
                        test_ids = sorted(test_res.keys())
                        for test_no in test_ids:
                            test_result = test_res[test_no]
                            name = test_result['description']
                            classname = '%s.%s.%s.%s'% (self.package, target, toolchain, test_result['id'])
                            elapsed_sec = test_result['elapsed_time']
                            _stdout = test_result['output']

                            if 'target_name_unique' in test_result:
                                _stderr = test_result['target_name_unique']
                            else:
                                _stderr = test_result['target_name']

                            # Test case
                            tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)

                            # Test case extra failure / error info
                            message = test_result['result']
                            if test_result['result'] == 'FAIL':
                                tc.add_failure_info(message, _stdout)
                            elif test_result['result'] == 'SKIP' or test_result["result"] == 'NOT_SUPPORTED':
                                tc.add_skipped_info(message, _stdout)
                            elif test_result['result'] != 'OK':
                                tc.add_error_info(message, _stdout)

                            test_cases.append(tc)

                # Guard against the default test_suite_properties=None
                properties = test_suite_properties[target][toolchain] if test_suite_properties else None
                ts = TestSuite("test.suite.%s.%s"% (target, toolchain), test_cases, properties=properties)
                test_suites.append(ts)
        return TestSuite.to_xml_string(test_suites)

    def exporter_print_helper(self, array):
        for item in array:
            print "  * %s::%s::%s" % (item["target_name"], item["toolchain_name"], item["id"])

    def exporter_print(self, test_result_ext):
        """ Export test results in print format.
        """
        failures = []
        skips = []
        successes = []

        unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
        targets = sorted(test_result_ext.keys())

        for target in targets:
            toolchains = sorted(test_result_ext[target].keys())
            for toolchain in toolchains:
                tests = sorted(test_result_ext[target][toolchain].keys())
                for test in tests:
                    test_runs = test_result_ext[target][toolchain][test]
                    for test_runner in test_runs:
                        #test_run = test_result_ext[target][toolchain][test][test_run_number][0]
                        test_run = test_runner[0]

                        if test_run["result"] == "FAIL":
                            failures.append(test_run)
                        elif test_run["result"] == "SKIP" or test_run["result"] == "NOT_SUPPORTED":
                            skips.append(test_run)
                        elif test_run["result"] == "OK":
                            successes.append(test_run)
                        else:
                            raise Exception("Unhandled result type: %s" % (test_run["result"]))

        if successes:
            print "\n\nBuild successes:"
            self.exporter_print_helper(successes)

        if skips:
            print "\n\nBuild skips:"
            self.exporter_print_helper(skips)

        if failures:
            print "\n\nBuild failures:"
            self.exporter_print_helper(failures)
            return False
        else:
            return True
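
Example usage (an illustrative sketch, not part of the original source): it builds a minimal 'test_result_ext' dictionary in the target -> toolchain -> test id -> run layout the exporters iterate over, then renders it with the HTML and print exporters. The module path 'tools.test_exporters', the output file name 'results.html', and all field values are assumptions made for illustration.

# Hypothetical usage sketch (Python 2, matching the print statements above).
from tools.test_exporters import ReportExporter, ResultExporterType  # assumed module path

test_result_ext = {
    'LPC1768': {                       # target
        'uARM': {                      # toolchain
            'MBED_2': [                # test id -> list of runs
                {0: {'result': 'OK',   # run number -> single test run record
                     'target_name': 'LPC1768',
                     'target_name_unique': 'LPC1768_0',
                     'toolchain_name': 'uARM',
                     'id': 'MBED_2',
                     'description': 'stdio',
                     'elapsed_time': 1.79,
                     'duration': 20,
                     'copy_method': 'shutil.copy()',
                     'output': 'Host test instrumentation on ...\r\n'}}
            ]
        }
    }
}

# Write an HTML report to disk (file name is illustrative).
ReportExporter(ResultExporterType.HTML).report_to_file(test_result_ext, 'results.html')

# Print a plain text summary; returns False if any run failed.
ReportExporter(ResultExporterType.PRINT).report(test_result_ext)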