User | Revision | Line number | New contents of line |
switches |
0:0e018d759a2a
|
1
|
"""
|
switches |
0:0e018d759a2a
|
2
|
mbed SDK
|
switches |
0:0e018d759a2a
|
3
|
Copyright (c) 2011-2014 ARM Limited
|
switches |
0:0e018d759a2a
|
4
|
|
switches |
0:0e018d759a2a
|
5
|
Licensed under the Apache License, Version 2.0 (the "License");
|
switches |
0:0e018d759a2a
|
6
|
you may not use this file except in compliance with the License.
|
switches |
0:0e018d759a2a
|
7
|
You may obtain a copy of the License at
|
switches |
0:0e018d759a2a
|
8
|
|
switches |
0:0e018d759a2a
|
9
|
http://www.apache.org/licenses/LICENSE-2.0
|
switches |
0:0e018d759a2a
|
10
|
|
switches |
0:0e018d759a2a
|
11
|
Unless required by applicable law or agreed to in writing, software
|
switches |
0:0e018d759a2a
|
12
|
distributed under the License is distributed on an "AS IS" BASIS,
|
switches |
0:0e018d759a2a
|
13
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
switches |
0:0e018d759a2a
|
14
|
See the License for the specific language governing permissions and
|
switches |
0:0e018d759a2a
|
15
|
limitations under the License.
|
switches |
0:0e018d759a2a
|
16
|
|
switches |
0:0e018d759a2a
|
17
|
Author: Przemyslaw Wirkus <Przemyslaw.wirkus@arm.com>
|
switches |
0:0e018d759a2a
|
18
|
"""
|
switches |
0:0e018d759a2a
|
19
|
|
switches |
0:0e018d759a2a
|
20
|
import os
|
switches |
0:0e018d759a2a
|
21
|
import re
|
switches |
0:0e018d759a2a
|
22
|
import sys
|
switches |
0:0e018d759a2a
|
23
|
import json
|
switches |
0:0e018d759a2a
|
24
|
import uuid
|
switches |
0:0e018d759a2a
|
25
|
import pprint
|
switches |
0:0e018d759a2a
|
26
|
import random
|
switches |
0:0e018d759a2a
|
27
|
import argparse
|
switches |
0:0e018d759a2a
|
28
|
import datetime
|
switches |
0:0e018d759a2a
|
29
|
import threading
|
switches |
0:0e018d759a2a
|
30
|
import ctypes
|
switches |
0:0e018d759a2a
|
31
|
from types import ListType
|
switches |
0:0e018d759a2a
|
32
|
from colorama import Fore, Back, Style
|
switches |
0:0e018d759a2a
|
33
|
from prettytable import PrettyTable
|
switches |
0:0e018d759a2a
|
34
|
from copy import copy
|
switches |
0:0e018d759a2a
|
35
|
|
switches |
0:0e018d759a2a
|
36
|
from time import sleep, time
|
switches |
0:0e018d759a2a
|
37
|
from Queue import Queue, Empty
|
switches |
0:0e018d759a2a
|
38
|
from os.path import join, exists, basename, relpath
|
switches |
0:0e018d759a2a
|
39
|
from threading import Thread, Lock
|
switches |
0:0e018d759a2a
|
40
|
from multiprocessing import Pool, cpu_count
|
switches |
0:0e018d759a2a
|
41
|
from subprocess import Popen, PIPE
|
switches |
0:0e018d759a2a
|
42
|
|
switches |
0:0e018d759a2a
|
43
|
# Imports related to mbed build api
|
switches |
0:0e018d759a2a
|
44
|
from tools.tests import TESTS
|
switches |
0:0e018d759a2a
|
45
|
from tools.tests import TEST_MAP
|
switches |
0:0e018d759a2a
|
46
|
from tools.paths import BUILD_DIR
|
switches |
0:0e018d759a2a
|
47
|
from tools.paths import HOST_TESTS
|
switches |
0:0e018d759a2a
|
48
|
from tools.utils import ToolException
|
switches |
0:0e018d759a2a
|
49
|
from tools.utils import NotSupportedException
|
switches |
0:0e018d759a2a
|
50
|
from tools.utils import construct_enum
|
switches |
0:0e018d759a2a
|
51
|
from tools.memap import MemapParser
|
switches |
0:0e018d759a2a
|
52
|
from tools.targets import TARGET_MAP
|
switches |
0:0e018d759a2a
|
53
|
from tools.test_db import BaseDBAccess
|
switches |
0:0e018d759a2a
|
54
|
from tools.build_api import build_project, build_mbed_libs, build_lib
|
switches |
0:0e018d759a2a
|
55
|
from tools.build_api import get_target_supported_toolchains
|
switches |
0:0e018d759a2a
|
56
|
from tools.build_api import write_build_report
|
switches |
0:0e018d759a2a
|
57
|
from tools.build_api import prep_report
|
switches |
0:0e018d759a2a
|
58
|
from tools.build_api import prep_properties
|
switches |
0:0e018d759a2a
|
59
|
from tools.build_api import create_result
|
switches |
0:0e018d759a2a
|
60
|
from tools.build_api import add_result_to_report
|
switches |
0:0e018d759a2a
|
61
|
from tools.build_api import prepare_toolchain
|
switches |
0:0e018d759a2a
|
62
|
from tools.build_api import scan_resources
|
switches |
0:0e018d759a2a
|
63
|
from tools.libraries import LIBRARIES, LIBRARY_MAP
|
switches |
0:0e018d759a2a
|
64
|
from tools.options import extract_profile
|
switches |
0:0e018d759a2a
|
65
|
from tools.toolchains import TOOLCHAIN_PATHS
|
switches |
0:0e018d759a2a
|
66
|
from tools.toolchains import TOOLCHAINS
|
switches |
0:0e018d759a2a
|
67
|
from tools.test_exporters import ReportExporter, ResultExporterType
|
switches |
0:0e018d759a2a
|
68
|
from tools.utils import argparse_filestring_type
|
switches |
0:0e018d759a2a
|
69
|
from tools.utils import argparse_uppercase_type
|
switches |
0:0e018d759a2a
|
70
|
from tools.utils import argparse_lowercase_type
|
switches |
0:0e018d759a2a
|
71
|
from tools.utils import argparse_many
|
switches |
0:0e018d759a2a
|
72
|
from tools.utils import get_path_depth
|
switches |
0:0e018d759a2a
|
73
|
|
switches |
0:0e018d759a2a
|
74
|
import tools.host_tests.host_tests_plugins as host_tests_plugins
|
switches |
0:0e018d759a2a
|
75
|
|
switches |
0:0e018d759a2a
|
76
|
try:
|
switches |
0:0e018d759a2a
|
77
|
import mbed_lstools
|
switches |
0:0e018d759a2a
|
78
|
from tools.compliance.ioper_runner import get_available_oper_test_scopes
|
switches |
0:0e018d759a2a
|
79
|
except:
|
switches |
0:0e018d759a2a
|
80
|
pass
|
switches |
0:0e018d759a2a
|
81
|
|
switches |
0:0e018d759a2a
|
82
|
|
switches |
0:0e018d759a2a
|
83
|
class ProcessObserver(Thread):
    """Daemon thread that pumps a child process's stdout into a Queue.

    Reads the process output one byte at a time and publishes each chunk on
    ``self.queue`` so another thread can consume it without blocking on the
    pipe directly.  The thread starts itself on construction.
    """
    def __init__(self, proc):
        Thread.__init__(self)
        self.proc = proc          # child process (Popen-like, must expose .stdout and .terminate())
        self.queue = Queue()      # consumer side pulls output chunks from here
        self.daemon = True        # don't keep the interpreter alive for this reader
        self.active = True        # cleared by stop() to end the read loop
        self.start()

    def run(self):
        # NOTE(review): after the child exits, read(1) returns an empty string
        # on every call, so this loop spins until stop() is called — presumably
        # the consumer tears the observer down promptly; confirm before reuse.
        while self.active:
            c = self.proc.stdout.read(1)
            self.queue.put(c)

    def stop(self):
        """Stop the read loop and best-effort terminate the child process."""
        self.active = False
        try:
            self.proc.terminate()
        except Exception:
            # Fixed Python-3-incompatible "except Exception, _:" syntax.
            # Termination is best-effort: the process may already be gone.
            pass
|
switches |
0:0e018d759a2a
|
103
|
|
switches |
0:0e018d759a2a
|
104
|
|
switches |
0:0e018d759a2a
|
105
|
class SingleTestExecutor(threading.Thread):
    """Run a SingleTestRunner in a separate thread and print its summaries.

    Example: Single test class in separate thread usage.
    """
    def __init__(self, single_test):
        self.single_test = single_test  # SingleTestRunner instance to execute
        threading.Thread.__init__(self)

    def run(self):
        """Execute the wrapped test run and emit human-readable summaries."""
        start = time()
        # Execute tests depending on options and filter applied
        test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext = self.single_test.execute()
        elapsed_time = time() - start

        # Human readable summary
        # (single-argument print(...) form is equivalent on Python 2 and 3,
        # replacing the Python-2-only "print x" statements)
        if not self.single_test.opts_suppress_summary:
            # prints well-formed summary with results (SQL table like)
            print(self.single_test.generate_test_summary(test_summary, shuffle_seed))
        if self.single_test.opts_test_x_toolchain_summary:
            # prints well-formed summary with results (SQL table like)
            # table shows text x toolchain test result matrix
            print(self.single_test.generate_test_summary_by_target(test_summary, shuffle_seed))
        print("Completed in %.2f sec" % (elapsed_time))
|
switches |
0:0e018d759a2a
|
127
|
|
switches |
0:0e018d759a2a
|
128
|
|
switches |
0:0e018d759a2a
|
129
|
class SingleTestRunner(object):
|
switches |
0:0e018d759a2a
|
130
|
""" Object wrapper for single test run which may involve multiple MUTs
|
switches |
0:0e018d759a2a
|
131
|
"""
|
switches |
0:0e018d759a2a
|
132
|
# Compiled in __init__ to a regex matching "{<result token>}" markers in
# test output (tokens are the keys of TEST_RESULT_MAPPING below).
RE_DETECT_TESTCASE_RESULT = None

# Return codes for test script
TEST_RESULT_OK = "OK"
TEST_RESULT_FAIL = "FAIL"
TEST_RESULT_ERROR = "ERROR"
TEST_RESULT_UNDEF = "UNDEF"
TEST_RESULT_IOERR_COPY = "IOERR_COPY"
TEST_RESULT_IOERR_DISK = "IOERR_DISK"
TEST_RESULT_IOERR_SERIAL = "IOERR_SERIAL"
TEST_RESULT_TIMEOUT = "TIMEOUT"
TEST_RESULT_NO_IMAGE = "NO_IMAGE"
TEST_RESULT_MBED_ASSERT = "MBED_ASSERT"
TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"
TEST_RESULT_NOT_SUPPORTED = "NOT_SUPPORTED"

GLOBAL_LOOPS_COUNT = 1 # How many times each test should be repeated
TEST_LOOPS_LIST = [] # We redefine no.of loops per test_id
TEST_LOOPS_DICT = {} # TEST_LOOPS_LIST in dict format: { test_id : test_loop_count}

muts = {} # MUTs descriptor (from external file)
test_spec = {} # Test specification (from external file)

# mbed test suite -> SingleTestRunner
# Maps lower-case result tokens reported by the test harness onto the
# TEST_RESULT_* constants above.
TEST_RESULT_MAPPING = {"success" : TEST_RESULT_OK,
                       "failure" : TEST_RESULT_FAIL,
                       "error" : TEST_RESULT_ERROR,
                       "ioerr_copy" : TEST_RESULT_IOERR_COPY,
                       "ioerr_disk" : TEST_RESULT_IOERR_DISK,
                       "ioerr_serial" : TEST_RESULT_IOERR_SERIAL,
                       "timeout" : TEST_RESULT_TIMEOUT,
                       "no_image" : TEST_RESULT_NO_IMAGE,
                       "end" : TEST_RESULT_UNDEF,
                       "mbed_assert" : TEST_RESULT_MBED_ASSERT,
                       "build_failed" : TEST_RESULT_BUILD_FAILED,
                       # NOTE(review): "not_supproted" is misspelled but is a
                       # runtime key — presumably it matches the token the
                       # host-side tooling emits; verify before renaming.
                       "not_supproted" : TEST_RESULT_NOT_SUPPORTED
                       }
|
switches |
0:0e018d759a2a
|
169
|
|
switches |
0:0e018d759a2a
|
170
|
def __init__(self,
             _global_loops_count=1,
             _test_loops_list=None,
             _muts=None,
             _clean=False,
             _parser=None,
             _opts=None,
             _opts_db_url=None,
             _opts_log_file_name=None,
             _opts_report_html_file_name=None,
             _opts_report_junit_file_name=None,
             _opts_report_build_file_name=None,
             _opts_report_text_file_name=None,
             _opts_build_report=None,
             _opts_build_properties=None,
             _test_spec=None,
             _opts_goanna_for_mbed_sdk=None,
             _opts_goanna_for_tests=None,
             _opts_shuffle_test_order=False,
             _opts_shuffle_test_seed=None,
             _opts_test_by_names=None,
             _opts_peripheral_by_names=None,
             _opts_test_only_peripheral=False,
             _opts_test_only_common=False,
             _opts_verbose_skipped_tests=False,
             _opts_verbose_test_result_only=False,
             _opts_verbose=False,
             _opts_firmware_global_name=None,
             _opts_only_build_tests=False,
             _opts_parallel_test_exec=False,
             _opts_suppress_summary=False,
             _opts_test_x_toolchain_summary=False,
             _opts_copy_method=None,
             _opts_mut_reset_type=None,
             _opts_jobs=None,
             _opts_waterfall_test=None,
             _opts_consolidate_waterfall_test=None,
             _opts_extend_test_timeout=None,
             _opts_auto_detect=None,
             _opts_include_non_automated=False):
    """ Let's try hard to init this object.

    Stores all command-line style options on the instance, compiles the
    test-result detection regex, initializes the CLI logger, and (when a
    database URL is given) registers a new build id with the database.
    """
    from colorama import init
    init()

    # The dict-valued parameters previously used mutable default arguments
    # ({} in the signature), which are shared across all instances/calls.
    # None now stands in for "use a fresh empty dict" — backward compatible.
    _muts = {} if _muts is None else _muts
    _test_spec = {} if _test_spec is None else _test_spec
    _opts_build_report = {} if _opts_build_report is None else _opts_build_report
    _opts_build_properties = {} if _opts_build_properties is None else _opts_build_properties

    # Regex matching "{<result token>}" markers in test output
    PATTERN = "\\{(" + "|".join(self.TEST_RESULT_MAPPING.keys()) + ")\\}"
    self.RE_DETECT_TESTCASE_RESULT = re.compile(PATTERN)

    # Settings related to test loops counters; anything that isn't an int
    # (or int-convertible) falls back to a single loop.
    try:
        _global_loops_count = int(_global_loops_count)
    except (ValueError, TypeError):
        # was a bare "except:"; only conversion failures should be swallowed
        _global_loops_count = 1
    if _global_loops_count < 1:
        _global_loops_count = 1
    self.GLOBAL_LOOPS_COUNT = _global_loops_count
    self.TEST_LOOPS_LIST = _test_loops_list if _test_loops_list else []
    self.TEST_LOOPS_DICT = self.test_loop_list_to_dict(_test_loops_list)

    self.shuffle_random_seed = 0.0
    self.SHUFFLE_SEED_ROUND = 10  # decimal places kept when rounding the seed

    # MUT list and test specification storage
    self.muts = _muts
    self.test_spec = _test_spec

    # Settings passed e.g. from command line
    self.opts_db_url = _opts_db_url
    self.opts_log_file_name = _opts_log_file_name
    self.opts_report_html_file_name = _opts_report_html_file_name
    self.opts_report_junit_file_name = _opts_report_junit_file_name
    self.opts_report_build_file_name = _opts_report_build_file_name
    self.opts_report_text_file_name = _opts_report_text_file_name
    self.opts_goanna_for_mbed_sdk = _opts_goanna_for_mbed_sdk
    self.opts_goanna_for_tests = _opts_goanna_for_tests
    self.opts_shuffle_test_order = _opts_shuffle_test_order
    self.opts_shuffle_test_seed = _opts_shuffle_test_seed
    self.opts_test_by_names = _opts_test_by_names
    self.opts_peripheral_by_names = _opts_peripheral_by_names
    self.opts_test_only_peripheral = _opts_test_only_peripheral
    self.opts_test_only_common = _opts_test_only_common
    self.opts_verbose_skipped_tests = _opts_verbose_skipped_tests
    self.opts_verbose_test_result_only = _opts_verbose_test_result_only
    self.opts_verbose = _opts_verbose
    self.opts_firmware_global_name = _opts_firmware_global_name
    self.opts_only_build_tests = _opts_only_build_tests
    self.opts_parallel_test_exec = _opts_parallel_test_exec
    self.opts_suppress_summary = _opts_suppress_summary
    self.opts_test_x_toolchain_summary = _opts_test_x_toolchain_summary
    self.opts_copy_method = _opts_copy_method
    self.opts_mut_reset_type = _opts_mut_reset_type
    self.opts_jobs = _opts_jobs if _opts_jobs is not None else 1
    self.opts_waterfall_test = _opts_waterfall_test
    self.opts_consolidate_waterfall_test = _opts_consolidate_waterfall_test
    self.opts_extend_test_timeout = _opts_extend_test_timeout
    self.opts_clean = _clean
    self.opts_parser = _parser
    self.opts = _opts
    self.opts_auto_detect = _opts_auto_detect
    self.opts_include_non_automated = _opts_include_non_automated

    self.build_report = _opts_build_report
    self.build_properties = _opts_build_properties

    # File / screen logger initialization
    self.logger = CLITestLogger(file_name=self.opts_log_file_name)  # Default test logger

    # Database related initializations
    self.db_logger = factory_db_logger(self.opts_db_url)
    self.db_logger_build_id = None  # Build ID (database index of build_id table)
    # Let's connect to database to set up credentials and confirm database is ready
    if self.db_logger:
        self.db_logger.connect_url(self.opts_db_url)  # Save db access info inside db_logger object
        if self.db_logger.is_connected():
            # Get hostname and uname so we can use it as build description
            # when creating new build_id in external database
            (_hostname, _uname) = self.db_logger.get_hostname()
            _host_location = os.path.dirname(os.path.abspath(__file__))
            # NOTE(review): this marks the build as BUILD_ONLY whenever
            # opts_only_build_tests is not None — including the default
            # False. Preserved as-is; confirm the intended condition.
            build_id_type = None if self.opts_only_build_tests is None else self.db_logger.BUILD_ID_TYPE_BUILD_ONLY
            self.db_logger_build_id = self.db_logger.get_next_build_id(_hostname, desc=_uname, location=_host_location, type=build_id_type)
            self.db_logger.disconnect()
|
switches |
0:0e018d759a2a
|
290
|
|
switches |
0:0e018d759a2a
|
291
|
def dump_options(self):
    """ Return a data structure with common settings passed to SingleTestRunner.

    It can be used for example to fill _extra fields in database storing
    test suite single run data.
    Example:
        data = self.dump_options()
    or
        data_str = json.dumps(self.dump_options())
    """
    # Table-driven: every exported option is stringified the same way.
    exported = (
        ("db_url", self.opts_db_url),
        ("log_file_name", self.opts_log_file_name),
        ("shuffle_test_order", self.opts_shuffle_test_order),
        ("shuffle_test_seed", self.opts_shuffle_test_seed),
        ("test_by_names", self.opts_test_by_names),
        ("peripheral_by_names", self.opts_peripheral_by_names),
        ("test_only_peripheral", self.opts_test_only_peripheral),
        ("test_only_common", self.opts_test_only_common),
        ("verbose", self.opts_verbose),
        ("firmware_global_name", self.opts_firmware_global_name),
        ("only_build_tests", self.opts_only_build_tests),
        ("copy_method", self.opts_copy_method),
        ("mut_reset_type", self.opts_mut_reset_type),
        ("jobs", self.opts_jobs),
        ("extend_test_timeout", self.opts_extend_test_timeout),
    )
    result = {name: str(value) for name, value in exported}
    result["_dummy"] = ''
    return result
|
switches |
0:0e018d759a2a
|
317
|
|
switches |
0:0e018d759a2a
|
318
|
def shuffle_random_func(self):
    """Return the stored shuffle seed.

    Passed to random.shuffle() as its random() callable so that test-order
    shuffling is reproducible from a known seed.
    """
    seed = self.shuffle_random_seed
    return seed
|
switches |
0:0e018d759a2a
|
320
|
|
switches |
0:0e018d759a2a
|
321
|
def is_shuffle_seed_float(self):
    """ Return True if the stored shuffle seed can be converted to float.
    """
    # Early-return form of the original result-flag pattern; only a
    # ValueError counts as "not a float", exactly as before.
    try:
        float(self.shuffle_random_seed)
    except ValueError:
        return False
    return True
|
switches |
0:0e018d759a2a
|
330
|
|
switches |
0:0e018d759a2a
|
331
|
# NOTE(review): the containers below are CLASS-level attributes, so they are
# shared by every SingleTestRunner instance unless shadowed on the instance;
# presumably only one runner exists per process — confirm before creating more.
# This will store target / toolchain specific properties
test_suite_properties_ext = {} # target : toolchain
# Here we store test results
test_summary = []
# Here we store test results in extended data structure
test_summary_ext = {}
# Guards concurrent updates to the shared result structures above when
# per-target threads run in parallel.
execute_thread_slice_lock = Lock()
|
switches |
0:0e018d759a2a
|
338
|
|
switches |
0:0e018d759a2a
|
339
|
def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report, build_properties):
|
switches |
0:0e018d759a2a
|
340
|
for toolchain in toolchains:
|
switches |
0:0e018d759a2a
|
341
|
tt_id = "%s::%s" % (toolchain, target)
|
switches |
0:0e018d759a2a
|
342
|
|
switches |
0:0e018d759a2a
|
343
|
T = TARGET_MAP[target]
|
switches |
0:0e018d759a2a
|
344
|
|
switches |
0:0e018d759a2a
|
345
|
# print target, toolchain
|
switches |
0:0e018d759a2a
|
346
|
# Test suite properties returned to external tools like CI
|
switches |
0:0e018d759a2a
|
347
|
test_suite_properties = {
|
switches |
0:0e018d759a2a
|
348
|
'jobs': self.opts_jobs,
|
switches |
0:0e018d759a2a
|
349
|
'clean': clean,
|
switches |
0:0e018d759a2a
|
350
|
'target': target,
|
switches |
0:0e018d759a2a
|
351
|
'vendor': T.extra_labels[0],
|
switches |
0:0e018d759a2a
|
352
|
'test_ids': ', '.join(test_ids),
|
switches |
0:0e018d759a2a
|
353
|
'toolchain': toolchain,
|
switches |
0:0e018d759a2a
|
354
|
'shuffle_random_seed': self.shuffle_random_seed
|
switches |
0:0e018d759a2a
|
355
|
}
|
switches |
0:0e018d759a2a
|
356
|
|
switches |
0:0e018d759a2a
|
357
|
|
switches |
0:0e018d759a2a
|
358
|
# print '=== %s::%s ===' % (target, toolchain)
|
switches |
0:0e018d759a2a
|
359
|
# Let's build our test
|
switches |
0:0e018d759a2a
|
360
|
if target not in TARGET_MAP:
|
switches |
0:0e018d759a2a
|
361
|
print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Target platform not found'% (target))
|
switches |
0:0e018d759a2a
|
362
|
continue
|
switches |
0:0e018d759a2a
|
363
|
|
switches |
0:0e018d759a2a
|
364
|
clean_mbed_libs_options = True if self.opts_goanna_for_mbed_sdk or clean or self.opts_clean else None
|
switches |
0:0e018d759a2a
|
365
|
|
switches |
0:0e018d759a2a
|
366
|
profile = extract_profile(self.opts_parser, self.opts, toolchain)
|
switches |
0:0e018d759a2a
|
367
|
|
switches |
0:0e018d759a2a
|
368
|
|
switches |
0:0e018d759a2a
|
369
|
try:
|
switches |
0:0e018d759a2a
|
370
|
build_mbed_libs_result = build_mbed_libs(T,
|
switches |
0:0e018d759a2a
|
371
|
toolchain,
|
switches |
0:0e018d759a2a
|
372
|
clean=clean_mbed_libs_options,
|
switches |
0:0e018d759a2a
|
373
|
verbose=self.opts_verbose,
|
switches |
0:0e018d759a2a
|
374
|
jobs=self.opts_jobs,
|
switches |
0:0e018d759a2a
|
375
|
report=build_report,
|
switches |
0:0e018d759a2a
|
376
|
properties=build_properties,
|
switches |
0:0e018d759a2a
|
377
|
build_profile=profile)
|
switches |
0:0e018d759a2a
|
378
|
|
switches |
0:0e018d759a2a
|
379
|
if not build_mbed_libs_result:
|
switches |
0:0e018d759a2a
|
380
|
print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Toolchain %s is not yet supported for this target'% (T.name, toolchain))
|
switches |
0:0e018d759a2a
|
381
|
continue
|
switches |
0:0e018d759a2a
|
382
|
|
switches |
0:0e018d759a2a
|
383
|
except ToolException:
|
switches |
0:0e018d759a2a
|
384
|
print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
|
switches |
0:0e018d759a2a
|
385
|
continue
|
switches |
0:0e018d759a2a
|
386
|
|
switches |
0:0e018d759a2a
|
387
|
build_dir = join(BUILD_DIR, "test", target, toolchain)
|
switches |
0:0e018d759a2a
|
388
|
|
switches |
0:0e018d759a2a
|
389
|
test_suite_properties['build_mbed_libs_result'] = build_mbed_libs_result
|
switches |
0:0e018d759a2a
|
390
|
test_suite_properties['build_dir'] = build_dir
|
switches |
0:0e018d759a2a
|
391
|
test_suite_properties['skipped'] = []
|
switches |
0:0e018d759a2a
|
392
|
|
switches |
0:0e018d759a2a
|
393
|
# Enumerate through all tests and shuffle test order if requested
|
switches |
0:0e018d759a2a
|
394
|
test_map_keys = sorted(TEST_MAP.keys())
|
switches |
0:0e018d759a2a
|
395
|
|
switches |
0:0e018d759a2a
|
396
|
if self.opts_shuffle_test_order:
|
switches |
0:0e018d759a2a
|
397
|
random.shuffle(test_map_keys, self.shuffle_random_func)
|
switches |
0:0e018d759a2a
|
398
|
# Update database with shuffle seed if applicable
|
switches |
0:0e018d759a2a
|
399
|
if self.db_logger:
|
switches |
0:0e018d759a2a
|
400
|
self.db_logger.reconnect();
|
switches |
0:0e018d759a2a
|
401
|
if self.db_logger.is_connected():
|
switches |
0:0e018d759a2a
|
402
|
self.db_logger.update_build_id_info(self.db_logger_build_id, _shuffle_seed=self.shuffle_random_func())
|
switches |
0:0e018d759a2a
|
403
|
self.db_logger.disconnect();
|
switches |
0:0e018d759a2a
|
404
|
|
switches |
0:0e018d759a2a
|
405
|
if self.db_logger:
|
switches |
0:0e018d759a2a
|
406
|
self.db_logger.reconnect();
|
switches |
0:0e018d759a2a
|
407
|
if self.db_logger.is_connected():
|
switches |
0:0e018d759a2a
|
408
|
# Update MUTs and Test Specification in database
|
switches |
0:0e018d759a2a
|
409
|
self.db_logger.update_build_id_info(self.db_logger_build_id, _muts=self.muts, _test_spec=self.test_spec)
|
switches |
0:0e018d759a2a
|
410
|
# Update Extra information in database (some options passed to test suite)
|
switches |
0:0e018d759a2a
|
411
|
self.db_logger.update_build_id_info(self.db_logger_build_id, _extra=json.dumps(self.dump_options()))
|
switches |
0:0e018d759a2a
|
412
|
self.db_logger.disconnect();
|
switches |
0:0e018d759a2a
|
413
|
|
switches |
0:0e018d759a2a
|
414
|
valid_test_map_keys = self.get_valid_tests(test_map_keys, target, toolchain, test_ids, self.opts_include_non_automated)
|
switches |
0:0e018d759a2a
|
415
|
skipped_test_map_keys = self.get_skipped_tests(test_map_keys, valid_test_map_keys)
|
switches |
0:0e018d759a2a
|
416
|
|
switches |
0:0e018d759a2a
|
417
|
for skipped_test_id in skipped_test_map_keys:
|
switches |
0:0e018d759a2a
|
418
|
test_suite_properties['skipped'].append(skipped_test_id)
|
switches |
0:0e018d759a2a
|
419
|
|
switches |
0:0e018d759a2a
|
420
|
|
switches |
0:0e018d759a2a
|
421
|
# First pass through all tests and determine which libraries need to be built
|
switches |
0:0e018d759a2a
|
422
|
libraries = []
|
switches |
0:0e018d759a2a
|
423
|
for test_id in valid_test_map_keys:
|
switches |
0:0e018d759a2a
|
424
|
test = TEST_MAP[test_id]
|
switches |
0:0e018d759a2a
|
425
|
|
switches |
0:0e018d759a2a
|
426
|
# Detect which lib should be added to test
|
switches |
0:0e018d759a2a
|
427
|
# Some libs have to compiled like RTOS or ETH
|
switches |
0:0e018d759a2a
|
428
|
for lib in LIBRARIES:
|
switches |
0:0e018d759a2a
|
429
|
if lib['build_dir'] in test.dependencies and lib['id'] not in libraries:
|
switches |
0:0e018d759a2a
|
430
|
libraries.append(lib['id'])
|
switches |
0:0e018d759a2a
|
431
|
|
switches |
0:0e018d759a2a
|
432
|
|
switches |
0:0e018d759a2a
|
433
|
clean_project_options = True if self.opts_goanna_for_tests or clean or self.opts_clean else None
|
switches |
0:0e018d759a2a
|
434
|
|
switches |
0:0e018d759a2a
|
435
|
# Build all required libraries
|
switches |
0:0e018d759a2a
|
436
|
for lib_id in libraries:
|
switches |
0:0e018d759a2a
|
437
|
try:
|
switches |
0:0e018d759a2a
|
438
|
build_lib(lib_id,
|
switches |
0:0e018d759a2a
|
439
|
T,
|
switches |
0:0e018d759a2a
|
440
|
toolchain,
|
switches |
0:0e018d759a2a
|
441
|
verbose=self.opts_verbose,
|
switches |
0:0e018d759a2a
|
442
|
clean=clean_mbed_libs_options,
|
switches |
0:0e018d759a2a
|
443
|
jobs=self.opts_jobs,
|
switches |
0:0e018d759a2a
|
444
|
report=build_report,
|
switches |
0:0e018d759a2a
|
445
|
properties=build_properties,
|
switches |
0:0e018d759a2a
|
446
|
build_profile=profile)
|
switches |
0:0e018d759a2a
|
447
|
|
switches |
0:0e018d759a2a
|
448
|
except ToolException:
|
switches |
0:0e018d759a2a
|
449
|
print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building library %s'% (lib_id))
|
switches |
0:0e018d759a2a
|
450
|
continue
|
switches |
0:0e018d759a2a
|
451
|
|
switches |
0:0e018d759a2a
|
452
|
|
switches |
0:0e018d759a2a
|
453
|
for test_id in valid_test_map_keys:
|
switches |
0:0e018d759a2a
|
454
|
test = TEST_MAP[test_id]
|
switches |
0:0e018d759a2a
|
455
|
|
switches |
0:0e018d759a2a
|
456
|
test_suite_properties['test.libs.%s.%s.%s'% (target, toolchain, test_id)] = ', '.join(libraries)
|
switches |
0:0e018d759a2a
|
457
|
|
switches |
0:0e018d759a2a
|
458
|
# TODO: move this 2 below loops to separate function
|
switches |
0:0e018d759a2a
|
459
|
INC_DIRS = []
|
switches |
0:0e018d759a2a
|
460
|
for lib_id in libraries:
|
switches |
0:0e018d759a2a
|
461
|
if 'inc_dirs_ext' in LIBRARY_MAP[lib_id] and LIBRARY_MAP[lib_id]['inc_dirs_ext']:
|
switches |
0:0e018d759a2a
|
462
|
INC_DIRS.extend(LIBRARY_MAP[lib_id]['inc_dirs_ext'])
|
switches |
0:0e018d759a2a
|
463
|
|
switches |
0:0e018d759a2a
|
464
|
MACROS = []
|
switches |
0:0e018d759a2a
|
465
|
for lib_id in libraries:
|
switches |
0:0e018d759a2a
|
466
|
if 'macros' in LIBRARY_MAP[lib_id] and LIBRARY_MAP[lib_id]['macros']:
|
switches |
0:0e018d759a2a
|
467
|
MACROS.extend(LIBRARY_MAP[lib_id]['macros'])
|
switches |
0:0e018d759a2a
|
468
|
MACROS.append('TEST_SUITE_TARGET_NAME="%s"'% target)
|
switches |
0:0e018d759a2a
|
469
|
MACROS.append('TEST_SUITE_TEST_ID="%s"'% test_id)
|
switches |
0:0e018d759a2a
|
470
|
test_uuid = uuid.uuid4()
|
switches |
0:0e018d759a2a
|
471
|
MACROS.append('TEST_SUITE_UUID="%s"'% str(test_uuid))
|
switches |
0:0e018d759a2a
|
472
|
|
switches |
0:0e018d759a2a
|
473
|
# Prepare extended test results data structure (it can be used to generate detailed test report)
|
switches |
0:0e018d759a2a
|
474
|
if target not in self.test_summary_ext:
|
switches |
0:0e018d759a2a
|
475
|
self.test_summary_ext[target] = {} # test_summary_ext : toolchain
|
switches |
0:0e018d759a2a
|
476
|
if toolchain not in self.test_summary_ext[target]:
|
switches |
0:0e018d759a2a
|
477
|
self.test_summary_ext[target][toolchain] = {} # test_summary_ext : toolchain : target
|
switches |
0:0e018d759a2a
|
478
|
|
switches |
0:0e018d759a2a
|
479
|
tt_test_id = "%s::%s::%s" % (toolchain, target, test_id) # For logging only
|
switches |
0:0e018d759a2a
|
480
|
|
switches |
0:0e018d759a2a
|
481
|
project_name = self.opts_firmware_global_name if self.opts_firmware_global_name else None
|
switches |
0:0e018d759a2a
|
482
|
try:
|
switches |
0:0e018d759a2a
|
483
|
path = build_project(test.source_dir,
|
switches |
0:0e018d759a2a
|
484
|
join(build_dir, test_id),
|
switches |
0:0e018d759a2a
|
485
|
T,
|
switches |
0:0e018d759a2a
|
486
|
toolchain,
|
switches |
0:0e018d759a2a
|
487
|
test.dependencies,
|
switches |
0:0e018d759a2a
|
488
|
clean=clean_project_options,
|
switches |
0:0e018d759a2a
|
489
|
verbose=self.opts_verbose,
|
switches |
0:0e018d759a2a
|
490
|
name=project_name,
|
switches |
0:0e018d759a2a
|
491
|
macros=MACROS,
|
switches |
0:0e018d759a2a
|
492
|
inc_dirs=INC_DIRS,
|
switches |
0:0e018d759a2a
|
493
|
jobs=self.opts_jobs,
|
switches |
0:0e018d759a2a
|
494
|
report=build_report,
|
switches |
0:0e018d759a2a
|
495
|
properties=build_properties,
|
switches |
0:0e018d759a2a
|
496
|
project_id=test_id,
|
switches |
0:0e018d759a2a
|
497
|
project_description=test.get_description(),
|
switches |
0:0e018d759a2a
|
498
|
build_profile=profile)
|
switches |
0:0e018d759a2a
|
499
|
|
switches |
0:0e018d759a2a
|
500
|
except Exception, e:
|
switches |
0:0e018d759a2a
|
501
|
project_name_str = project_name if project_name is not None else test_id
|
switches |
0:0e018d759a2a
|
502
|
|
switches |
0:0e018d759a2a
|
503
|
|
switches |
0:0e018d759a2a
|
504
|
test_result = self.TEST_RESULT_FAIL
|
switches |
0:0e018d759a2a
|
505
|
|
switches |
0:0e018d759a2a
|
506
|
if isinstance(e, ToolException):
|
switches |
0:0e018d759a2a
|
507
|
print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building project %s'% (project_name_str))
|
switches |
0:0e018d759a2a
|
508
|
test_result = self.TEST_RESULT_BUILD_FAILED
|
switches |
0:0e018d759a2a
|
509
|
elif isinstance(e, NotSupportedException):
|
switches |
0:0e018d759a2a
|
510
|
print self.logger.log_line(self.logger.LogType.INFO, 'The project %s is not supported'% (project_name_str))
|
switches |
0:0e018d759a2a
|
511
|
test_result = self.TEST_RESULT_NOT_SUPPORTED
|
switches |
0:0e018d759a2a
|
512
|
|
switches |
0:0e018d759a2a
|
513
|
|
switches |
0:0e018d759a2a
|
514
|
# Append test results to global test summary
|
switches |
0:0e018d759a2a
|
515
|
self.test_summary.append(
|
switches |
0:0e018d759a2a
|
516
|
(test_result, target, toolchain, test_id, test.get_description(), 0, 0, '-')
|
switches |
0:0e018d759a2a
|
517
|
)
|
switches |
0:0e018d759a2a
|
518
|
|
switches |
0:0e018d759a2a
|
519
|
# Add detailed test result to test summary structure
|
switches |
0:0e018d759a2a
|
520
|
if test_id not in self.test_summary_ext[target][toolchain]:
|
switches |
0:0e018d759a2a
|
521
|
self.test_summary_ext[target][toolchain][test_id] = []
|
switches |
0:0e018d759a2a
|
522
|
|
switches |
0:0e018d759a2a
|
523
|
self.test_summary_ext[target][toolchain][test_id].append({ 0: {
|
switches |
0:0e018d759a2a
|
524
|
'result' : test_result,
|
switches |
0:0e018d759a2a
|
525
|
'output' : '',
|
switches |
0:0e018d759a2a
|
526
|
'target_name' : target,
|
switches |
0:0e018d759a2a
|
527
|
'target_name_unique': target,
|
switches |
0:0e018d759a2a
|
528
|
'toolchain_name' : toolchain,
|
switches |
0:0e018d759a2a
|
529
|
'id' : test_id,
|
switches |
0:0e018d759a2a
|
530
|
'description' : test.get_description(),
|
switches |
0:0e018d759a2a
|
531
|
'elapsed_time' : 0,
|
switches |
0:0e018d759a2a
|
532
|
'duration' : 0,
|
switches |
0:0e018d759a2a
|
533
|
'copy_method' : None
|
switches |
0:0e018d759a2a
|
534
|
}})
|
switches |
0:0e018d759a2a
|
535
|
continue
|
switches |
0:0e018d759a2a
|
536
|
|
switches |
0:0e018d759a2a
|
537
|
if self.opts_only_build_tests:
|
switches |
0:0e018d759a2a
|
538
|
# With this option we are skipping testing phase
|
switches |
0:0e018d759a2a
|
539
|
continue
|
switches |
0:0e018d759a2a
|
540
|
|
switches |
0:0e018d759a2a
|
541
|
# Test duration can be increased by global value
|
switches |
0:0e018d759a2a
|
542
|
test_duration = test.duration
|
switches |
0:0e018d759a2a
|
543
|
if self.opts_extend_test_timeout is not None:
|
switches |
0:0e018d759a2a
|
544
|
test_duration += self.opts_extend_test_timeout
|
switches |
0:0e018d759a2a
|
545
|
|
switches |
0:0e018d759a2a
|
546
|
# For an automated test the duration act as a timeout after
|
switches |
0:0e018d759a2a
|
547
|
# which the test gets interrupted
|
switches |
0:0e018d759a2a
|
548
|
test_spec = self.shape_test_request(target, path, test_id, test_duration)
|
switches |
0:0e018d759a2a
|
549
|
test_loops = self.get_test_loop_count(test_id)
|
switches |
0:0e018d759a2a
|
550
|
|
switches |
0:0e018d759a2a
|
551
|
test_suite_properties['test.duration.%s.%s.%s'% (target, toolchain, test_id)] = test_duration
|
switches |
0:0e018d759a2a
|
552
|
test_suite_properties['test.loops.%s.%s.%s'% (target, toolchain, test_id)] = test_loops
|
switches |
0:0e018d759a2a
|
553
|
test_suite_properties['test.path.%s.%s.%s'% (target, toolchain, test_id)] = path
|
switches |
0:0e018d759a2a
|
554
|
|
switches |
0:0e018d759a2a
|
555
|
# read MUTs, test specification and perform tests
|
switches |
0:0e018d759a2a
|
556
|
handle_results = self.handle(test_spec, target, toolchain, test_loops=test_loops)
|
switches |
0:0e018d759a2a
|
557
|
|
switches |
0:0e018d759a2a
|
558
|
if handle_results is None:
|
switches |
0:0e018d759a2a
|
559
|
continue
|
switches |
0:0e018d759a2a
|
560
|
|
switches |
0:0e018d759a2a
|
561
|
for handle_result in handle_results:
|
switches |
0:0e018d759a2a
|
562
|
if handle_result:
|
switches |
0:0e018d759a2a
|
563
|
single_test_result, detailed_test_results = handle_result
|
switches |
0:0e018d759a2a
|
564
|
else:
|
switches |
0:0e018d759a2a
|
565
|
continue
|
switches |
0:0e018d759a2a
|
566
|
|
switches |
0:0e018d759a2a
|
567
|
# Append test results to global test summary
|
switches |
0:0e018d759a2a
|
568
|
if single_test_result is not None:
|
switches |
0:0e018d759a2a
|
569
|
self.test_summary.append(single_test_result)
|
switches |
0:0e018d759a2a
|
570
|
|
switches |
0:0e018d759a2a
|
571
|
# Add detailed test result to test summary structure
|
switches |
0:0e018d759a2a
|
572
|
if target not in self.test_summary_ext[target][toolchain]:
|
switches |
0:0e018d759a2a
|
573
|
if test_id not in self.test_summary_ext[target][toolchain]:
|
switches |
0:0e018d759a2a
|
574
|
self.test_summary_ext[target][toolchain][test_id] = []
|
switches |
0:0e018d759a2a
|
575
|
|
switches |
0:0e018d759a2a
|
576
|
append_test_result = detailed_test_results
|
switches |
0:0e018d759a2a
|
577
|
|
switches |
0:0e018d759a2a
|
578
|
# If waterfall and consolidate-waterfall options are enabled,
|
switches |
0:0e018d759a2a
|
579
|
# only include the last test result in the report.
|
switches |
0:0e018d759a2a
|
580
|
if self.opts_waterfall_test and self.opts_consolidate_waterfall_test:
|
switches |
0:0e018d759a2a
|
581
|
append_test_result = {0: detailed_test_results[len(detailed_test_results) - 1]}
|
switches |
0:0e018d759a2a
|
582
|
|
switches |
0:0e018d759a2a
|
583
|
self.test_summary_ext[target][toolchain][test_id].append(append_test_result)
|
switches |
0:0e018d759a2a
|
584
|
|
switches |
0:0e018d759a2a
|
585
|
test_suite_properties['skipped'] = ', '.join(test_suite_properties['skipped'])
|
switches |
0:0e018d759a2a
|
586
|
self.test_suite_properties_ext[target][toolchain] = test_suite_properties
|
switches |
0:0e018d759a2a
|
587
|
|
switches |
0:0e018d759a2a
|
588
|
q.put(target + '_'.join(toolchains))
|
switches |
0:0e018d759a2a
|
589
|
return
|
switches |
0:0e018d759a2a
|
590
|
|
switches |
0:0e018d759a2a
|
591
|
def execute(self):
    """ Build and run tests for every target/toolchain pair in self.test_spec.

    Delegates the per-target work to execute_thread_slice(), either in one
    daemon thread per target (when self.opts_parallel_test_exec is set) or
    serially in the calling thread.  Returns the 6-tuple:
    (test_summary, shuffle_random_seed, test_summary_ext,
     test_suite_properties_ext, build_report, build_properties)
    """
    clean = self.test_spec.get('clean', False)
    test_ids = self.test_spec.get('test_ids', [])
    q = Queue()  # completion queue: each execute_thread_slice() call puts one item when done

    # Generate seed for shuffle if seed is not provided in options
    self.shuffle_random_seed = round(random.random(), self.SHUFFLE_SEED_ROUND)
    if self.opts_shuffle_test_seed is not None and self.is_shuffle_seed_float():
        # A user-supplied seed overrides the randomly generated one
        self.shuffle_random_seed = round(float(self.opts_shuffle_test_seed), self.SHUFFLE_SEED_ROUND)


    if self.opts_parallel_test_exec:
        ###################################################################
        # Experimental, parallel test execution per singletest instance.
        ###################################################################
        execute_threads = []   # Threads used to build mbed SDL, libs, test cases and execute tests
        # Note: We are building here in parallel for each target separately!
        # So we are not building the same thing multiple times and compilers
        # in separate threads do not collide.
        # Inside execute_thread_slice() the handle() function will be called to
        # get information about available MUTs (per target).
        for target, toolchains in self.test_spec['targets'].iteritems():
            self.test_suite_properties_ext[target] = {}
            t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids, self.build_report, self.build_properties))
            t.daemon = True  # do not keep the process alive if the main thread exits
            t.start()
            execute_threads.append(t)

        for t in execute_threads:
            q.get() # t.join() would block some threads because we should not wait in any order for thread end
    else:
        # Serialized (not parallel) test execution
        for target, toolchains in self.test_spec['targets'].iteritems():
            if target not in self.test_suite_properties_ext:
                self.test_suite_properties_ext[target] = {}

            self.execute_thread_slice(q, target, toolchains, clean, test_ids, self.build_report, self.build_properties)
            q.get()  # consume the completion token put by execute_thread_slice()

    if self.db_logger:
        # Best-effort: mark the whole build as completed in the results database
        self.db_logger.reconnect();
        if self.db_logger.is_connected():
            self.db_logger.update_build_id_info(self.db_logger_build_id, _status_fk=self.db_logger.BUILD_ID_STATUS_COMPLETED)
            self.db_logger.disconnect();

    return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, self.build_report, self.build_properties
|
switches |
0:0e018d759a2a
|
637
|
|
switches |
0:0e018d759a2a
|
638
|
def get_valid_tests(self, test_map_keys, target, toolchain, test_ids, include_non_automated):
    """ Filter test_map_keys down to the tests that should be built/run
    for the given target/toolchain pair.

    A test id survives only if it passes every filter below: explicit name
    selection (-n / test_ids), the peripheral-only / common-only options,
    the automation flag, target/toolchain support and (unless overridden
    with -p) peripheral availability.  Returns the list of surviving test
    ids, preserving the order of test_map_keys.
    """
    valid_test_map_keys = []

    for test_id in test_map_keys:
        test = TEST_MAP[test_id]
        # Skip tests not explicitly requested by name (-n option)
        if self.opts_test_by_names and test_id not in self.opts_test_by_names:
            continue

        # Skip tests not listed in this session's test_ids restriction
        if test_ids and test_id not in test_ids:
            continue

        # --test-only-peripheral: drop tests with no peripheral requirements
        if self.opts_test_only_peripheral and not test.peripherals:
            if self.opts_verbose_skipped_tests:
                print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
            continue

        # -p option: drop peripheral tests none of whose peripherals were forced
        # NOTE(review): the log message says 'Common test' although this branch
        # filters peripheral tests -- looks copy-pasted; confirm intended wording.
        if self.opts_peripheral_by_names and test.peripherals and not len([i for i in test.peripherals if i in self.opts_peripheral_by_names]):
            # We will skip tests not forced with -p option
            if self.opts_verbose_skipped_tests:
                print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
            continue

        # --test-only-common: drop tests that require peripherals
        if self.opts_test_only_common and test.peripherals:
            if self.opts_verbose_skipped_tests:
                print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral test skipped for target %s'% (target))
            continue

        # Drop non-automated tests unless the caller asked to include them
        if not include_non_automated and not test.automated:
            if self.opts_verbose_skipped_tests:
                print self.logger.log_line(self.logger.LogType.INFO, 'Non automated test skipped for target %s'% (target))
            continue

        # Tests unsupported on this target/toolchain are skipped silently
        if test.is_supported(target, toolchain):
            if test.peripherals is None and self.opts_only_build_tests:
                # When users are using 'build only flag' and test do not have
                # specified peripherals we can allow test building by default
                pass
            elif self.opts_peripheral_by_names and test_id not in self.opts_peripheral_by_names:
                # If we force peripheral with option -p we expect test
                # to pass even if peripheral is not in MUTs file.
                pass
            elif not self.is_peripherals_available(target, test.peripherals):
                if self.opts_verbose_skipped_tests:
                    if test.peripherals:
                        print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral %s test skipped for target %s'% (",".join(test.peripherals), target))
                    else:
                        print self.logger.log_line(self.logger.LogType.INFO, 'Test %s skipped for target %s'% (test_id, target))
                continue

            # The test has made it through all the filters, so add it to the valid tests list
            valid_test_map_keys.append(test_id)

    return valid_test_map_keys
|
switches |
0:0e018d759a2a
|
691
|
|
switches |
0:0e018d759a2a
|
692
|
def get_skipped_tests(self, all_test_map_keys, valid_test_map_keys):
    """ Return the test ids present in all_test_map_keys but absent from
    valid_test_map_keys, i.e. the tests that were filtered out.

    NOTE: This will not preserve order (result comes from a set).
    """
    skipped = set(all_test_map_keys)
    skipped.difference_update(valid_test_map_keys)
    return list(skipped)
|
switches |
0:0e018d759a2a
|
695
|
|
switches |
0:0e018d759a2a
|
696
|
def generate_test_summary_by_target(self, test_summary, shuffle_seed=None):
    """ Prints well-formed summary with results (SQL table like)
        table shows text x toolchain test result matrix

    Builds one PrettyTable per target, with a column per toolchain seen for
    that target, and returns the whole report as a single string.
    """
    # Indexes into each test_summary tuple
    RESULT_INDEX = 0
    TARGET_INDEX = 1
    TOOLCHAIN_INDEX = 2
    TEST_INDEX = 3
    DESC_INDEX = 4

    unique_targets = get_unique_value_from_summary(test_summary, TARGET_INDEX)
    unique_tests = get_unique_value_from_summary(test_summary, TEST_INDEX)
    unique_test_desc = get_unique_value_from_summary_ext(test_summary, TEST_INDEX, DESC_INDEX)
    unique_toolchains = get_unique_value_from_summary(test_summary, TOOLCHAIN_INDEX)

    result = "Test summary:\n"
    for target in unique_targets:
        result_dict = {} # test : { toolchain : result }
        unique_target_toolchains = []  # toolchains seen for THIS target only
        for test in test_summary:
            if test[TARGET_INDEX] == target:
                if test[TOOLCHAIN_INDEX] not in unique_target_toolchains:
                    unique_target_toolchains.append(test[TOOLCHAIN_INDEX])
                if test[TEST_INDEX] not in result_dict:
                    result_dict[test[TEST_INDEX]] = {}
                result_dict[test[TEST_INDEX]][test[TOOLCHAIN_INDEX]] = test[RESULT_INDEX]

        pt_cols = ["Target", "Test ID", "Test Description"] + unique_target_toolchains
        pt = PrettyTable(pt_cols)
        for col in pt_cols:
            pt.align[col] = "l"
        pt.padding_width = 1 # One space between column edges and contents (default)

        for test in unique_tests:
            if test in result_dict:
                test_results = result_dict[test]
                if test in unique_test_desc:
                    row = [target, test, unique_test_desc[test]]
                    # NOTE(review): this iterates the GLOBAL toolchain list but the
                    # table columns come from unique_target_toolchains; a result is
                    # appended only when present, so a missing toolchain result
                    # could make the row shorter than the column count -- confirm
                    # PrettyTable tolerates this for the expected inputs.
                    for toolchain in unique_toolchains:
                        if toolchain in test_results:
                            row.append(test_results[toolchain])
                    pt.add_row(row)
        result += pt.get_string()
        shuffle_seed_text = "Shuffle Seed: %.*f"% (self.SHUFFLE_SEED_ROUND,
                                                   shuffle_seed if shuffle_seed else self.shuffle_random_seed)
        result += "\n%s"% (shuffle_seed_text if self.opts_shuffle_test_order else '')
    return result
|
switches |
0:0e018d759a2a
|
743
|
|
switches |
0:0e018d759a2a
|
744
|
def generate_test_summary(self, test_summary, shuffle_seed=None):
    """ Prints well-formed summary with results (SQL table like)
        table shows target x test results matrix across

    @param test_summary List of result tuples
           (result, target, toolchain, test_id, description, elapsed, timeout, loops)
    @param shuffle_seed Optional seed to report instead of the session's own
    @return The whole report as a single string

    Fixes vs previous revision: removed dead local 'success_code' (assigned,
    never used, with a truncated comment).
    """
    result = "Test summary:\n"
    # Pretty table package is used to print results
    pt = PrettyTable(["Result", "Target", "Toolchain", "Test ID", "Test Description",
                      "Elapsed Time (sec)", "Timeout (sec)", "Loops"])
    # Left-align the text columns; numeric columns keep the default alignment
    for col in ("Result", "Target", "Toolchain", "Test ID", "Test Description"):
        pt.align[col] = "l"
    pt.padding_width = 1 # One space between column edges and contents (default)

    # Per-result-type counters used for the one-line "Result:" summary
    result_dict = {self.TEST_RESULT_OK : 0,
                   self.TEST_RESULT_FAIL : 0,
                   self.TEST_RESULT_ERROR : 0,
                   self.TEST_RESULT_UNDEF : 0,
                   self.TEST_RESULT_IOERR_COPY : 0,
                   self.TEST_RESULT_IOERR_DISK : 0,
                   self.TEST_RESULT_IOERR_SERIAL : 0,
                   self.TEST_RESULT_NO_IMAGE : 0,
                   self.TEST_RESULT_TIMEOUT : 0,
                   self.TEST_RESULT_MBED_ASSERT : 0,
                   self.TEST_RESULT_BUILD_FAILED : 0,
                   self.TEST_RESULT_NOT_SUPPORTED : 0
                   }

    for test in test_summary:
        if test[0] in result_dict:
            result_dict[test[0]] += 1
        pt.add_row(test)
    result += pt.get_string()
    result += "\n"

    # Print result count (only the result types that actually occurred)
    result += "Result: " + ' / '.join(['%s %s' % (value, key) for (key, value) in {k: v for k, v in result_dict.items() if v != 0}.iteritems()])
    shuffle_seed_text = "Shuffle Seed: %.*f\n"% (self.SHUFFLE_SEED_ROUND,
                                                 shuffle_seed if shuffle_seed else self.shuffle_random_seed)
    result += "\n%s"% (shuffle_seed_text if self.opts_shuffle_test_order else '')
    return result
|
switches |
0:0e018d759a2a
|
787
|
|
switches |
0:0e018d759a2a
|
788
|
def test_loop_list_to_dict(self, test_loops_str):
|
switches |
0:0e018d759a2a
|
789
|
""" Transforms test_id=X,test_id=X,test_id=X into dictionary {test_id : test_id_loops_count}
|
switches |
0:0e018d759a2a
|
790
|
"""
|
switches |
0:0e018d759a2a
|
791
|
result = {}
|
switches |
0:0e018d759a2a
|
792
|
if test_loops_str:
|
switches |
0:0e018d759a2a
|
793
|
test_loops = test_loops_str
|
switches |
0:0e018d759a2a
|
794
|
for test_loop in test_loops:
|
switches |
0:0e018d759a2a
|
795
|
test_loop_count = test_loop.split('=')
|
switches |
0:0e018d759a2a
|
796
|
if len(test_loop_count) == 2:
|
switches |
0:0e018d759a2a
|
797
|
_test_id, _test_loops = test_loop_count
|
switches |
0:0e018d759a2a
|
798
|
try:
|
switches |
0:0e018d759a2a
|
799
|
_test_loops = int(_test_loops)
|
switches |
0:0e018d759a2a
|
800
|
except:
|
switches |
0:0e018d759a2a
|
801
|
continue
|
switches |
0:0e018d759a2a
|
802
|
result[_test_id] = _test_loops
|
switches |
0:0e018d759a2a
|
803
|
return result
|
switches |
0:0e018d759a2a
|
804
|
|
switches |
0:0e018d759a2a
|
805
|
def get_test_loop_count(self, test_id):
    """ Return the number of loops to run for the given test id.

    Tests with an entry in self.TEST_LOOPS_DICT use their redefined count;
    everything else falls back to self.GLOBAL_LOOPS_COUNT.
    """
    return self.TEST_LOOPS_DICT.get(test_id, self.GLOBAL_LOOPS_COUNT)
|
switches |
0:0e018d759a2a
|
813
|
|
switches |
0:0e018d759a2a
|
814
|
def delete_file(self, file_path):
    """ Remove a file from the system.

    @param file_path Path of the file to delete
    @return Tuple (result, result_msg): result is True on success, False on
            failure; result_msg is the caught exception object on failure,
            empty string otherwise.

    Fixes vs previous revision: renamed the misspelled local 'resutl_msg'
    to 'result_msg' and replaced the Python-2-only 'except Exception, e'
    comma syntax with the 'as' form (valid since Python 2.6).
    """
    result = True
    result_msg = ""
    try:
        os.remove(file_path)
    # Broad on purpose: this is a best-effort cleanup; callers inspect the
    # returned tuple instead of handling exceptions.
    except Exception as e:
        result_msg = e
        result = False
    return result, result_msg
|
switches |
0:0e018d759a2a
|
825
|
|
switches |
0:0e018d759a2a
|
826
|
def handle_mut(self, mut, data, target_name, toolchain_name, test_loops=1):
|
switches |
0:0e018d759a2a
|
827
|
""" Test is being invoked for given MUT.
|
switches |
0:0e018d759a2a
|
828
|
"""
|
switches |
0:0e018d759a2a
|
829
|
# Get test information, image and test timeout
|
switches |
0:0e018d759a2a
|
830
|
test_id = data['test_id']
|
switches |
0:0e018d759a2a
|
831
|
test = TEST_MAP[test_id]
|
switches |
0:0e018d759a2a
|
832
|
test_description = TEST_MAP[test_id].get_description()
|
switches |
0:0e018d759a2a
|
833
|
image = data["image"]
|
switches |
0:0e018d759a2a
|
834
|
duration = data.get("duration", 10)
|
switches |
0:0e018d759a2a
|
835
|
|
switches |
0:0e018d759a2a
|
836
|
if mut is None:
|
switches |
0:0e018d759a2a
|
837
|
print "Error: No Mbed available: MUT[%s]" % data['mcu']
|
switches |
0:0e018d759a2a
|
838
|
return None
|
switches |
0:0e018d759a2a
|
839
|
|
switches |
0:0e018d759a2a
|
840
|
mcu = mut['mcu']
|
switches |
0:0e018d759a2a
|
841
|
copy_method = mut.get('copy_method') # Available board configuration selection e.g. core selection etc.
|
switches |
0:0e018d759a2a
|
842
|
|
switches |
0:0e018d759a2a
|
843
|
if self.db_logger:
|
switches |
0:0e018d759a2a
|
844
|
self.db_logger.reconnect()
|
switches |
0:0e018d759a2a
|
845
|
|
switches |
0:0e018d759a2a
|
846
|
selected_copy_method = self.opts_copy_method if copy_method is None else copy_method
|
switches |
0:0e018d759a2a
|
847
|
|
switches |
0:0e018d759a2a
|
848
|
# Tests can be looped so test results must be stored for the same test
|
switches |
0:0e018d759a2a
|
849
|
test_all_result = []
|
switches |
0:0e018d759a2a
|
850
|
# Test results for one test ran few times
|
switches |
0:0e018d759a2a
|
851
|
detailed_test_results = {} # { Loop_number: { results ... } }
|
switches |
0:0e018d759a2a
|
852
|
|
switches |
0:0e018d759a2a
|
853
|
for test_index in range(test_loops):
|
switches |
0:0e018d759a2a
|
854
|
|
switches |
0:0e018d759a2a
|
855
|
# If mbedls is available and we are auto detecting MUT info,
|
switches |
0:0e018d759a2a
|
856
|
# update MUT info (mounting may changed)
|
switches |
0:0e018d759a2a
|
857
|
if get_module_avail('mbed_lstools') and self.opts_auto_detect:
|
switches |
0:0e018d759a2a
|
858
|
platform_name_filter = [mcu]
|
switches |
0:0e018d759a2a
|
859
|
muts_list = {}
|
switches |
0:0e018d759a2a
|
860
|
found = False
|
switches |
0:0e018d759a2a
|
861
|
|
switches |
0:0e018d759a2a
|
862
|
for i in range(0, 60):
|
switches |
0:0e018d759a2a
|
863
|
print('Looking for %s with MBEDLS' % mcu)
|
switches |
0:0e018d759a2a
|
864
|
muts_list = get_autodetected_MUTS_list(platform_name_filter=platform_name_filter)
|
switches |
0:0e018d759a2a
|
865
|
|
switches |
0:0e018d759a2a
|
866
|
if 1 not in muts_list:
|
switches |
0:0e018d759a2a
|
867
|
sleep(3)
|
switches |
0:0e018d759a2a
|
868
|
else:
|
switches |
0:0e018d759a2a
|
869
|
found = True
|
switches |
0:0e018d759a2a
|
870
|
break
|
switches |
0:0e018d759a2a
|
871
|
|
switches |
0:0e018d759a2a
|
872
|
if not found:
|
switches |
0:0e018d759a2a
|
873
|
print "Error: mbed not found with MBEDLS: %s" % data['mcu']
|
switches |
0:0e018d759a2a
|
874
|
return None
|
switches |
0:0e018d759a2a
|
875
|
else:
|
switches |
0:0e018d759a2a
|
876
|
mut = muts_list[1]
|
switches |
0:0e018d759a2a
|
877
|
|
switches |
0:0e018d759a2a
|
878
|
disk = mut.get('disk')
|
switches |
0:0e018d759a2a
|
879
|
port = mut.get('port')
|
switches |
0:0e018d759a2a
|
880
|
|
switches |
0:0e018d759a2a
|
881
|
if disk is None or port is None:
|
switches |
0:0e018d759a2a
|
882
|
return None
|
switches |
0:0e018d759a2a
|
883
|
|
switches |
0:0e018d759a2a
|
884
|
target_by_mcu = TARGET_MAP[mut['mcu']]
|
switches |
0:0e018d759a2a
|
885
|
target_name_unique = mut['mcu_unique'] if 'mcu_unique' in mut else mut['mcu']
|
switches |
0:0e018d759a2a
|
886
|
# Some extra stuff can be declared in MUTs structure
|
switches |
0:0e018d759a2a
|
887
|
reset_type = mut.get('reset_type') # reboot.txt, reset.txt, shutdown.txt
|
switches |
0:0e018d759a2a
|
888
|
reset_tout = mut.get('reset_tout') # COPY_IMAGE -> RESET_PROC -> SLEEP(RESET_TOUT)
|
switches |
0:0e018d759a2a
|
889
|
|
switches |
0:0e018d759a2a
|
890
|
# When the build and test system were separate, this was relative to a
|
switches |
0:0e018d759a2a
|
891
|
# base network folder base path: join(NETWORK_BASE_PATH, )
|
switches |
0:0e018d759a2a
|
892
|
image_path = image
|
switches |
0:0e018d759a2a
|
893
|
|
switches |
0:0e018d759a2a
|
894
|
# Host test execution
|
switches |
0:0e018d759a2a
|
895
|
start_host_exec_time = time()
|
switches |
0:0e018d759a2a
|
896
|
|
switches |
0:0e018d759a2a
|
897
|
single_test_result = self.TEST_RESULT_UNDEF # single test run result
|
switches |
0:0e018d759a2a
|
898
|
_copy_method = selected_copy_method
|
switches |
0:0e018d759a2a
|
899
|
|
switches |
0:0e018d759a2a
|
900
|
if not exists(image_path):
|
switches |
0:0e018d759a2a
|
901
|
single_test_result = self.TEST_RESULT_NO_IMAGE
|
switches |
0:0e018d759a2a
|
902
|
elapsed_time = 0
|
switches |
0:0e018d759a2a
|
903
|
single_test_output = self.logger.log_line(self.logger.LogType.ERROR, 'Image file does not exist: %s'% image_path)
|
switches |
0:0e018d759a2a
|
904
|
print single_test_output
|
switches |
0:0e018d759a2a
|
905
|
else:
|
switches |
0:0e018d759a2a
|
906
|
# Host test execution
|
switches |
0:0e018d759a2a
|
907
|
start_host_exec_time = time()
|
switches |
0:0e018d759a2a
|
908
|
|
switches |
0:0e018d759a2a
|
909
|
host_test_verbose = self.opts_verbose_test_result_only or self.opts_verbose
|
switches |
0:0e018d759a2a
|
910
|
host_test_reset = self.opts_mut_reset_type if reset_type is None else reset_type
|
switches |
0:0e018d759a2a
|
911
|
host_test_result = self.run_host_test(test.host_test,
|
switches |
0:0e018d759a2a
|
912
|
image_path, disk, port, duration,
|
switches |
0:0e018d759a2a
|
913
|
micro=target_name,
|
switches |
0:0e018d759a2a
|
914
|
verbose=host_test_verbose,
|
switches |
0:0e018d759a2a
|
915
|
reset=host_test_reset,
|
switches |
0:0e018d759a2a
|
916
|
reset_tout=reset_tout,
|
switches |
0:0e018d759a2a
|
917
|
copy_method=selected_copy_method,
|
switches |
0:0e018d759a2a
|
918
|
program_cycle_s=target_by_mcu.program_cycle_s)
|
switches |
0:0e018d759a2a
|
919
|
single_test_result, single_test_output, single_testduration, single_timeout = host_test_result
|
switches |
0:0e018d759a2a
|
920
|
|
switches |
0:0e018d759a2a
|
921
|
# Store test result
|
switches |
0:0e018d759a2a
|
922
|
test_all_result.append(single_test_result)
|
switches |
0:0e018d759a2a
|
923
|
total_elapsed_time = time() - start_host_exec_time # Test time with copy (flashing) / reset
|
switches |
0:0e018d759a2a
|
924
|
elapsed_time = single_testduration # TIme of single test case execution after reset
|
switches |
0:0e018d759a2a
|
925
|
|
switches |
0:0e018d759a2a
|
926
|
detailed_test_results[test_index] = {
|
switches |
0:0e018d759a2a
|
927
|
'result' : single_test_result,
|
switches |
0:0e018d759a2a
|
928
|
'output' : single_test_output,
|
switches |
0:0e018d759a2a
|
929
|
'target_name' : target_name,
|
switches |
0:0e018d759a2a
|
930
|
'target_name_unique' : target_name_unique,
|
switches |
0:0e018d759a2a
|
931
|
'toolchain_name' : toolchain_name,
|
switches |
0:0e018d759a2a
|
932
|
'id' : test_id,
|
switches |
0:0e018d759a2a
|
933
|
'description' : test_description,
|
switches |
0:0e018d759a2a
|
934
|
'elapsed_time' : round(elapsed_time, 2),
|
switches |
0:0e018d759a2a
|
935
|
'duration' : single_timeout,
|
switches |
0:0e018d759a2a
|
936
|
'copy_method' : _copy_method,
|
switches |
0:0e018d759a2a
|
937
|
}
|
switches |
0:0e018d759a2a
|
938
|
|
switches |
0:0e018d759a2a
|
939
|
print self.print_test_result(single_test_result, target_name_unique, toolchain_name,
|
switches |
0:0e018d759a2a
|
940
|
test_id, test_description, elapsed_time, single_timeout)
|
switches |
0:0e018d759a2a
|
941
|
|
switches |
0:0e018d759a2a
|
942
|
# Update database entries for ongoing test
|
switches |
0:0e018d759a2a
|
943
|
if self.db_logger and self.db_logger.is_connected():
|
switches |
0:0e018d759a2a
|
944
|
test_type = 'SingleTest'
|
switches |
0:0e018d759a2a
|
945
|
self.db_logger.insert_test_entry(self.db_logger_build_id,
|
switches |
0:0e018d759a2a
|
946
|
target_name,
|
switches |
0:0e018d759a2a
|
947
|
toolchain_name,
|
switches |
0:0e018d759a2a
|
948
|
test_type,
|
switches |
0:0e018d759a2a
|
949
|
test_id,
|
switches |
0:0e018d759a2a
|
950
|
single_test_result,
|
switches |
0:0e018d759a2a
|
951
|
single_test_output,
|
switches |
0:0e018d759a2a
|
952
|
elapsed_time,
|
switches |
0:0e018d759a2a
|
953
|
single_timeout,
|
switches |
0:0e018d759a2a
|
954
|
test_index)
|
switches |
0:0e018d759a2a
|
955
|
|
switches |
0:0e018d759a2a
|
956
|
# If we perform waterfall test we test until we get OK and we stop testing
|
switches |
0:0e018d759a2a
|
957
|
if self.opts_waterfall_test and single_test_result == self.TEST_RESULT_OK:
|
switches |
0:0e018d759a2a
|
958
|
break
|
switches |
0:0e018d759a2a
|
959
|
|
switches |
0:0e018d759a2a
|
960
|
if self.db_logger:
|
switches |
0:0e018d759a2a
|
961
|
self.db_logger.disconnect()
|
switches |
0:0e018d759a2a
|
962
|
|
switches |
0:0e018d759a2a
|
963
|
return (self.shape_global_test_loop_result(test_all_result, self.opts_waterfall_test and self.opts_consolidate_waterfall_test),
|
switches |
0:0e018d759a2a
|
964
|
target_name_unique,
|
switches |
0:0e018d759a2a
|
965
|
toolchain_name,
|
switches |
0:0e018d759a2a
|
966
|
test_id,
|
switches |
0:0e018d759a2a
|
967
|
test_description,
|
switches |
0:0e018d759a2a
|
968
|
round(elapsed_time, 2),
|
switches |
0:0e018d759a2a
|
969
|
single_timeout,
|
switches |
0:0e018d759a2a
|
970
|
self.shape_test_loop_ok_result_count(test_all_result)), detailed_test_results
|
switches |
0:0e018d759a2a
|
971
|
|
switches |
0:0e018d759a2a
|
972
|
def handle(self, test_spec, target_name, toolchain_name, test_loops=1):
|
switches |
0:0e018d759a2a
|
973
|
""" Function determines MUT's mbed disk/port and copies binary to
|
switches |
0:0e018d759a2a
|
974
|
target.
|
switches |
0:0e018d759a2a
|
975
|
"""
|
switches |
0:0e018d759a2a
|
976
|
handle_results = []
|
switches |
0:0e018d759a2a
|
977
|
data = json.loads(test_spec)
|
switches |
0:0e018d759a2a
|
978
|
|
switches |
0:0e018d759a2a
|
979
|
# Find a suitable MUT:
|
switches |
0:0e018d759a2a
|
980
|
mut = None
|
switches |
0:0e018d759a2a
|
981
|
for id, m in self.muts.iteritems():
|
switches |
0:0e018d759a2a
|
982
|
if m['mcu'] == data['mcu']:
|
switches |
0:0e018d759a2a
|
983
|
mut = m
|
switches |
0:0e018d759a2a
|
984
|
handle_result = self.handle_mut(mut, data, target_name, toolchain_name, test_loops=test_loops)
|
switches |
0:0e018d759a2a
|
985
|
handle_results.append(handle_result)
|
switches |
0:0e018d759a2a
|
986
|
|
switches |
0:0e018d759a2a
|
987
|
return handle_results
|
switches |
0:0e018d759a2a
|
988
|
|
switches |
0:0e018d759a2a
|
989
|
def print_test_result(self, test_result, target_name, toolchain_name,
|
switches |
0:0e018d759a2a
|
990
|
test_id, test_description, elapsed_time, duration):
|
switches |
0:0e018d759a2a
|
991
|
""" Use specific convention to print test result and related data
|
switches |
0:0e018d759a2a
|
992
|
"""
|
switches |
0:0e018d759a2a
|
993
|
tokens = []
|
switches |
0:0e018d759a2a
|
994
|
tokens.append("TargetTest")
|
switches |
0:0e018d759a2a
|
995
|
tokens.append(target_name)
|
switches |
0:0e018d759a2a
|
996
|
tokens.append(toolchain_name)
|
switches |
0:0e018d759a2a
|
997
|
tokens.append(test_id)
|
switches |
0:0e018d759a2a
|
998
|
tokens.append(test_description)
|
switches |
0:0e018d759a2a
|
999
|
separator = "::"
|
switches |
0:0e018d759a2a
|
1000
|
time_info = " in %.2f of %d sec" % (round(elapsed_time, 2), duration)
|
switches |
0:0e018d759a2a
|
1001
|
result = separator.join(tokens) + " [" + test_result +"]" + time_info
|
switches |
0:0e018d759a2a
|
1002
|
return Fore.MAGENTA + result + Fore.RESET
|
switches |
0:0e018d759a2a
|
1003
|
|
switches |
0:0e018d759a2a
|
1004
|
def shape_test_loop_ok_result_count(self, test_all_result):
|
switches |
0:0e018d759a2a
|
1005
|
""" Reformats list of results to simple string
|
switches |
0:0e018d759a2a
|
1006
|
"""
|
switches |
0:0e018d759a2a
|
1007
|
test_loop_count = len(test_all_result)
|
switches |
0:0e018d759a2a
|
1008
|
test_loop_ok_result = test_all_result.count(self.TEST_RESULT_OK)
|
switches |
0:0e018d759a2a
|
1009
|
return "%d/%d"% (test_loop_ok_result, test_loop_count)
|
switches |
0:0e018d759a2a
|
1010
|
|
switches |
0:0e018d759a2a
|
1011
|
def shape_global_test_loop_result(self, test_all_result, waterfall_and_consolidate):
|
switches |
0:0e018d759a2a
|
1012
|
""" Reformats list of results to simple string
|
switches |
0:0e018d759a2a
|
1013
|
"""
|
switches |
0:0e018d759a2a
|
1014
|
result = self.TEST_RESULT_FAIL
|
switches |
0:0e018d759a2a
|
1015
|
|
switches |
0:0e018d759a2a
|
1016
|
if all(test_all_result[0] == res for res in test_all_result):
|
switches |
0:0e018d759a2a
|
1017
|
result = test_all_result[0]
|
switches |
0:0e018d759a2a
|
1018
|
elif waterfall_and_consolidate and any(res == self.TEST_RESULT_OK for res in test_all_result):
|
switches |
0:0e018d759a2a
|
1019
|
result = self.TEST_RESULT_OK
|
switches |
0:0e018d759a2a
|
1020
|
|
switches |
0:0e018d759a2a
|
1021
|
return result
|
switches |
0:0e018d759a2a
|
1022
|
|
switches |
0:0e018d759a2a
|
1023
|
    def run_host_test(self, name, image_path, disk, port, duration,
                      micro=None, reset=None, reset_tout=None,
                      verbose=False, copy_method=None, program_cycle_s=None):
        """ Function creates new process with host test configured with particular test case.
            Function also is pooling for serial port activity from process to catch all data
            printed by test runner and host test during test execution

            name - host test script name; executed as 'python <name>.py' from HOST_TESTS
            image_path - binary image the host test will flash to the MUT
            disk, port - MUT mount point and serial port, passed on the command line
            duration - test timeout in seconds; the polling loop allows up to 2x this
            micro/reset/reset_tout/copy_method/program_cycle_s - optional extras
                forwarded as -m/-r/-R/-c/-C command-line switches
            verbose - when True, echo child output and the executed command line

            Returns tuple: (test result string, captured output, measured test
            case duration, effective timeout).
        """

        def get_char_from_queue(obs):
            """ Get character from queue safe way

                Returns None when no character arrived within 0.5s.
            """
            try:
                c = obs.queue.get(block=True, timeout=0.5)
            except Empty, _:
                c = None
            return c

        def filter_queue_char(c):
            """ Filters out non ASCII characters from serial port

                Non-ASCII bytes are replaced with a single space.
            """
            if ord(c) not in range(128):
                c = ' '
            return c

        def get_test_result(output):
            """ Parse test 'output' data

                Scans captured output line by line for the first line matching
                RE_DETECT_TESTCASE_RESULT and maps it via TEST_RESULT_MAPPING.
                Defaults to TEST_RESULT_TIMEOUT when no result marker is found.
            """
            result = self.TEST_RESULT_TIMEOUT
            for line in "".join(output).splitlines():
                search_result = self.RE_DETECT_TESTCASE_RESULT.search(line)
                if search_result and len(search_result.groups()):
                    result = self.TEST_RESULT_MAPPING[search_result.groups(0)[0]]
                    break
            return result

        def get_auto_property_value(property_name, line):
            """ Scans auto detection line from MUT and returns scanned parameter 'property_name'
                Returns string

                Expects lines of the form "HOST: Property '<name>' = '<value>'";
                returns None when the line does not carry that property.
            """
            result = None
            if re.search("HOST: Property '%s'"% property_name, line) is not None:
                property = re.search("HOST: Property '%s' = '([\w\d _]+)'"% property_name, line)
                if property is not None and len(property.groups()) == 1:
                    result = property.groups()[0]
            return result

        # print "{%s} port:%s disk:%s" % (name, port, disk),
        # Build the host test command line; the host test script lives in HOST_TESTS
        cmd = ["python",
               '%s.py'% name,
               '-d', disk,
               '-f', '"%s"'% image_path,
               '-p', port,
               '-t', str(duration),
               '-C', str(program_cycle_s)]

        # Enable mbed-ls based auto-detection only when the module is importable
        if get_module_avail('mbed_lstools') and self.opts_auto_detect:
            cmd += ['--auto']

        # Add extra parameters to host_test
        if copy_method is not None:
            cmd += ["-c", copy_method]
        if micro is not None:
            cmd += ["-m", micro]
        if reset is not None:
            cmd += ["-r", reset]
        if reset_tout is not None:
            cmd += ["-R", str(reset_tout)]

        if verbose:
            print Fore.MAGENTA + "Executing '" + " ".join(cmd) + "'" + Fore.RESET
            print "Test::Output::Start"

        # ProcessObserver wraps the child and feeds its stdout into a queue
        # consumed character-by-character below
        proc = Popen(cmd, stdout=PIPE, cwd=HOST_TESTS)
        obs = ProcessObserver(proc)
        update_once_flag = {}   # Stores flags checking if some auto-parameter was already set
        line = ''
        output = []
        start_time = time()
        # Poll for up to 2x the nominal duration; 'duration' itself may be
        # revised mid-run by the 'timeout' auto-property below
        while (time() - start_time) < (2 * duration):
            c = get_char_from_queue(obs)
            if c:
                if verbose:
                    sys.stdout.write(c)
                c = filter_queue_char(c)
                output.append(c)
                # Give the mbed under test a way to communicate the end of the test
                if c in ['\n', '\r']:

                    # Checking for auto-detection information from the test about MUT reset moment
                    if 'reset_target' not in update_once_flag and "HOST: Reset target..." in line:
                        # We will update this marker only once to prevent multiple time resets
                        update_once_flag['reset_target'] = True
                        # Restart the clock so 'duration' is measured from the reset
                        start_time = time()

                    # Checking for auto-detection information from the test about timeout
                    auto_timeout_val = get_auto_property_value('timeout', line)
                    if 'timeout' not in update_once_flag and auto_timeout_val is not None:
                        # We will update this marker only once to prevent multiple time resets
                        update_once_flag['timeout'] = True
                        duration = int(auto_timeout_val)

                    # Detect mbed assert:
                    if 'mbed assertation failed: ' in line:
                        output.append('{{mbed_assert}}')
                        break

                    # Check for test end
                    if '{end}' in line:
                        break
                    line = ''
                else:
                    line += c
        end_time = time()
        testcase_duration = end_time - start_time    # Test case duration from reset to {end}

        # Drain (at most) one trailing character left in the queue
        c = get_char_from_queue(obs)

        if c:
            if verbose:
                sys.stdout.write(c)
            c = filter_queue_char(c)
            output.append(c)

        if verbose:
            print "Test::Output::Finish"
        # Stop test process
        obs.stop()

        result = get_test_result(output)
        return (result, "".join(output), testcase_duration, duration)
|
switches |
0:0e018d759a2a
|
1153
|
|
switches |
0:0e018d759a2a
|
1154
|
def is_peripherals_available(self, target_mcu_name, peripherals=None):
|
switches |
0:0e018d759a2a
|
1155
|
""" Checks if specified target should run specific peripheral test case defined in MUTs file
|
switches |
0:0e018d759a2a
|
1156
|
"""
|
switches |
0:0e018d759a2a
|
1157
|
if peripherals is not None:
|
switches |
0:0e018d759a2a
|
1158
|
peripherals = set(peripherals)
|
switches |
0:0e018d759a2a
|
1159
|
for id, mut in self.muts.iteritems():
|
switches |
0:0e018d759a2a
|
1160
|
# Target MCU name check
|
switches |
0:0e018d759a2a
|
1161
|
if mut["mcu"] != target_mcu_name:
|
switches |
0:0e018d759a2a
|
1162
|
continue
|
switches |
0:0e018d759a2a
|
1163
|
# Peripherals check
|
switches |
0:0e018d759a2a
|
1164
|
if peripherals is not None:
|
switches |
0:0e018d759a2a
|
1165
|
if 'peripherals' not in mut:
|
switches |
0:0e018d759a2a
|
1166
|
continue
|
switches |
0:0e018d759a2a
|
1167
|
if not peripherals.issubset(set(mut['peripherals'])):
|
switches |
0:0e018d759a2a
|
1168
|
continue
|
switches |
0:0e018d759a2a
|
1169
|
return True
|
switches |
0:0e018d759a2a
|
1170
|
return False
|
switches |
0:0e018d759a2a
|
1171
|
|
switches |
0:0e018d759a2a
|
1172
|
def shape_test_request(self, mcu, image_path, test_id, duration=10):
|
switches |
0:0e018d759a2a
|
1173
|
""" Function prepares JSON structure describing test specification
|
switches |
0:0e018d759a2a
|
1174
|
"""
|
switches |
0:0e018d759a2a
|
1175
|
test_spec = {
|
switches |
0:0e018d759a2a
|
1176
|
"mcu": mcu,
|
switches |
0:0e018d759a2a
|
1177
|
"image": image_path,
|
switches |
0:0e018d759a2a
|
1178
|
"duration": duration,
|
switches |
0:0e018d759a2a
|
1179
|
"test_id": test_id,
|
switches |
0:0e018d759a2a
|
1180
|
}
|
switches |
0:0e018d759a2a
|
1181
|
return json.dumps(test_spec)
|
switches |
0:0e018d759a2a
|
1182
|
|
switches |
0:0e018d759a2a
|
1183
|
|
switches |
0:0e018d759a2a
|
1184
|
def get_unique_value_from_summary(test_summary, index):
    """ Gets list of unique target names

        Collects column 'index' from each summary row and returns the
        distinct values sorted ascending.
    """
    unique_values = set()
    for row in test_summary:
        unique_values.add(row[index])
    return sorted(unique_values)
|
switches |
0:0e018d759a2a
|
1193
|
|
switches |
0:0e018d759a2a
|
1194
|
|
switches |
0:0e018d759a2a
|
1195
|
def get_unique_value_from_summary_ext(test_summary, index_key, index_val):
    """ Gets list of unique target names and return dictionary

        Maps column 'index_key' to column 'index_val' per summary row;
        the first occurrence of a key wins.
    """
    result = {}
    for row in test_summary:
        result.setdefault(row[index_key], row[index_val])
    return result
|
switches |
0:0e018d759a2a
|
1205
|
|
switches |
0:0e018d759a2a
|
1206
|
|
switches |
0:0e018d759a2a
|
1207
|
def show_json_file_format_error(json_spec_filename, line, column):
    """ Prints JSON broken content

        Echoes up to five lines of context leading up to the defective line,
        then a dashed caret marker ('----^') under the defect column.

        json_spec_filename - path to the malformed JSON file
        line, column - defect position as reported by the JSON parser
                       (e.g. via json_format_error_defect_pos()); 1-based
    """
    with open(json_spec_filename) as data_file:
        line_no = 1
        for json_line in data_file:
            if line_no + 5 >= line: # Print last few lines before error
                print 'Line %d:\t'%line_no + json_line, # Prints line
                if line_no == line:
                    # Caret row: pad past the 'Line N:' prefix, then dashes
                    # up to (column-1) followed by the '^' marker
                    print ' ' * len('Line %d:'%line_no) + '\t', '-' * (column-1) + '^'
                    break
            line_no += 1
|
switches |
0:0e018d759a2a
|
1219
|
|
switches |
0:0e018d759a2a
|
1220
|
|
switches |
0:0e018d759a2a
|
1221
|
def json_format_error_defect_pos(json_error_msg):
    """ Gets first error line and column in JSON file format.
        Parsed from exception thrown by json.loads() string

        Returns [line, column] when both positions can be extracted from the
        parser message, otherwise None.
    """
    # Line value search - bail out early when the message carries no position
    line_match = re.search('line [0-9]+', json_error_msg)
    if line_match is None:
        return None
    tokens = line_match.group().split(' ')
    if len(tokens) != 2:
        return None
    line = int(tokens[1])
    # Column position search - only attempted once a line number was found
    column_match = re.search('column [0-9]+', json_error_msg)
    if column_match is None:
        return None
    tokens = column_match.group().split(' ')
    if len(tokens) != 2:
        return None
    return [line, int(tokens[1])]
|
switches |
0:0e018d759a2a
|
1241
|
|
switches |
0:0e018d759a2a
|
1242
|
|
switches |
0:0e018d759a2a
|
1243
|
def get_json_data_from_file(json_spec_filename, verbose=False):
    """ Loads from file JSON formatted string to data structure

        json_spec_filename - path of the JSON file to parse
        verbose - when True, pretty-print the parsed structure to stdout

        Returns the parsed data structure, or None when the file cannot be
        opened or its content is not valid JSON (errors are printed, not
        raised).
    """
    result = None
    try:
        with open(json_spec_filename) as data_file:
            try:
                result = json.load(data_file)
            except ValueError as json_error_msg:
                result = None
                print 'JSON file %s parsing failed. Reason: %s' % (json_spec_filename, json_error_msg)
                # We can print where error occurred inside JSON file if we can parse exception msg
                json_format_defect_pos = json_format_error_defect_pos(str(json_error_msg))
                if json_format_defect_pos is not None:
                    line = json_format_defect_pos[0]
                    column = json_format_defect_pos[1]
                    print
                    show_json_file_format_error(json_spec_filename, line, column)

    except IOError as fileopen_error_msg:
        print 'JSON file %s not opened. Reason: %s'% (json_spec_filename, fileopen_error_msg)
        print
    if verbose and result:
        pp = pprint.PrettyPrinter(indent=4)
        pp.pprint(result)
    return result
|
switches |
0:0e018d759a2a
|
1269
|
|
switches |
0:0e018d759a2a
|
1270
|
|
switches |
0:0e018d759a2a
|
1271
|
def print_muts_configuration_from_json(json_data, join_delim=", ", platform_filter=None):
    """ Prints MUTs configuration passed to test script for verboseness

        json_data - dict mapping a MUT index to a dict of MUT properties
                    (e.g. 'mcu', 'port', 'disk', 'peripherals', ...)
        join_delim - delimiter used when flattening list-valued properties
                     into a single table cell
        platform_filter - optional regular expression; when set, only MUTs
                          whose 'mcu' matches are included in the table

        Returns the rendered table as a string.
    """
    # Collect the union of property names across all MUTs - these become the
    # table columns (first-seen order preserved)
    muts_info_cols = []
    for k in json_data:
        mut_info = json_data[k]
        for mut_property in mut_info:
            if mut_property not in muts_info_cols:
                muts_info_cols.append(mut_property)

    # Prepare pretty table object to display all MUTs
    pt_cols = ["index"] + muts_info_cols
    pt = PrettyTable(pt_cols)
    for col in pt_cols:
        pt.align[col] = "l"

    # Add rows to pretty print object
    for k in json_data:
        row = [k]
        mut_info = json_data[k]

        add_row = True
        if platform_filter and 'mcu' in mut_info:
            add_row = re.search(platform_filter, mut_info['mcu']) is not None
        if add_row:
            for col in muts_info_cols:
                # Missing properties render as an empty cell (None)
                cell_val = mut_info[col] if col in mut_info else None
                # isinstance() is the idiomatic, subclass-safe check and drops
                # the dependency on the Python 2-only types.ListType alias
                if isinstance(cell_val, list):
                    cell_val = join_delim.join(cell_val)
                row.append(cell_val)
            pt.add_row(row)
    return pt.get_string()
|
switches |
0:0e018d759a2a
|
1304
|
|
switches |
0:0e018d759a2a
|
1305
|
|
switches |
0:0e018d759a2a
|
1306
|
def print_test_configuration_from_json(json_data, join_delim=", "):
    """ Prints test specification configuration passed to test script for verboseness

        json_data - test spec structure; expected shape is
                    { 'targets': { <target>: [<toolchain>, ...] } }
        join_delim - delimiter used when listing conflicting toolchains

        Returns a string with a target-vs-toolchain table; cells are marked
        'Yes'/'-' for usage, '*' for a target/toolchain conflict and '#' for
        a missing toolchain installation path. A textual conflict summary is
        appended when any conflict was found.
    """
    toolchains_info_cols = []
    # We need to check all toolchains for each device
    for k in json_data:
        # k should be 'targets'
        targets = json_data[k]
        for target in targets:
            toolchains = targets[target]
            for toolchain in toolchains:
                if toolchain not in toolchains_info_cols:
                    toolchains_info_cols.append(toolchain)

    # Prepare pretty table object to display test specification
    pt_cols = ["mcu"] + sorted(toolchains_info_cols)
    pt = PrettyTable(pt_cols)
    for col in pt_cols:
        pt.align[col] = "l"

    # { target : [conflicted toolchains] }
    toolchain_conflicts = {}
    toolchain_path_conflicts = []
    for k in json_data:
        # k should be 'targets'
        targets = json_data[k]
        for target in targets:
            target_supported_toolchains = get_target_supported_toolchains(target)
            if not target_supported_toolchains:
                target_supported_toolchains = []
            # Unknown targets (not in TARGET_MAP) are marked with a trailing '*'
            target_name = target if target in TARGET_MAP else "%s*"% target
            row = [target_name]
            toolchains = targets[target]

            for toolchain in sorted(toolchains_info_cols):
                # Check for conflicts: target vs toolchain
                conflict = False
                conflict_path = False
                if toolchain in toolchains:
                    if toolchain not in target_supported_toolchains:
                        conflict = True
                        if target not in toolchain_conflicts:
                            toolchain_conflicts[target] = []
                        toolchain_conflicts[target].append(toolchain)
                # Add marker inside table about target usage / conflict
                cell_val = 'Yes' if toolchain in toolchains else '-'
                if conflict:
                    cell_val += '*'
                # Check for conflicts: toolchain vs toolchain path
                if toolchain in TOOLCHAIN_PATHS:
                    toolchain_path = TOOLCHAIN_PATHS[toolchain]
                    if not os.path.isdir(toolchain_path):
                        conflict_path = True
                        if toolchain not in toolchain_path_conflicts:
                            toolchain_path_conflicts.append(toolchain)
                if conflict_path:
                    cell_val += '#'
                row.append(cell_val)
            pt.add_row(row)

    # generate result string
    result = pt.get_string()    # Test specification table
    if toolchain_conflicts or toolchain_path_conflicts:
        result += "\n"
        result += "Toolchain conflicts:\n"
        for target in toolchain_conflicts:
            if target not in TARGET_MAP:
                result += "\t* Target %s unknown\n"% (target)
            conflict_target_list = join_delim.join(toolchain_conflicts[target])
            sufix = 's' if len(toolchain_conflicts[target]) > 1 else ''
            result += "\t* Target %s does not support %s toolchain%s\n"% (target, conflict_target_list, sufix)

        for toolchain in toolchain_path_conflicts:
            # Let's check toolchain configuration
            if toolchain in TOOLCHAIN_PATHS:
                toolchain_path = TOOLCHAIN_PATHS[toolchain]
                if not os.path.isdir(toolchain_path):
                    result += "\t# Toolchain %s path not found: %s\n"% (toolchain, toolchain_path)
    return result
|
switches |
0:0e018d759a2a
|
1385
|
|
switches |
0:0e018d759a2a
|
1386
|
|
switches |
0:0e018d759a2a
|
1387
|
def get_avail_tests_summary_table(cols=None, result_summary=True, join_delim=',',platform_filter=None):
    """ Generates table summary with all test cases and additional test cases
        information using pretty print functionality. Allows test suite user to
        see test cases

    @param cols optional list of test properties (columns) to show; a default
           set is used when None
    @param result_summary when True (and no platform_filter given) automation
           coverage tables are appended to the output
    @param join_delim delimiter used to join list-valued test properties into
           one table cell
    @param platform_filter regular expression used to filter test IDs
    @return multi-line string with the rendered table(s)
    """
    # get all unique test ID prefixes (test id without its trailing index)
    unique_test_id = []
    for test in TESTS:
        split = test['id'].split('_')[:-1]
        test_id_prefix = '_'.join(split)
        if test_id_prefix not in unique_test_id:
            unique_test_id.append(test_id_prefix)
    unique_test_id.sort()
    # Per-prefix counters: automated test count vs total test count
    counter_dict_test_id_types = dict((t, 0) for t in unique_test_id)
    counter_dict_test_id_types_all = dict((t, 0) for t in unique_test_id)

    test_properties = ['id',
                       'automated',
                       'description',
                       'peripherals',
                       'host_test',
                       'duration'] if cols is None else cols

    # All tests status table print
    pt = PrettyTable(test_properties)
    for col in test_properties:
        pt.align[col] = "l"
    pt.align['duration'] = "r"

    counter_all = 0
    counter_automated = 0
    pt.padding_width = 1 # One space between column edges and contents (default)

    for test_id in sorted(TEST_MAP.keys()):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, test_id) is None:
                continue
        row = []
        test = TEST_MAP[test_id]
        split = test_id.split('_')[:-1]
        test_id_prefix = '_'.join(split)

        for col in test_properties:
            col_value = test[col]
            if isinstance(test[col], list):   # idiomatic form of type(...) == ListType
                col_value = join_delim.join(test[col])
            elif test[col] is None:           # identity check instead of == None
                col_value = "-"

            row.append(col_value)
        if test['automated']:                 # truthiness instead of == True
            counter_dict_test_id_types[test_id_prefix] += 1
            counter_automated += 1
        pt.add_row(row)
        # Update counters
        counter_all += 1
        counter_dict_test_id_types_all[test_id_prefix] += 1
    result = pt.get_string()
    result += "\n\n"

    if result_summary and not platform_filter:
        # Automation result summary
        test_id_cols = ['automated', 'all', 'percent [%]', 'progress']
        pt = PrettyTable(test_id_cols)
        pt.align['automated'] = "r"
        pt.align['all'] = "r"
        pt.align['percent [%]'] = "r"

        # Guard against empty TEST_MAP: report 0% instead of ZeroDivisionError
        percent_progress = round(100.0 * counter_automated / float(counter_all), 1) if counter_all else 0.0
        str_progress = progress_bar(percent_progress, 75)
        pt.add_row([counter_automated, counter_all, percent_progress, str_progress])
        result += "Automation coverage:\n"
        result += pt.get_string()
        result += "\n\n"

        # Test automation coverage table print
        test_id_cols = ['id', 'automated', 'all', 'percent [%]', 'progress']
        pt = PrettyTable(test_id_cols)
        pt.align['id'] = "l"
        pt.align['automated'] = "r"
        pt.align['all'] = "r"
        pt.align['percent [%]'] = "r"
        for unique_id in unique_test_id:
            total_count = counter_dict_test_id_types_all[unique_id]
            # Guard against prefixes present in TESTS but absent from TEST_MAP
            percent_progress = round(100.0 * counter_dict_test_id_types[unique_id] / float(total_count), 1) if total_count else 0.0
            str_progress = progress_bar(percent_progress, 75)
            row = [unique_id,
                   counter_dict_test_id_types[unique_id],
                   total_count,
                   percent_progress,
                   "[" + str_progress + "]"]
            pt.add_row(row)
        result += "Test automation coverage:\n"
        result += pt.get_string()
        result += "\n\n"
    return result
|
switches |
0:0e018d759a2a
|
1484
|
|
switches |
0:0e018d759a2a
|
1485
|
|
switches |
0:0e018d759a2a
|
1486
|
def progress_bar(percent_progress, saturation=0):
    """ Create a 50-character ASCII progress bar ('#' done, '.' remaining)
        with an optional saturation mark.

    @param percent_progress progress in percent (0-100); scaled to 0-50 chars
    @param saturation percent position at which a saturation marker is
           inserted ('|' when progress reached it, '!' when it did not);
           0 disables the marker
    @return the rendered bar string
    """
    step = int(percent_progress / 2)    # Scale percent (0-100) to chars (0-50)
    str_progress = '#' * step + '.' * int(50 - step)
    c = '!' if str_progress[38] == '.' else '|'
    if saturation > 0:
        # Floor division: '/' only worked here under Python 2 integer
        # semantics; a float index would break the slices below.
        saturation = saturation // 2
        str_progress = str_progress[:saturation] + c + str_progress[saturation:]
    return str_progress
|
switches |
0:0e018d759a2a
|
1496
|
|
switches |
0:0e018d759a2a
|
1497
|
|
switches |
0:0e018d759a2a
|
1498
|
def singletest_in_cli_mode(single_test):
    """ Runs SingleTestRunner object in CLI (Command line interface) mode

    @param single_test SingleTestRunner instance, already configured with
           the opts_* attributes read below
    @return returns success code (0 == success) for building and running tests
    """
    # Measure wall-clock duration of the whole test pass
    start = time()
    # Execute tests depending on options and filter applied
    test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, build_report, build_properties = single_test.execute()
    elapsed_time = time() - start

    # Human readable summary
    if not single_test.opts_suppress_summary:
        # prints well-formed summary with results (SQL table like)
        print single_test.generate_test_summary(test_summary, shuffle_seed)
    if single_test.opts_test_x_toolchain_summary:
        # prints well-formed summary with results (SQL table like)
        # table shows text x toolchain test result matrix
        print single_test.generate_test_summary_by_target(test_summary, shuffle_seed)

    print "Completed in %.2f sec"% (elapsed_time)
    print
    # Write summary of the builds

    # Print build report to stdout; its return value becomes this function's status
    print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
    status = print_report_exporter.report(build_report)

    # Store extra reports in files
    if single_test.opts_report_html_file_name:
        # Export results in form of HTML report to separate file
        report_exporter = ReportExporter(ResultExporterType.HTML)
        report_exporter.report_to_file(test_summary_ext, single_test.opts_report_html_file_name, test_suite_properties=test_suite_properties_ext)
    if single_test.opts_report_junit_file_name:
        # Export results in form of JUnit XML report to separate file
        report_exporter = ReportExporter(ResultExporterType.JUNIT)
        report_exporter.report_to_file(test_summary_ext, single_test.opts_report_junit_file_name, test_suite_properties=test_suite_properties_ext)
    if single_test.opts_report_text_file_name:
        # Export results in form of a text file
        report_exporter = ReportExporter(ResultExporterType.TEXT)
        report_exporter.report_to_file(test_summary_ext, single_test.opts_report_text_file_name, test_suite_properties=test_suite_properties_ext)
    if single_test.opts_report_build_file_name:
        # Export build results to separate file
        # NOTE(review): the exporter used here is JUNIT (with package="build"),
        # not HTML as an earlier comment suggested — confirm which is intended.
        report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
        report_exporter.report_to_file(build_report, single_test.opts_report_build_file_name, test_suite_properties=build_properties)

    # Returns True if no build failures of the test projects or their dependencies
    return status
|
switches |
0:0e018d759a2a
|
1544
|
|
switches |
0:0e018d759a2a
|
1545
|
class TestLogger():
    """ Base class used by the test suite to record and print ongoing events.
    """
    def __init__(self, store_log=True):
        """ Create the logger.

        @param store_log when True every entry produced by log_line() is also
               kept in self.log; when False entries are only returned
        """
        self.log = []               # in-memory list of log entry dicts
        self.log_to_file = False
        self.log_file_name = None
        self.store_log = store_log

        # Severity levels accepted by log_line()
        self.LogType = construct_enum(INFO='Info',
                                      WARN='Warning',
                                      NOTIF='Notification',
                                      ERROR='Error',
                                      EXCEPT='Exception')

        # File-output modes: CREATE overwrites an existing log file,
        # APPEND adds to it
        self.LogToFileAttr = construct_enum(CREATE=1,
                                            APPEND=2)

    def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'):
        """ Record one line of text and return its log entry dict.
        """
        entry = dict(log_type=LogType,
                     log_timestamp=time(),
                     log_line=log_line,
                     _future=None)
        # Keep the entry only when in-memory storage was requested
        if self.store_log:
            self.log.append(entry)
        return entry
|
switches |
0:0e018d759a2a
|
1579
|
|
switches |
0:0e018d759a2a
|
1580
|
|
switches |
0:0e018d759a2a
|
1581
|
class CLITestLogger(TestLogger):
    """ Command line interface logger: formats entries for the screen and
        optionally appends them to a log file.
    """
    def __init__(self, store_log=True, file_name=None):
        # NOTE(review): store_log is accepted here but not forwarded to
        # TestLogger.__init__ (which defaults to True) — confirm intent.
        TestLogger.__init__(self)
        self.log_file_name = file_name
        #self.TIMESTAMP_FORMAT = '%y-%m-%d %H:%M:%S' # Full date and time
        self.TIMESTAMP_FORMAT = '%H:%M:%S' # Time only

    def log_print(self, log_entry, timestamp=True):
        """ Return the log entry formatted for on-screen display.
        """
        prefix = ''
        if timestamp:
            ts = log_entry['log_timestamp']
            prefix = datetime.datetime.fromtimestamp(ts).strftime("[%s] "% self.TIMESTAMP_FORMAT)
        return prefix + "%(log_type)s: %(log_line)s"% (log_entry)

    def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'):
        """ Log one line; when a log file was configured the formatted line
            is appended to it (I/O errors are deliberately swallowed so
            logging stays best-effort).
        """
        entry = TestLogger.log_line(self, LogType, log_line)
        formatted = self.log_print(entry, timestamp)
        if self.log_file_name is not None:
            try:
                with open(self.log_file_name, 'a') as f:
                    f.write(formatted + line_delim)
            except IOError:
                pass
        return formatted
|
switches |
0:0e018d759a2a
|
1611
|
|
switches |
0:0e018d759a2a
|
1612
|
|
switches |
0:0e018d759a2a
|
1613
|
def factory_db_logger(db_url):
    """ Factory database driver depending on database type supplied in
        database connection string db_url

    @param db_url database connection string, e.g. 'mysql://user:pass@host/db'
    @return database access object for a recognized db type, otherwise None
    """
    if db_url is not None:
        from tools.test_mysql import MySQLDBAccess
        # Parse once and reuse the result (previously the connection string
        # was parsed a second time just to unpack the same tuple)
        connection_info = BaseDBAccess().parse_db_connection_string(db_url)
        if connection_info is not None:
            (db_type, username, password, host, db_name) = connection_info
            if db_type == 'mysql':
                return MySQLDBAccess()
    return None
|
switches |
0:0e018d759a2a
|
1624
|
|
switches |
0:0e018d759a2a
|
1625
|
|
switches |
0:0e018d759a2a
|
1626
|
def detect_database_verbose(db_url):
    """ Verbosely (via prints) runs the database detection sequence to check
        whether database connection string db_url is valid.

    @param db_url database connection string to validate and test-connect with
    """
    result = BaseDBAccess().parse_db_connection_string(db_url)
    if result is not None:
        # Parsing passed
        (db_type, username, password, host, db_name) = result
        #print "DB type '%s', user name '%s', password '%s', host '%s', db name '%s'"% result
        # Let's try to connect
        db_ = factory_db_logger(db_url)
        if db_ is not None:
            # Trailing comma: keep "Connecting..." and "ok" on the same line
            print "Connecting to database '%s'..."% db_url,
            db_.connect(host, username, password, db_name)
            if db_.is_connected():
                print "ok"
                print "Detecting database..."
                print db_.detect_database(verbose=True)
                print "Disconnecting...",
                db_.disconnect()
                print "done"
        else:
            # factory_db_logger() returned None: unrecognized db type
            print "Database type '%s' unknown"% db_type
    else:
        # Connection string did not parse at all
        print "Parse error: '%s' - DB Url error"% (db_url)
|
switches |
0:0e018d759a2a
|
1650
|
|
switches |
0:0e018d759a2a
|
1651
|
|
switches |
0:0e018d759a2a
|
1652
|
def get_module_avail(module_name):
    """ Return True if module_name has already been imported.

    @param module_name name of the module to check, e.g. 'mbed_lstools'
    @return True when the module is present in sys.modules
    """
    # Membership test directly on the dict; .keys() built a full list in Py2
    return module_name in sys.modules
|
switches |
0:0e018d759a2a
|
1656
|
|
switches |
0:0e018d759a2a
|
1657
|
|
switches |
0:0e018d759a2a
|
1658
|
def get_autodetected_MUTS_list(platform_name_filter=None):
    """ Detect all mbed-enabled devices connected to the host and return them
        in MUTs file format via get_autodetected_MUTS().

    @param platform_name_filter optional list of platform names to keep
    """
    previous_error_mode = None
    on_windows = os.name == 'nt'
    if on_windows:
        # Disable Windows error box temporarily
        previous_error_mode = ctypes.windll.kernel32.SetErrorMode(1) #note that SEM_FAILCRITICALERRORS = 1

    detected = mbed_lstools.create().list_mbeds()

    if on_windows:
        # Restore the error mode saved above
        ctypes.windll.kernel32.SetErrorMode(previous_error_mode)

    return get_autodetected_MUTS(detected, platform_name_filter=platform_name_filter)
|
switches |
0:0e018d759a2a
|
1671
|
|
switches |
0:0e018d759a2a
|
1672
|
def get_autodetected_MUTS(mbeds_list, platform_name_filter=None):
    """ Function detects all connected to host mbed-enabled devices and generates artificial MUTS file.
        If function fails to auto-detect devices it will return empty dictionary.

        Example of calling with mbed_lstools available:

            if get_module_avail('mbed_lstools'):
                mbeds = mbed_lstools.create()
                mbeds_list = mbeds.list_mbeds()

    @param mbeds_list list of mbeds captured from mbed_lstools, e.g.
           [{'platform_name': 'NUCLEO_F302R8', 'mount_point': 'E:',
             'target_id': '07050200623B61125D5EF72A', 'serial_port': u'COM34'}]
    @param platform_name_filter optional list of platform names; MUTs whose
           'platform_name' is not in the list are skipped
    @return dict in muts_all.json format: {1: {...}, 2: {...}, ...}
    """
    result = {} # Should be in muts_all.json format
    # Align mbeds_list from mbed_lstools to MUT file format (JSON dictionary with muts)
    index = 1
    for mut in mbeds_list:
        # Filter the MUTS if a filter is specified
        if platform_name_filter and not mut['platform_name'] in platform_name_filter:
            continue

        # For mcu_unique - we are assigning 'platform_name_unique' value from mbedls output (if its existing)
        # if not we are creating our own unique value (last few chars from platform's target_id).
        m = {'mcu': mut['platform_name'],
             'mcu_unique' : mut['platform_name_unique'] if 'platform_name_unique' in mut else "%s[%s]" % (mut['platform_name'], mut['target_id'][-4:]),
             'port': mut['serial_port'],
             'disk': mut['mount_point'],
             'peripherals': []  # No peripheral detection
             }
        # Dead "if index not in result" guard removed: index is freshly
        # incremented each iteration so the key can never pre-exist.
        result[index] = m
        index += 1
    return result
|
switches |
0:0e018d759a2a
|
1706
|
|
switches |
0:0e018d759a2a
|
1707
|
|
switches |
0:0e018d759a2a
|
1708
|
def get_autodetected_TEST_SPEC(mbeds_list,
                               use_default_toolchain=True,
                               use_supported_toolchains=False,
                               toolchain_filter=None,
                               platform_name_filter=None):
    """ Function detects all connected to host mbed-enabled devices and generates artificial test_spec file.
        If function fails to auto-detect devices it will return empty 'targets' test_spec description.

    @param mbeds_list list of MUT dicts with at least an 'mcu' key
    @param use_default_toolchain if True add default toolchain to test_spec
    @param use_supported_toolchains if True add all supported toolchains to test_spec
    @param toolchain_filter if [...list of toolchains...] add from all toolchains only those in filter to test_spec
    @param platform_name_filter optional list of platform names to keep
    """
    result = {'targets': {} }

    for mut in mbeds_list:
        mcu = mut['mcu']
        # Skip platforms rejected by the filter (an empty filter list keeps nothing)
        if platform_name_filter is not None and mcu not in platform_name_filter:
            continue
        # Only targets known to the build system can appear in the test spec
        if mcu not in TARGET_MAP:
            continue

        default_toolchain = TARGET_MAP[mcu].default_toolchain
        supported_toolchains = TARGET_MAP[mcu].supported_toolchains

        # Decide which toolchains should be added to test specification toolchain pool for each target
        toolchain_pool = []
        if use_default_toolchain:
            toolchain_pool.append(default_toolchain)
        if use_supported_toolchains:
            toolchain_pool.extend(supported_toolchains)
        if toolchain_filter is not None:
            known = supported_toolchains + [default_toolchain]
            toolchain_pool.extend(tc for tc in toolchain_filter if tc in known)

        # De-duplicate before storing
        result['targets'][mcu] = list(set(toolchain_pool))
    return result
|
switches |
0:0e018d759a2a
|
1743
|
|
switches |
0:0e018d759a2a
|
1744
|
|
switches |
0:0e018d759a2a
|
1745
|
def get_default_test_options_parser():
    """ Get common test script options used by CLI, web services etc.

    Returns an argparse.ArgumentParser pre-populated with the standard
    test suite command line switches. When the 'mbed_lstools' module is
    importable, extra auto-detection options (--auto, --tc, --oper) are
    added as well.

    Note: only help-text typos were corrected here; every flag, dest,
    type and default is unchanged so existing callers and scripts keep
    working.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--tests',
                        dest='test_spec_filename',
                        metavar="FILE",
                        type=argparse_filestring_type,
                        help='Points to file with test specification')

    parser.add_argument('-M', '--MUTS',
                        dest='muts_spec_filename',
                        metavar="FILE",
                        type=argparse_filestring_type,
                        help='Points to file with MUTs specification (overwrites settings.py and private_settings.py)')

    parser.add_argument("-j", "--jobs",
                        dest='jobs',
                        metavar="NUMBER",
                        type=int,
                        help="Define number of compilation jobs. Default value is 1")

    if get_module_avail('mbed_lstools'):
        # Additional features available when mbed_lstools is installed on host and imported
        # mbed_lstools allows users to detect mbed-enabled devices connected to the host
        parser.add_argument('--auto',
                            dest='auto_detect',
                            action="store_true",
                            help='Use mbed-ls module to detect all connected mbed devices')

        toolchain_list = list(TOOLCHAINS) + ["DEFAULT", "ALL"]
        parser.add_argument('--tc',
                            dest='toolchains_filter',
                            type=argparse_many(argparse_uppercase_type(toolchain_list, "toolchains")),
                            help="Toolchain filter for --auto argument. Use toolchains names separated by comma, 'default' or 'all' to select toolchains")

        test_scopes = ','.join(["'%s'" % n for n in get_available_oper_test_scopes()])
        parser.add_argument('--oper',
                            dest='operability_checks',
                            type=argparse_lowercase_type(get_available_oper_test_scopes(), "scopes"),
                            help='Perform interoperability tests between host and connected mbed devices. Available test scopes are: %s' % test_scopes)

    parser.add_argument('--clean',
                        dest='clean',
                        action="store_true",
                        help='Clean the build directory')

    parser.add_argument('-P', '--only-peripherals',
                        dest='test_only_peripheral',
                        default=False,
                        action="store_true",
                        help='Test only peripheral declared for MUT and skip common tests')

    parser.add_argument("--profile", dest="profile", action="append",
                        type=argparse_filestring_type,
                        default=[])

    parser.add_argument('-C', '--only-commons',
                        dest='test_only_common',
                        default=False,
                        action="store_true",
                        help='Test only board internals. Skip peripherals tests and perform common tests')

    parser.add_argument('-n', '--test-by-names',
                        dest='test_by_names',
                        type=argparse_many(str),
                        help='Runs only tests enumerated in this switch. Use comma to separate test case names')

    parser.add_argument('-p', '--peripheral-by-names',
                        dest='peripheral_by_names',
                        type=argparse_many(str),
                        help='Forces discovery of particular peripherals. Use comma to separate peripheral names')

    copy_methods = host_tests_plugins.get_plugin_caps('CopyMethod')
    copy_methods_str = "Plugin support: " + ', '.join(copy_methods)

    parser.add_argument('-c', '--copy-method',
                        dest='copy_method',
                        type=argparse_uppercase_type(copy_methods, "flash method"),
                        help="Select binary copy (flash) method. Default is Python's shutil.copy() method. %s"% copy_methods_str)

    reset_methods = host_tests_plugins.get_plugin_caps('ResetMethod')
    reset_methods_str = "Plugin support: " + ', '.join(reset_methods)

    parser.add_argument('-r', '--reset-type',
                        dest='mut_reset_type',
                        default=None,
                        type=argparse_uppercase_type(reset_methods, "reset method"),
                        help='Extra reset method used to reset MUT by host test script. %s'% reset_methods_str)

    parser.add_argument('-g', '--goanna-for-tests',
                        dest='goanna_for_tests',
                        action="store_true",
                        help='Run Goanna static analyse tool for tests. (Project will be rebuilt)')

    parser.add_argument('-G', '--goanna-for-sdk',
                        dest='goanna_for_mbed_sdk',
                        action="store_true",
                        help='Run Goanna static analyse tool for mbed SDK (Project will be rebuilt)')

    parser.add_argument('-s', '--suppress-summary',
                        dest='suppress_summary',
                        default=False,
                        action="store_true",
                        help='Suppresses display of well-formatted table with test results')

    parser.add_argument('-t', '--test-summary',
                        dest='test_x_toolchain_summary',
                        default=False,
                        action="store_true",
                        help='Displays well-formatted table with test x toolchain test result per target')

    parser.add_argument('-A', '--test-automation-report',
                        dest='test_automation_report',
                        default=False,
                        action="store_true",
                        help='Prints information about all tests and exits')

    parser.add_argument('-R', '--test-case-report',
                        dest='test_case_report',
                        default=False,
                        action="store_true",
                        help='Prints information about all test cases and exits')

    parser.add_argument("-S", "--supported-toolchains",
                        action="store_true",
                        dest="supported_toolchains",
                        default=False,
                        help="Displays supported matrix of MCUs and toolchains")

    parser.add_argument("-O", "--only-build",
                        action="store_true",
                        dest="only_build_tests",
                        default=False,
                        help="Only build tests, skips actual test procedures (flashing etc.)")

    parser.add_argument('--parallel',
                        dest='parallel_test_exec',
                        default=False,
                        action="store_true",
                        help='Experimental, executes test runners in parallel for MUTs connected to your host (speeds up test result collection)')

    parser.add_argument('--config',
                        dest='verbose_test_configuration_only',
                        default=False,
                        action="store_true",
                        help='Displays full test specification and MUTs configuration and exits')

    parser.add_argument('--loops',
                        dest='test_loops_list',
                        type=argparse_many(str),
                        help='Set no. of loops per test. Format: TEST_1=1,TEST_2=2,TEST_3=3')

    parser.add_argument('--global-loops',
                        dest='test_global_loops_value',
                        type=int,
                        help='Set global number of test loops per test. Default value is set 1')

    parser.add_argument('--consolidate-waterfall',
                        dest='consolidate_waterfall_test',
                        default=False,
                        action="store_true",
                        help='Used with --waterfall argument. Adds only one test to report reflecting outcome of waterfall test.')

    parser.add_argument('-W', '--waterfall',
                        dest='waterfall_test',
                        default=False,
                        action="store_true",
                        help='Used with --loops or --global-loops arguments. Tests until OK result occurs and assumes test passed')

    parser.add_argument('-N', '--firmware-name',
                        dest='firmware_global_name',
                        help='Set global name for all produced projects. Note, proper file extension will be added by build scripts')

    parser.add_argument('-u', '--shuffle',
                        dest='shuffle_test_order',
                        default=False,
                        action="store_true",
                        help='Shuffles test execution order')

    parser.add_argument('--shuffle-seed',
                        dest='shuffle_test_seed',
                        default=None,
                        help='Shuffle seed (If you want to reproduce your shuffle order please use seed provided in test summary)')

    parser.add_argument('-f', '--filter',
                        dest='general_filter_regex',
                        type=argparse_many(str),
                        default=None,
                        help='For some commands you can use filter to filter out results')

    parser.add_argument('--inc-timeout',
                        dest='extend_test_timeout',
                        metavar="NUMBER",
                        type=int,
                        help='You can increase global timeout for each test by specifying additional test timeout in seconds')

    parser.add_argument('--db',
                        dest='db_url',
                        help='This specifies what database test suite uses to store its state. To pass DB connection info use database connection string. Example: \'mysql://username:password@127.0.0.1/db_name\'')

    parser.add_argument('-l', '--log',
                        dest='log_file_name',
                        help='Log events to external file (note not all console entries may be visible in log file)')

    parser.add_argument('--report-html',
                        dest='report_html_file_name',
                        help='You can log test suite results in form of HTML report')

    parser.add_argument('--report-junit',
                        dest='report_junit_file_name',
                        help='You can log test suite results in form of JUnit compliant XML report')

    parser.add_argument("--report-build",
                        dest="report_build_file_name",
                        help="Output the build results to a junit xml file")

    parser.add_argument("--report-text",
                        dest="report_text_file_name",
                        help="Output the build results to a text file")

    parser.add_argument('--verbose-skipped',
                        dest='verbose_skipped_tests',
                        default=False,
                        action="store_true",
                        help='Prints some extra information about skipped tests')

    parser.add_argument('-V', '--verbose-test-result',
                        dest='verbose_test_result_only',
                        default=False,
                        action="store_true",
                        help='Prints test serial output')

    parser.add_argument('-v', '--verbose',
                        dest='verbose',
                        default=False,
                        action="store_true",
                        help='Verbose mode (prints some extra information)')

    parser.add_argument('--version',
                        dest='version',
                        default=False,
                        action="store_true",
                        help='Prints script version and exits')
    return parser
|
switches |
0:0e018d759a2a
|
1990
|
|
switches |
0:0e018d759a2a
|
1991
|
def test_path_to_name(path, base):
|
switches |
0:0e018d759a2a
|
1992
|
"""Change all slashes in a path into hyphens
|
switches |
0:0e018d759a2a
|
1993
|
This creates a unique cross-platform test name based on the path
|
switches |
0:0e018d759a2a
|
1994
|
This can eventually be overriden by a to-be-determined meta-data mechanism"""
|
switches |
0:0e018d759a2a
|
1995
|
name_parts = []
|
switches |
0:0e018d759a2a
|
1996
|
head, tail = os.path.split(relpath(path,base))
|
switches |
0:0e018d759a2a
|
1997
|
while (tail and tail != "."):
|
switches |
0:0e018d759a2a
|
1998
|
name_parts.insert(0, tail)
|
switches |
0:0e018d759a2a
|
1999
|
head, tail = os.path.split(head)
|
switches |
0:0e018d759a2a
|
2000
|
|
switches |
0:0e018d759a2a
|
2001
|
return "-".join(name_parts).lower()
|
switches |
0:0e018d759a2a
|
2002
|
|
switches |
0:0e018d759a2a
|
2003
|
def find_tests(base_dir, target_name, toolchain_name, app_config=None):
    """ Finds all tests in a directory recursively
    base_dir: path to the directory to scan for tests (ex. 'path/to/project')
    target_name: name of the target to use for scanning (ex. 'K64F')
    toolchain_name: name of the toolchain to use for scanning (ex. 'GCC_ARM')
    app_config - location of a chosen mbed_app.json file
    """
    tests = {}

    # Prepare the toolchain
    toolchain = prepare_toolchain([base_dir], target_name, toolchain_name,
                                  silent=True, app_config=app_config)

    # Scan the directory for paths to probe for 'TESTS' folders
    base_resources = scan_resources([base_dir], toolchain)

    for directory in base_resources.inc_dirs:
        # Only directories containing a 'TESTS' subdirectory are of interest
        if 'TESTS' not in os.listdir(directory):
            continue

        walk_base_dir = join(directory, 'TESTS')
        test_resources = toolchain.scan_resources(walk_base_dir, base_path=base_dir)

        # Loop through all subdirectories
        for candidate in test_resources.inc_dirs:
            # A test case folder lives exactly two levels below the main
            # 'TESTS' folder (ex. TESTS/testgroup/testcase); skip others.
            if get_path_depth(relpath(candidate, walk_base_dir)) != 2:
                continue

            group_path, case_folder = os.path.split(candidate)
            group_folder = os.path.basename(group_path)

            # Make sure the discovered folder is not a host test directory
            if case_folder != 'host_tests' and group_folder != 'host_tests':
                tests[test_path_to_name(candidate, base_dir)] = candidate

    return tests
|
switches |
0:0e018d759a2a
|
2047
|
|
switches |
0:0e018d759a2a
|
2048
|
def print_tests(tests, format="list", sort=True):
|
switches |
0:0e018d759a2a
|
2049
|
"""Given a dictionary of tests (as returned from "find_tests"), print them
|
switches |
0:0e018d759a2a
|
2050
|
in the specified format"""
|
switches |
0:0e018d759a2a
|
2051
|
if format == "list":
|
switches |
0:0e018d759a2a
|
2052
|
for test_name in sorted(tests.keys()):
|
switches |
0:0e018d759a2a
|
2053
|
test_path = tests[test_name]
|
switches |
0:0e018d759a2a
|
2054
|
print "Test Case:"
|
switches |
0:0e018d759a2a
|
2055
|
print " Name: %s" % test_name
|
switches |
0:0e018d759a2a
|
2056
|
print " Path: %s" % test_path
|
switches |
0:0e018d759a2a
|
2057
|
elif format == "json":
|
switches |
0:0e018d759a2a
|
2058
|
print json.dumps(tests, indent=2)
|
switches |
0:0e018d759a2a
|
2059
|
else:
|
switches |
0:0e018d759a2a
|
2060
|
print "Unknown format '%s'" % format
|
switches |
0:0e018d759a2a
|
2061
|
sys.exit(1)
|
switches |
0:0e018d759a2a
|
2062
|
|
switches |
0:0e018d759a2a
|
2063
|
def norm_relative_path(path, start):
    """This function will create a normalized, relative path. It mimics the
    python os.path.relpath function, but also normalizes a Windows-style path
    that uses backslashes to a Unix style path that uses forward slashes."""
    # Normalize first, then make relative, then force forward slashes so the
    # result is identical regardless of the host platform's separator.
    rel = os.path.relpath(os.path.normpath(path), start)
    return rel.replace("\\", "/")
|
switches |
0:0e018d759a2a
|
2071
|
|
switches |
0:0e018d759a2a
|
2072
|
|
switches |
0:0e018d759a2a
|
2073
|
def build_test_worker(*args, **kwargs):
    """This is a worker function for the parallel building of tests. The `args`
    and `kwargs` are passed directly to `build_project`. It returns a dictionary
    with the following structure:

    {
        'result': `True` if no exceptions were thrown, `False` otherwise
        'reason': Instance of exception that was thrown on failure
        'bin_file': Path to the created binary if `build_project` was
                    successful. Not present otherwise
        'kwargs': The keyword arguments that were passed to `build_project`.
                  This includes arguments that were modified (ex. report)
    }
    """
    ret = {
        'result': False,
        'args': args,
        'kwargs': kwargs
    }

    try:
        ret['bin_file'] = build_project(*args, **kwargs)
        ret['result'] = True
        # Re-store kwargs: build_project may have mutated them (ex. report)
        ret['kwargs'] = kwargs
    except (NotSupportedException, ToolException, KeyboardInterrupt) as exc:
        # Known failure modes: remember the cause for the parent process
        ret['reason'] = exc
    except:
        # Print unhandled exceptions here; 'result' stays False and no
        # 'reason' is recorded for this path
        import traceback
        traceback.print_exc(file=sys.stdout)

    return ret
|
switches |
0:0e018d759a2a
|
2112
|
|
switches |
0:0e018d759a2a
|
2113
|
|
switches |
0:0e018d759a2a
|
2114
|
def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
                clean=False, notify=None, verbose=False, jobs=1, macros=None,
                silent=False, report=None, properties=None,
                continue_on_build_fail=False, app_config=None,
                build_profile=None):
    """Given the data structure from 'find_tests' and the typical build parameters,
    build all the tests

    tests - dict of test name -> test path, as produced by find_tests()
    base_source_paths - list of source paths shared by every test build
    build_path - root directory where per-test build output is placed
    target - target object or target name string
    jobs - number of parallel build workers (0/None means cpu_count())
    report - nested dict [target][toolchain] that worker results are merged into
    continue_on_build_fail - when False, abort remaining builds on first failure
    NOTE(review): 'notify', 'verbose' and 'silent' are accepted but 'silent'
    is forced to True in the worker kwargs below — confirm intent.

    Returns a tuple of the build result (True or False) followed by the test
    build data structure"""

    execution_directory = "."
    # Base path recorded in the test spec, relative to the invocation dir
    base_path = norm_relative_path(build_path, execution_directory)

    target_name = target if isinstance(target, str) else target.name

    # Skeleton of the per-(target, toolchain) build entry for the test spec
    test_build = {
        "platform": target_name,
        "toolchain": toolchain_name,
        "base_path": base_path,
        "baud_rate": 9600,
        "binary_type": "bootable",
        "tests": {}
    }

    result = True

    jobs_count = int(jobs if jobs else cpu_count())
    p = Pool(processes=jobs_count)
    results = []
    # Queue one async build per test; workers run build_test_worker
    for test_name, test_path in tests.iteritems():
        test_build_path = os.path.join(build_path, test_path)
        src_path = base_source_paths + [test_path]
        bin_file = None
        test_case_folder_name = os.path.basename(test_path)

        args = (src_path, test_build_path, target, toolchain_name)
        kwargs = {
            'jobs': jobs,
            'clean': clean,
            'macros': macros,
            'name': test_case_folder_name,
            'project_id': test_name,
            'report': report,
            'properties': properties,
            'verbose': verbose,
            'app_config': app_config,
            'build_profile': build_profile,
            # Workers are always silenced; their output is re-printed below
            'silent': True
        }

        results.append(p.apply_async(build_test_worker, args, kwargs))

    p.close()
    result = True
    itr = 0
    # Poll the async results until all have been consumed
    while len(results):
        itr += 1
        # 360000 iterations * 0.01s sleep ~= 10 minutes of pure sleeping
        # (actual wall time is longer since result handling also takes time)
        if itr > 360000:
            p.terminate()
            p.join()
            raise ToolException("Compile did not finish in 10 minutes")
        else:
            sleep(0.01)
            pending = 0
            for r in results:
                if r.ready() is True:
                    try:
                        worker_result = r.get()
                        # NOTE: mutating 'results' while iterating it; safe
                        # only because we break out of the for loop below
                        # before examining further elements after a removal
                        results.remove(r)

                        # Take report from the kwargs and merge it into existing report
                        report_entry = worker_result['kwargs']['report'][target_name][toolchain_name]
                        for test_key in report_entry.keys():
                            report[target_name][toolchain_name][test_key] = report_entry[test_key]

                        # Set the overall result to a failure if a build failure occurred
                        # NOTE(review): 'reason' is absent when the worker hit
                        # an unhandled exception — this lookup may raise
                        # KeyError in that case; confirm against build_test_worker
                        if not worker_result['result'] and not isinstance(worker_result['reason'], NotSupportedException):
                            result = False
                            break

                        # Adding binary path to test build result
                        if worker_result['result'] and 'bin_file' in worker_result:
                            bin_file = norm_relative_path(worker_result['bin_file'], execution_directory)

                            test_build['tests'][worker_result['kwargs']['project_id']] = {
                                "binaries": [
                                    {
                                        "path": bin_file
                                    }
                                ]
                            }

                            # Re-emit the captured build output of this test
                            test_key = worker_result['kwargs']['project_id'].upper()
                            print report[target_name][toolchain_name][test_key][0][0]['output'].rstrip()
                            print 'Image: %s\n' % bin_file

                    except:
                        # On any error while collecting results, drop the
                        # queued-but-unstarted tasks (private Pool internals)
                        # and kill the workers before re-raising
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise
                else:
                    pending += 1
                    # No point scanning further once every worker slot is busy
                    if pending >= jobs_count:
                        break

            # Break as soon as possible if there is a failure and we are not
            # continuing on build failures
            if not result and not continue_on_build_fail:
                if p._taskqueue.queue:
                    p._taskqueue.queue.clear()
                    sleep(0.5)
                p.terminate()
                break

    p.join()

    test_builds = {}
    test_builds["%s-%s" % (target_name, toolchain_name)] = test_build

    return result, test_builds
|
switches |
0:0e018d759a2a
|
2238
|
|
switches |
0:0e018d759a2a
|
2239
|
|
switches |
0:0e018d759a2a
|
2240
|
def test_spec_from_test_builds(test_builds):
|
switches |
0:0e018d759a2a
|
2241
|
return {
|
switches |
0:0e018d759a2a
|
2242
|
"builds": test_builds
|
switches |
0:0e018d759a2a
|
2243
|
}
|