build_api.py
00001 """ 00002 mbed SDK 00003 Copyright (c) 2011-2016 ARM Limited 00004 00005 Licensed under the Apache License, Version 2.0 (the "License"); 00006 you may not use this file except in compliance with the License. 00007 You may obtain a copy of the License at 00008 00009 http://www.apache.org/licenses/LICENSE-2.0 00010 00011 Unless required by applicable law or agreed to in writing, software 00012 distributed under the License is distributed on an "AS IS" BASIS, 00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 See the License for the specific language governing permissions and 00015 limitations under the License. 00016 """ 00017 00018 import re 00019 import tempfile 00020 from types import ListType 00021 from shutil import rmtree 00022 from os.path import join, exists, dirname, basename, abspath, normpath 00023 from os import linesep, remove 00024 from time import time 00025 00026 from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\ 00027 ToolException, InvalidReleaseTargetException 00028 from tools.paths import MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,\ 00029 MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE,\ 00030 MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,\ 00031 BUILD_DIR 00032 from tools.targets import TARGET_NAMES, TARGET_MAP 00033 from tools.libraries import Library 00034 from tools.toolchains import TOOLCHAIN_CLASSES 00035 from jinja2 import FileSystemLoader 00036 from jinja2.environment import Environment 00037 from tools.config import Config 00038 00039 RELEASE_VERSIONS = ['2', '5'] 00040 00041 def prep_report (report, target_name, toolchain_name, id_name): 00042 """Setup report keys 00043 00044 Positional arguments: 00045 report - the report to fill 00046 target_name - the target being used 00047 toolchain_name - the toolchain being used 00048 id_name - the name of the executable or library being built 00049 """ 00050 if not target_name in report: 00051 report[target_name] = {} 00052 00053 if not toolchain_name in report[target_name]: 00054 report[target_name][toolchain_name] = {} 00055 00056 if not id_name in report[target_name][toolchain_name]: 00057 report[target_name][toolchain_name][id_name] = [] 00058 00059 def prep_properties (properties, target_name, toolchain_name, vendor_label): 00060 """Setup test properties 00061 00062 Positional arguments: 00063 properties - the dict to fill 00064 target_name - the target the test is targeting 00065 toolchain_name - the toolchain that will compile the test 00066 vendor_label - the vendor 00067 """ 00068 if not target_name in properties: 00069 properties[target_name] = {} 00070 00071 if not toolchain_name in properties[target_name]: 00072 properties[target_name][toolchain_name] = {} 00073 00074 properties[target_name][toolchain_name]["target"] = target_name 00075 properties[target_name][toolchain_name]["vendor"] = vendor_label 00076 properties[target_name][toolchain_name]["toolchain"] = toolchain_name 00077 00078 def create_result (target_name, toolchain_name, id_name, description): 00079 """Create a result dictionary 00080 00081 Positional arguments: 00082 target_name - the target being built for 00083 toolchain_name - the toolchain doing the building 00084 id_name - the name of the executable or library being built 00085 description - a human readable description of what's going on 00086 """ 00087 cur_result = {} 00088 cur_result["target_name"] = target_name 00089 cur_result["toolchain_name"] = toolchain_name 00090 
cur_result["id"] = id_name 00091 cur_result["description"] = description 00092 cur_result["elapsed_time"] = 0 00093 cur_result["output"] = "" 00094 00095 return cur_result 00096 00097 def add_result_to_report (report, result): 00098 """Add a single result to a report dictionary 00099 00100 Positional arguments: 00101 report - the report to append to 00102 result - the result to append 00103 """ 00104 target = result["target_name"] 00105 toolchain = result["toolchain_name"] 00106 id_name = result['id'] 00107 result_wrap = {0: result} 00108 report[target][toolchain][id_name].append(result_wrap) 00109 00110 def get_config (src_paths, target, toolchain_name): 00111 """Get the configuration object for a target-toolchain combination 00112 00113 Positional arguments: 00114 src_paths - paths to scan for the configuration files 00115 target - the device we are building for 00116 toolchain_name - the string that identifies the build tools 00117 """ 00118 # Convert src_paths to a list if needed 00119 if type(src_paths) != ListType: 00120 src_paths = [src_paths] 00121 00122 # Pass all params to the unified prepare_resources() 00123 toolchain = prepare_toolchain(src_paths, target, toolchain_name) 00124 00125 # Scan src_path for config files 00126 resources = toolchain.scan_resources(src_paths[0]) 00127 for path in src_paths[1:]: 00128 resources.add(toolchain.scan_resources(path)) 00129 00130 # Update configuration files until added features creates no changes 00131 prev_features = set() 00132 while True: 00133 # Update the configuration with any .json files found while scanning 00134 toolchain.config.add_config_files(resources.json_files) 00135 00136 # Add features while we find new ones 00137 features = toolchain.config.get_features() 00138 if features == prev_features: 00139 break 00140 00141 for feature in features: 00142 if feature in resources.features: 00143 resources += resources.features[feature] 00144 00145 prev_features = features 00146 toolchain.config.validate_config() 00147 00148 cfg, macros = toolchain.config.get_config_data() 00149 features = toolchain.config.get_features() 00150 return cfg, macros, features 00151 00152 def is_official_target (target_name, version): 00153 """ Returns True, None if a target is part of the official release for the 00154 given version. Return False, 'reason' if a target is not part of the 00155 official release for the given version. 00156 00157 Positional arguments: 00158 target_name - Name if the target (ex. 'K64F') 00159 version - The release version string. 
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                         ("one of the following toolchains to be included in the") + \
                         ((" mbed 2.0 official release: %s" + linesep) %
                          ", ".join(required_toolchains)) + \
                         ("Currently it is only configured to support the ") + \
                         ("following toolchains: %s" %
                          ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = list(required_toolchains)
            required_toolchains_sorted.sort()
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = list(supported_toolchains)
            supported_toolchains_sorted.sort()

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                         ("ALL of the following toolchains to be included in the") + \
                         ((" mbed OS 5.0 official release: %s" + linesep) %
                          ", ".join(required_toolchains_sorted)) + \
                         ("Currently it is only configured to support the ") + \
                         ("following toolchains: %s" %
                          ", ".join(supported_toolchains_sorted))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                         ("'default_lib' to 'std' to be included in the ") + \
                         ("mbed OS 5.0 official release." + linesep) + \
                         ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                     ("Please choose from the following release versions: %s" %
                      ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif not version in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason

def transform_release_toolchains(toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    if version == '5':
        return ['ARM', 'GCC_ARM', 'IAR']
    else:
        return toolchains


def get_mbed_official_release(version):
    """ Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    mbed_official_release = (
        tuple(
            tuple(
                [
                    TARGET_MAP[target].name,
                    tuple(transform_release_toolchains(
                        TARGET_MAP[target].supported_toolchains, version))
                ]
            ) for target in TARGET_NAMES \
            if (hasattr(TARGET_MAP[target], 'release_versions')
                and version in TARGET_MAP[target].release_versions)
        )
    )

    for target in mbed_official_release:
        is_official, reason = is_official_target(target[0], version)

        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release


def prepare_toolchain(src_paths, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, silent=False, verbose=False,
                      extra_verbose=False, config=None,
                      app_config=None, build_profile=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    target - ['LPC1768', 'LPC11U24', 'LPC2368', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    silent - suppress printing of progress indicators
    verbose - Write the actual tools command lines used if True
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target

    # Toolchain instance
    try:
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
            target, notify, macros, silent,
            extra_verbose=extra_verbose, build_profile=build_profile)
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean
    toolchain.VERBOSE = verbose

    return toolchain

def scan_resources(src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None):
    """ Scan resources using initialized toolchain

    Positional arguments:
    src_paths - the paths to source directories
    toolchain - valid toolchain object
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources
    """

    # Scan src_path
    resources = toolchain.scan_resources(src_paths[0], base_path=base_path)
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path, base_path=base_path))

    # Scan dependency paths for include dirs
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            resources.inc_dirs.extend(lib_resources.inc_dirs)

    # Add additional include directories if passed
    if inc_dirs:
        if type(inc_dirs) == ListType:
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # Load resources into the config system which might expand/modify resources
    # based on config data
    resources = toolchain.config.load_resources(resources)

    # Set the toolchain's configuration data
    toolchain.set_config_data(toolchain.config.get_config_data())

    return resources

def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None,
                  macros=None, inc_dirs=None, jobs=1, silent=False,
                  report=None, properties=None, project_id=None,
                  project_description=None, extra_verbose=False, config=None,
                  app_config=None, build_profile=None):
    """ Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do its job
    clean - Rebuild everything if True
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict where test properties (target, toolchain, vendor) may
                 be recorded
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """

    # Convert src_path to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        inc_dirs.extend(map(dirname, libraries_paths))

    # Build Directory
    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, target, toolchain_name, macros=macros, clean=clean,
        jobs=jobs, notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, config=config, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))
    toolchain.info("Building project %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report != None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties != None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, build_path,
                                            resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        res, _ = toolchain.link_program(resources, build_path, name)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table')

            if not silent:
                print memap_table

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', map_csv)

        resources.detect_duplicates(toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output() + memap_table
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = toolchain.map_outputs

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report != None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
00495 cur_result["result"] = "FAIL" 00496 00497 cur_result["elapsed_time"] = end - start 00498 00499 toolchain_output = toolchain.get_output() 00500 if toolchain_output: 00501 cur_result["output"] += toolchain_output 00502 00503 add_result_to_report(report, cur_result) 00504 00505 # Let Exception propagate 00506 raise 00507 00508 def build_library (src_paths, build_path, target, toolchain_name, 00509 dependencies_paths=None, name=None, clean=False, 00510 archive=True, notify=None, verbose=False, macros=None, 00511 inc_dirs=None, jobs=1, silent=False, report=None, 00512 properties=None, extra_verbose=False, project_id=None, 00513 remove_config_header_file=False, app_config=None, 00514 build_profile=None): 00515 """ Build a library 00516 00517 Positional arguments: 00518 src_paths - a path or list of paths that contain all files needed to build 00519 the library 00520 build_path - the directory where all of the object files will be placed 00521 target - the MCU or board that the project will compile for 00522 toolchain_name - the name of the build tools 00523 00524 Keyword arguments: 00525 dependencies_paths - The location of libraries to include when linking 00526 name - the name of the library 00527 clean - Rebuild everything if True 00528 archive - whether the library will create an archive file 00529 notify - Notify function for logs 00530 verbose - Write the actual tools command lines used if True 00531 macros - additional macros 00532 inc_dirs - additional directories where include files may be found 00533 jobs - how many compilers we can run at once 00534 silent - suppress printing of progress indicators 00535 report - a dict where a result may be appended 00536 properties - UUUUHHHHH beats me 00537 extra_verbose - even more output! 00538 project_id - the name that goes in the report 00539 remove_config_header_file - delete config header file when done building 00540 app_config - location of a chosen mbed_app.json file 00541 build_profile - a dict of flags that will be passed to the compiler 00542 """ 00543 00544 # Convert src_path to a list if needed 00545 if type(src_paths) != ListType: 00546 src_paths = [src_paths] 00547 00548 # Build path 00549 if archive: 00550 # Use temp path when building archive 00551 tmp_path = join(build_path, '.temp') 00552 mkdir(tmp_path) 00553 else: 00554 tmp_path = build_path 00555 00556 # Clean the build directory 00557 if clean and exists(tmp_path): 00558 rmtree(tmp_path) 00559 mkdir(tmp_path) 00560 00561 # Pass all params to the unified prepare_toolchain() 00562 toolchain = prepare_toolchain( 00563 src_paths, target, toolchain_name, macros=macros, clean=clean, 00564 jobs=jobs, notify=notify, silent=silent, verbose=verbose, 00565 extra_verbose=extra_verbose, app_config=app_config, 00566 build_profile=build_profile) 00567 00568 # The first path will give the name to the library 00569 if name is None: 00570 name = basename(normpath(abspath(src_paths[0]))) 00571 toolchain.info("Building library %s (%s, %s)" % 00572 (name, toolchain.target.name, toolchain_name)) 00573 00574 # Initialize reporting 00575 if report != None: 00576 start = time() 00577 # If project_id is specified, use that over the default name 00578 id_name = project_id.upper() if project_id else name.upper() 00579 description = name 00580 vendor_label = toolchain.target.extra_labels[0] 00581 prep_report(report, toolchain.target.name, toolchain_name, id_name) 00582 cur_result = create_result(toolchain.target.name, toolchain_name, 00583 id_name, description) 00584 if properties != None: 00585 
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s" % src_path
            if report != None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain,
                                   dependencies_paths=dependencies_paths,
                                   inc_dirs=inc_dirs)


        # Copy headers, objects and static libraries - all files needed for
        # static lib
        toolchain.copy_files(resources.headers, build_path, resources=resources)
        toolchain.copy_files(resources.objects, build_path, resources=resources)
        toolchain.copy_files(resources.libraries, build_path,
                             resources=resources)
        toolchain.copy_files(resources.json_files, build_path,
                             resources=resources)
        if resources.linker_script:
            toolchain.copy_files(resources.linker_script, build_path,
                                 resources=resources)

        if resources.hex_files:
            toolchain.copy_files(resources.hex_files, build_path,
                                 resources=resources)

        # Compile Sources
        objects = toolchain.compile_sources(resources, abspath(tmp_path),
                                            resources.inc_dirs)
        resources.objects.extend(objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if remove_config_header_file:
            config_header_path = toolchain.get_config_header()
            if config_header_path:
                remove(config_header_path)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"


            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report != None:
            end = time()

            if isinstance(exc, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

######################
### Legacy methods ###
######################

def build_lib(lib_id, target, toolchain_name, verbose=False,
              clean=False, macros=None, notify=None, jobs=1, silent=False,
              report=None, properties=None, extra_verbose=False,
              build_profile=None):
    """ Legacy method for building mbed libraries

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict where test properties (target, toolchain, vendor) may
                 be recorded
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros.extend(lib_macros)
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report != None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s" % src_path

            if report != None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
            target, macros=macros, notify=notify, silent=silent,
            extra_verbose=extra_verbose, build_profile=build_profile)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        toolchain.info("Building library %s (%s, %s)" %
                       (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = Config(target)
        toolchain.config = config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library.
        # These files usually are not in the same directory as source files, so
        # the previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Create the desired build directory structure
        bin_path = join(build_path, toolchain.obj_path)
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', toolchain.obj_path)
        mkdir(tmp_path)

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource, tmp_path,
                                                     dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report != None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

# We do have unique legacy conventions about how we build and package the mbed
# library
def build_mbed_libs(target, toolchain_name, verbose=False,
                    clean=False, macros=None, notify=None, jobs=1, silent=False,
                    report=None, properties=None, extra_verbose=False,
                    build_profile=None):
    """ Function returns True if the library was built and False if building
    was skipped

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    verbose - Write the actual tools command lines used if True
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict where test properties (target, toolchain, vendor) may
                 be recorded
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """

    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' %
              (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' %
              (target.name, supported_toolchains_text, 's'
               if len(target.supported_toolchains) > 1 else ''))

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Toolchain
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
            target, macros=macros, notify=notify, silent=silent,
            extra_verbose=extra_verbose, build_profile=build_profile)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = Config(target)
        toolchain.config = config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Source and Build Paths
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
        mkdir(build_toolchain)

        tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
        mkdir(tmp_path)

        # CMSIS
        toolchain.info("Building library %s (%s, %s)" %
                       ('CMSIS', target.name, toolchain_name))
        cmsis_src = MBED_CMSIS_PATH
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, build_target)
        toolchain.copy_files(resources.linker_script, build_toolchain)
        toolchain.copy_files(resources.bin_files, build_toolchain)

        objects = toolchain.compile_sources(resources, tmp_path)
        toolchain.copy_files(objects, build_toolchain)

        # mbed
        toolchain.info("Building library %s (%s, %s)" %
                       ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        # Target specific sources
        hal_src = MBED_TARGETS_PATH
        hal_implementation = toolchain.scan_resources(hal_src)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(hal_implementation, tmp_path,
                                            library_incdirs + incdirs)
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources
        mbed_resources = None
        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(dir)

        objects = toolchain.compile_sources(mbed_resources, tmp_path,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as objects rather than
        # archived, because of the way the linker searches for symbols in
        # archives. These are:
        #   - retarget.o: to make sure that the C standard lib symbols get
        #     overridden
        #   - board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various
        #     weak SDK functions
        separate_names, separate_objects = ['retarget.o', 'board.o',
                                            'mbed_overrides.o'], []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise


def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    unique_supported_toolchains = []

    if not release_targets:
        for target in TARGET_NAMES:
            for toolchain in TARGET_MAP[target].supported_toolchains:
                if toolchain not in unique_supported_toolchains:
                    unique_supported_toolchains.append(toolchain)
    else:
        for target in release_targets:
            for toolchain in target[1]:
                if toolchain not in unique_supported_toolchains:
                    unique_supported_toolchains.append(toolchain)

    return unique_supported_toolchains


def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """ Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - remove results that match the string
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable

    if isinstance(release_version, basestring):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'


    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[
                                                     version]]

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns)
    # Align table
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    target_names = []

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        for unique_toolchain in unique_supported_toolchains:
            if unique_toolchain in TARGET_MAP[target].supported_toolchains:
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
             else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result


def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
        else None


def static_analysis_scan(target, toolchain_name, cppcheck_cmd,
                         cppcheck_msg_format, verbose=False,
                         clean=False, macros=None, notify=None, jobs=1,
                         extra_verbose=False, build_profile=None):
    """Perform static analysis on a target and toolchain combination

    Positional arguments:
    target - the target to fake the build for
    toolchain_name - pretend you would compile with this toolchain
    cppcheck_cmd - the command used to do static analysis
    cppcheck_msg_format - the format of the check messages

    Keyword arguments:
    verbose - more printing!
    clean - start from a clean slate
    macros - extra macros to compile with
    notify - the notification event handling function
    jobs - number of commands to run at once
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """
    # Toolchain
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
                                                  notify=notify,
                                                  extra_verbose=extra_verbose,
                                                  build_profile=build_profile)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs
    toolchain.build_all = clean

    # Source and Build Paths
    build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
    build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
    mkdir(build_toolchain)

    tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
    mkdir(tmp_path)

    # CMSIS
    toolchain.info("Static analysis for %s (%s, %s)" %
                   ('CMSIS', target.name, toolchain_name))
    cmsis_src = MBED_CMSIS_PATH
    resources = toolchain.scan_resources(cmsis_src)

    # Copy files before analysis
    toolchain.copy_files(resources.headers, build_target)
    toolchain.copy_files(resources.linker_script, build_toolchain)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
    # command line
    includes = ["-I%s" % i for i in resources.inc_dirs]
    includes.append("-I%s" % str(build_target))
    c_sources = " ".join(resources.c_sources)
    cpp_sources = " ".join(resources.cpp_sources)
    macros = ["-D%s" % s for s in toolchain.get_symbols() + toolchain.macros]

    includes = [inc.strip() for inc in includes]
    macros = [mac.strip() for mac in macros]

    check_cmd = cppcheck_cmd
    check_cmd += cppcheck_msg_format
    check_cmd += includes
    check_cmd += macros

    # We need to pass some params via file to avoid "command line too long in
    # some OSs"
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    _stdout, _stderr, _ = run_cmd(check_cmd)
    if verbose:
        print _stdout
        print _stderr

    # =========================================================================

    # MBED
    toolchain.info("Static analysis for %s (%s, %s)" %
                   ('MBED', target.name, toolchain_name))

    # Common Headers
    toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
    toolchain.copy_files(toolchain.scan_resources(MBED_DRIVERS).headers,
                         MBED_LIBRARIES)
    toolchain.copy_files(toolchain.scan_resources(MBED_PLATFORM).headers,
                         MBED_LIBRARIES)
    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
                         MBED_LIBRARIES)

    # Target specific sources
    hal_src = join(MBED_TARGETS_PATH, "hal")
    hal_implementation = toolchain.scan_resources(hal_src)

    # Copy files before analysis
    toolchain.copy_files(hal_implementation.headers +
                         hal_implementation.hex_files, build_target,
                         resources=hal_implementation)
    incdirs = toolchain.scan_resources(build_target)

    target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
    target_includes.append("-I%s" % str(build_target))
    target_includes.append("-I%s" % str(hal_src))
    target_c_sources = " ".join(incdirs.c_sources)
    target_cpp_sources = " ".join(incdirs.cpp_sources)
    target_macros = ["-D%s" % s for s in
                     toolchain.get_symbols() + toolchain.macros]

    # Common Sources
    mbed_resources = toolchain.scan_resources(MBED_COMMON)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
    # command line
    mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
    mbed_includes.append("-I%s" % str(build_target))
    mbed_includes.append("-I%s" % str(MBED_DRIVERS))
    mbed_includes.append("-I%s" % str(MBED_PLATFORM))
    mbed_includes.append("-I%s" % str(MBED_HAL))
    mbed_c_sources = " ".join(mbed_resources.c_sources)
    mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)

    target_includes = [inc.strip() for inc in target_includes]
    mbed_includes = [inc.strip() for inc in mbed_includes]
    target_macros = [mac.strip() for mac in target_macros]

    check_cmd = cppcheck_cmd
    check_cmd += cppcheck_msg_format
    check_cmd += target_includes
    check_cmd += mbed_includes
    check_cmd += target_macros

    # We need to pass some params via file to avoid "command line too long in
    # some OSs"
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in target_c_sources.split())
    tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr


def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd,
                             cppcheck_msg_format, verbose=False,
                             clean=False, macros=None, notify=None, jobs=1,
                             extra_verbose=False, build_profile=None):
    """Perform static analysis on a library as if it were to be compiled for a
    particular target and toolchain combination
    """
    lib = Library(lib_id)
    if lib.is_supported(target, toolchain):
        static_analysis_scan_library(
            lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd,
            cppcheck_msg_format, lib.dependencies, verbose=verbose,
            clean=clean, macros=macros, notify=notify, jobs=jobs,
            extra_verbose=extra_verbose, build_profile=build_profile)
    else:
        print('Library "%s" is not yet supported on target %s with toolchain %s'
"%s" is not yet supported on target %s with toolchain %s' 01302 % (lib_id, target.name, toolchain)) 01303 01304 01305 def static_analysis_scan_library (src_paths, build_path, target, toolchain_name, 01306 cppcheck_cmd, cppcheck_msg_format, 01307 dependencies_paths=None, 01308 name=None, clean=False, notify=None, 01309 verbose=False, macros=None, jobs=1, 01310 extra_verbose=False, build_profile=None): 01311 """ Function scans library for statically detectable defects 01312 01313 Positional arguments: 01314 src_paths - the list of library paths to scan 01315 build_path - the location directory of result files 01316 target - the target to fake the build for 01317 toolchain_name - pretend you would compile with this toolchain 01318 cppcheck_cmd - the command used to do static analysis 01319 cppcheck_msg_format - the format of the check messages 01320 01321 Keyword arguments: 01322 dependencies_paths - the paths to sources that this library depends on 01323 name - the name of this library 01324 clean - start from a clean slate 01325 notify - the notification event handling function 01326 verbose - more printing! 01327 macros - extra macros to compile with 01328 jobs - number of commands to run at once 01329 extra_verbose - even moar printing 01330 build_profile - a dict of flags that will be passed to the compiler 01331 """ 01332 if type(src_paths) != ListType: 01333 src_paths = [src_paths] 01334 01335 for src_path in src_paths: 01336 if not exists(src_path): 01337 raise Exception("The library source folder does not exist: %s", 01338 src_path) 01339 01340 # Toolchain instance 01341 toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros, 01342 notify=notify, 01343 extra_verbose=extra_verbose, 01344 build_profile=build_profile) 01345 toolchain.VERBOSE = verbose 01346 toolchain.jobs = jobs 01347 01348 # The first path will give the name to the library 01349 name = basename(src_paths[0]) 01350 toolchain.info("Static analysis for library %s (%s, %s)" % 01351 (name.upper(), target.name, toolchain_name)) 01352 01353 # Scan Resources 01354 resources = [] 01355 for src_path in src_paths: 01356 resources.append(toolchain.scan_resources(src_path)) 01357 01358 # Dependencies Include Paths 01359 dependencies_include_dir = [] 01360 if dependencies_paths is not None: 01361 for path in dependencies_paths: 01362 lib_resources = toolchain.scan_resources(path) 01363 dependencies_include_dir.extend(lib_resources.inc_dirs) 01364 01365 # Create the desired build directory structure 01366 bin_path = join(build_path, toolchain.obj_path) 01367 mkdir(bin_path) 01368 tmp_path = join(build_path, '.temp', toolchain.obj_path) 01369 mkdir(tmp_path) 01370 01371 # Gather include paths, c, cpp sources and macros to transfer to cppcheck 01372 # command line 01373 includes = ["-I%s" % i for i in dependencies_include_dir + src_paths] 01374 c_sources = " " 01375 cpp_sources = " " 01376 macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros] 01377 01378 # Copy Headers 01379 for resource in resources: 01380 toolchain.copy_files(resource.headers, build_path, resources=resource) 01381 includes += ["-I%s" % i for i in resource.inc_dirs] 01382 c_sources += " ".join(resource.c_sources) + " " 01383 cpp_sources += " ".join(resource.cpp_sources) + " " 01384 01385 dependencies_include_dir.extend( 01386 toolchain.scan_resources(build_path).inc_dirs) 01387 01388 includes = [inc.strip() for inc in includes] 01389 macros = [mac.strip() for mac in macros] 01390 01391 check_cmd = cppcheck_cmd 01392 check_cmd += 
    check_cmd += includes
    check_cmd += macros

    # We need to pass some parameters via file to avoid "command line too long
    # in some OSs". A temporary file is created to store e.g. cppcheck list of
    # files for command line
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    # This will allow us to grab result from both stdio and stderr outputs (so
    # we can show them). We assume the static code analysis tool is outputting
    # defects on STDERR
    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr


def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for
    """
    result = ""
    if len(result_list) > 0:
        result += build_name + "\n"
        result += "\n".join([" * %s" % f for f in result_list])
        result += "\n"
    return result

def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results
    Aggregates reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable
    columns_text = ['name', 'target', 'toolchain']
    columns_int = ['static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
    table = PrettyTable(columns_text + columns_int)

    for col in columns_text:
        table.align[col] = 'l'

    for col in columns_int:
        table.align[col] = 'r'

    for target in report:
        for toolchain in report[target]:
            for name in report[target][toolchain]:
                for dlist in report[target][toolchain][name]:
                    for dlistelem in dlist:
                        # Get 'memory_usage' record and build table with
                        # statistics
                        record = dlist[dlistelem]
                        if 'memory_usage' in record and record['memory_usage']:
                            # Note that summary should be in the last record of
                            # the 'memory_usage' section. This is why we are
                            # grabbing the last "[-1]" record.
                            row = [
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                record['memory_usage'][-1]['summary'][
                                    'static_ram'],
                                record['memory_usage'][-1]['summary']['stack'],
                                record['memory_usage'][-1]['summary']['heap'],
                                record['memory_usage'][-1]['summary'][
                                    'total_ram'],
                                record['memory_usage'][-1]['summary'][
                                    'total_flash'],
                            ]
                            table.add_row(row)

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result

def write_build_report(build_report, template_filename, filename):
    """Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of build
                        report
    filename - the location on disk to write the file to
    """
    build_report_failing = []
    build_report_passing = []

    for report in build_report:
        if len(report["failing"]) > 0:
            build_report_failing.append(report)
        else:
            build_report_passing.append(report)

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(
            failing_builds=build_report_failing,
            passing_builds=build_report_passing))
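
The listing above is the complete module. As a rough illustration of how its main entry points fit together, a build script might drive it as in the minimal sketch below. This sketch is not part of build_api.py: the target 'K64F', toolchain 'GCC_ARM', source directories and BUILD output path are illustrative values, and since the module is Python 2 era code (print statements, types.ListType), the sketch follows that style. It assumes it runs from a checkout where the tools package shown above is importable, and that prettytable is installed for mcu_toolchain_matrix().

# Minimal usage sketch (illustrative, not part of build_api.py).
from tools.build_api import build_project, mcu_toolchain_matrix

# Print the target/toolchain support matrix for the mbed OS 5 release
# (mcu_toolchain_matrix returns the table as a string).
print mcu_toolchain_matrix(release_version='5')

# Build a user program. prepare_toolchain()'s docstring documents the target
# as a name string such as 'LPC1768'; the report and properties dicts are
# filled in by prep_report()/prep_properties() as the build runs, and
# build_project() returns the value of toolchain.link_program(), i.e. the
# built image.
report = {}
properties = {}
image = build_project(
    ["source", "mbed-os"],    # src_paths: project sources plus mbed-os (example paths)
    "BUILD/K64F/GCC_ARM",     # build_path for objects and the linked image
    "K64F",                   # target name (example), resolved via the config system
    "GCC_ARM",                # toolchain_name, resolved via TOOLCHAIN_CLASSES
    jobs=4,
    report=report,
    properties=properties)
print image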