build_api.py
00001 """ 00002 mbed SDK 00003 Copyright (c) 2011-2016 ARM Limited 00004 00005 Licensed under the Apache License, Version 2.0 (the "License"); 00006 you may not use this file except in compliance with the License. 00007 You may obtain a copy of the License at 00008 00009 http://www.apache.org/licenses/LICENSE-2.0 00010 00011 Unless required by applicable law or agreed to in writing, software 00012 distributed under the License is distributed on an "AS IS" BASIS, 00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 See the License for the specific language governing permissions and 00015 limitations under the License. 00016 """ 00017 00018 import re 00019 import tempfile 00020 from types import ListType 00021 from shutil import rmtree 00022 from os.path import join, exists, dirname, basename, abspath, normpath, splitext 00023 from os import linesep, remove, makedirs 00024 from time import time 00025 from intelhex import IntelHex 00026 00027 from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\ 00028 ToolException, InvalidReleaseTargetException, intelhex_offset 00029 from tools.paths import MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,\ 00030 MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE,\ 00031 MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,\ 00032 BUILD_DIR 00033 from tools.targets import TARGET_NAMES, TARGET_MAP 00034 from tools.libraries import Library 00035 from tools.toolchains import TOOLCHAIN_CLASSES 00036 from jinja2 import FileSystemLoader 00037 from jinja2.environment import Environment 00038 from tools.config import Config 00039 00040 RELEASE_VERSIONS = ['2', '5'] 00041 00042 def prep_report (report, target_name, toolchain_name, id_name): 00043 """Setup report keys 00044 00045 Positional arguments: 00046 report - the report to fill 00047 target_name - the target being used 00048 toolchain_name - the toolchain being used 00049 id_name - the name of the executable or library being built 00050 """ 00051 if not target_name in report: 00052 report[target_name] = {} 00053 00054 if not toolchain_name in report[target_name]: 00055 report[target_name][toolchain_name] = {} 00056 00057 if not id_name in report[target_name][toolchain_name]: 00058 report[target_name][toolchain_name][id_name] = [] 00059 00060 def prep_properties (properties, target_name, toolchain_name, vendor_label): 00061 """Setup test properties 00062 00063 Positional arguments: 00064 properties - the dict to fill 00065 target_name - the target the test is targeting 00066 toolchain_name - the toolchain that will compile the test 00067 vendor_label - the vendor 00068 """ 00069 if not target_name in properties: 00070 properties[target_name] = {} 00071 00072 if not toolchain_name in properties[target_name]: 00073 properties[target_name][toolchain_name] = {} 00074 00075 properties[target_name][toolchain_name]["target"] = target_name 00076 properties[target_name][toolchain_name]["vendor"] = vendor_label 00077 properties[target_name][toolchain_name]["toolchain"] = toolchain_name 00078 00079 def create_result (target_name, toolchain_name, id_name, description): 00080 """Create a result dictionary 00081 00082 Positional arguments: 00083 target_name - the target being built for 00084 toolchain_name - the toolchain doing the building 00085 id_name - the name of the executable or library being built 00086 description - a human readable description of what's going on 00087 """ 00088 cur_result = {} 00089 cur_result["target_name"] = 
def get_config(src_paths, target, toolchain_name):
    """Get the configuration object for a target-toolchain combination

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools
    """
    # Convert src_paths to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)

    # Scan src_path for config files
    resources = toolchain.scan_resources(src_paths[0])
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path))

    # Update configuration files until added features create no changes
    prev_features = set()
    while True:
        # Update the configuration with any .json files found while scanning
        toolchain.config.add_config_files(resources.json_files)

        # Add features while we find new ones
        features = set(toolchain.config.get_features())
        if features == prev_features:
            break

        for feature in features:
            if feature in resources.features:
                resources += resources.features[feature]

        prev_features = features
    toolchain.config.validate_config()

    cfg, macros = toolchain.config.get_config_data()
    features = toolchain.config.get_features()
    return cfg, macros, features

def is_official_target(target_name, version):
    """ Returns True, None if a target is part of the official release for the
    given version. Returns False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name of the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the following toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = list(required_toolchains)
            required_toolchains_sorted.sort()
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = list(supported_toolchains)
            supported_toolchains_sorted.sort()

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the following toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains_sorted)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(supported_toolchains_sorted))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif not version in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason

def transform_release_toolchains(toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    if version == '5':
        return ['ARM', 'GCC_ARM', 'IAR']
    else:
        return toolchains


def get_mbed_official_release(version):
    """ Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    mbed_official_release = (
        tuple(
            tuple(
                [
                    TARGET_MAP[target].name,
                    tuple(transform_release_toolchains(
                        TARGET_MAP[target].supported_toolchains, version))
                ]
            ) for target in TARGET_NAMES \
            if (hasattr(TARGET_MAP[target], 'release_versions')
                and version in TARGET_MAP[target].release_versions)
        )
    )

    for target in mbed_official_release:
        is_official, reason = is_official_target(target[0], version)

        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release

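# --- Illustrative sketch (not part of the original module) -----------------
# One way the release helpers above are typically combined: enumerate the
# official targets for a release version, and ask why a particular target was
# excluded. 'K64F' is only an example name taken from the docstrings above.
def _example_release_query():
    release = get_mbed_official_release('5')
    official, reason = is_official_target('K64F', '5')
    if not official:
        print(reason)
    return release
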
def add_regions_to_profile(profile, config, toolchain_class):
    """Add regions to the build profile, if there are any.

    Positional Arguments:
    profile - the profile to update
    config - the configuration object that owns the region
    toolchain_class - the class of the toolchain being used
    """
    regions = list(config.regions)
    for region in regions:
        for define in [(region.name.upper() + "_ADDR", region.start),
                       (region.name.upper() + "_SIZE", region.size)]:
            profile["common"].append("-D%s=0x%x" % define)
    active_region = [r for r in regions if r.active][0]
    for define in [("MBED_APP_START", active_region.start),
                   ("MBED_APP_SIZE", active_region.size)]:
        profile["ld"].append(toolchain_class.make_ld_define(*define))

    print("Using regions in this build:")
    for region in regions:
        print("  Region %s size 0x%x, offset 0x%x"
              % (region.name, region.size, region.start))


def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, silent=False, verbose=False,
                      extra_verbose=False, config=None,
                      app_config=None, build_profile=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    target - ['LPC1768', 'LPC11U24', 'LPC2368', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    silent - suppress printing of progress indicators
    verbose - Write the actual tools command lines used if True
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target
    try:
        cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)
    if config.has_regions:
        add_regions_to_profile(build_profile, config, cur_tc)

    # Toolchain instance
    toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
                       extra_verbose=extra_verbose, build_profile=build_profile)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean
    toolchain.VERBOSE = verbose

    return toolchain

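# --- Illustrative sketch (not part of the original module) -----------------
# Minimal use of prepare_toolchain(). The source path, build directory and
# target name are placeholders, the empty profile assumes the caller populates
# "common"/"ld" the way real build profiles do, and Config is assumed to
# accept a target name string here.
def _example_prepare_toolchain():
    profile = {"common": [], "c": [], "cxx": [], "asm": [], "ld": []}
    toolchain = prepare_toolchain(
        ["./source"], "./BUILD", "K64F", "GCC_ARM",
        jobs=4, build_profile=profile)
    return toolchain
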
def merge_region_list(region_list, destination, padding=b'\xFF'):
    """Merge the region_list into a single image

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    padding - bytes to fill gaps with
    """
    merged = IntelHex()

    print("Merging Regions:")

    for region in region_list:
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if region.filename:
            print("  Filling region %s with %s" % (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part_size = (part.maxaddr() - part.minaddr()) + 1
            if part_size > region.size:
                raise ToolException("Contents of region %s does not fit"
                                    % region.name)
            merged.merge(part)
            pad_size = region.size - part_size
            if pad_size > 0 and region != region_list[-1]:
                print("  Padding region %s with 0x%x bytes" % (region.name, pad_size))
                merged.puts(merged.maxaddr() + 1, padding * pad_size)

    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    print("Space used after regions merged: 0x%x" %
          (merged.maxaddr() - merged.minaddr() + 1))
    with open(destination, "wb+") as output:
        merged.tofile(output, format='bin')

def scan_resources(src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None):
    """ Scan resources using initialized toolchain

    Positional arguments
    src_paths - the paths to source directories
    toolchain - valid toolchain object
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources
    """

    # Scan src_path
    resources = toolchain.scan_resources(src_paths[0], base_path=base_path)
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path, base_path=base_path))

    # Scan dependency paths for include dirs
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            resources.inc_dirs.extend(lib_resources.inc_dirs)

    # Add additional include directories if passed
    if inc_dirs:
        if type(inc_dirs) == ListType:
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # Load resources into the config system which might expand/modify resources
    # based on config data
    resources = toolchain.config.load_resources(resources)

    # Set the toolchain's configuration data
    toolchain.set_config_data(toolchain.config.get_config_data())

    return resources

def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None,
                  macros=None, inc_dirs=None, jobs=1, silent=False,
                  report=None, properties=None, project_id=None,
                  project_description=None, extra_verbose=False, config=None,
                  app_config=None, build_profile=None):
    """ Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do its job
    clean - Rebuild everything if True
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict of test properties to fill (see prep_properties)
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """

    # Convert src_path to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        inc_dirs.extend(map(dirname, libraries_paths))

    # Build Directory
    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, config=config, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))
    toolchain.info("Building project %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report != None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties != None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        if toolchain.config.has_regions:
            res, _ = toolchain.link_program(resources, build_path, name + "_application")
            region_list = list(toolchain.config.regions)
            region_list = [r._replace(filename=res) if r.active else r
                           for r in region_list]
            res = join(build_path, name) + ".bin"
            merge_region_list(region_list, res)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table')

            if not silent:
                print memap_table

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', map_csv)

        resources.detect_duplicates(toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output() + memap_table
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = toolchain.map_outputs

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report != None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

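# --- Illustrative sketch (not part of the original module) -----------------
# A minimal build_project() call; all paths, names and the target are
# placeholders, and the default build profile is assumed to be acceptable.
def _example_build_project():
    report = {}
    binary = build_project(
        ["./source"], "./BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
        name="example_app", jobs=4, report=report)
    return binary, report
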
def build_library(src_paths, build_path, target, toolchain_name,
                  dependencies_paths=None, name=None, clean=False,
                  archive=True, notify=None, verbose=False, macros=None,
                  inc_dirs=None, jobs=1, silent=False, report=None,
                  properties=None, extra_verbose=False, project_id=None,
                  remove_config_header_file=False, app_config=None,
                  build_profile=None):
    """ Build a library

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the library
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    dependencies_paths - The location of libraries to include when linking
    name - the name of the library
    clean - Rebuild everything if True
    archive - whether the library will create an archive file
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict of test properties to fill (see prep_properties)
    extra_verbose - even more output!
    project_id - the name that goes in the report
    remove_config_header_file - delete config header file when done building
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """

    # Convert src_path to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # Build path
    if archive:
        # Use temp path when building archive
        tmp_path = join(build_path, '.temp')
        mkdir(tmp_path)
    else:
        tmp_path = build_path

    # Clean the build directory
    if clean and exists(tmp_path):
        rmtree(tmp_path)
    mkdir(tmp_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent,
        verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))
    toolchain.info("Building library %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report != None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties != None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s" % src_path
            if report != None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain,
                                   dependencies_paths=dependencies_paths,
                                   inc_dirs=inc_dirs)

        # Copy headers, objects and static libraries - all files needed for
        # static lib
        toolchain.copy_files(resources.headers, build_path, resources=resources)
        toolchain.copy_files(resources.objects, build_path, resources=resources)
        toolchain.copy_files(resources.libraries, build_path,
                             resources=resources)
        toolchain.copy_files(resources.json_files, build_path,
                             resources=resources)
        if resources.linker_script:
            toolchain.copy_files(resources.linker_script, build_path,
                                 resources=resources)

        if resources.hex_files:
            toolchain.copy_files(resources.hex_files, build_path,
                                 resources=resources)

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if remove_config_header_file:
            config_header_path = toolchain.get_config_header()
            if config_header_path:
                remove(config_header_path)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report != None:
            end = time()

            if isinstance(exc, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

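# --- Illustrative sketch (not part of the original module) -----------------
# Archiving a standalone library with build_library(); the source folder,
# build directory and target name are placeholders.
def _example_build_library():
    return build_library(
        ["./my_lib"], "./BUILD/libs/K64F/GCC_ARM", "K64F", "GCC_ARM",
        name="my_lib", archive=True, jobs=4)
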
######################
### Legacy methods ###
######################

def mbed2_obj_path(target_name, toolchain_name):
    real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
    return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)

def build_lib(lib_id, target, toolchain_name, verbose=False,
              clean=False, macros=None, notify=None, jobs=1, silent=False,
              report=None, properties=None, extra_verbose=False,
              build_profile=None):
    """ Legacy method for building mbed libraries

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict of test properties to fill (see prep_properties)
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros.extend(lib_macros)
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report != None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s" % src_path

            if report != None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        toolchain.info("Building library %s (%s, %s)" %
                       (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library.
        # These files usually are not in the same directory as source files,
        # so the previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report != None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

# We do have unique legacy conventions about how we build and package the mbed
# library
def build_mbed_libs(target, toolchain_name, verbose=False,
                    clean=False, macros=None, notify=None, jobs=1, silent=False,
                    report=None, properties=None, extra_verbose=False,
                    build_profile=None):
    """ Returns True if the library was built and False if building was
    skipped

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    verbose - Write the actual tools command lines used if True
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - a dict of test properties to fill (see prep_properties)
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """

    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' %
              (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' %
              (target.name, supported_toolchains_text, 's'
               if len(target.supported_toolchains) > 1 else ''))

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        # Toolchain
        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # CMSIS
        toolchain.info("Building library %s (%s, %s)" %
                       ('CMSIS', target.name, toolchain_name))
        cmsis_src = MBED_CMSIS_PATH
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, build_target)
        toolchain.copy_files(resources.linker_script, build_toolchain)
        toolchain.copy_files(resources.bin_files, build_toolchain)

        objects = toolchain.compile_sources(resources, tmp_path)
        toolchain.copy_files(objects, build_toolchain)

        # mbed
        toolchain.info("Building library %s (%s, %s)" %
                       ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        # Target specific sources
        hal_src = MBED_TARGETS_PATH
        hal_implementation = toolchain.scan_resources(hal_src)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(hal_implementation,
                                            library_incdirs + incdirs)
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources
        mbed_resources = None
        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(dir)

        objects = toolchain.compile_sources(mbed_resources,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as objects as opposed
        # to being archived, because of the way the linker searches for
        # symbols in archives. These are:
        #   - retarget.o: to make sure that the C standard lib symbols get
        #     overridden
        #   - board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various
        #     weak SDK functions
        separate_names, separate_objects = ['retarget.o', 'board.o',
                                            'mbed_overrides.o'], []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise


def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    unique_supported_toolchains = []

    if not release_targets:
        for target in TARGET_NAMES:
            for toolchain in TARGET_MAP[target].supported_toolchains:
                if toolchain not in unique_supported_toolchains:
                    unique_supported_toolchains.append(toolchain)
    else:
        for target in release_targets:
            for toolchain in target[1]:
                if toolchain not in unique_supported_toolchains:
                    unique_supported_toolchains.append(toolchain)

    return unique_supported_toolchains

def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """ Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - remove results that match the string
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable

    if isinstance(release_version, basestring):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[version]]

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns)
    # Align table
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    target_names = []

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        for unique_toolchain in unique_supported_toolchains:
            if unique_toolchain in TARGET_MAP[target].supported_toolchains:
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
        else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result

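# --- Illustrative sketch (not part of the original module) -----------------
# Printing the support matrix for a single platform; the filter is a regex
# and "K64F" is only an example name.
def _example_print_matrix():
    print(mcu_toolchain_matrix(platform_filter="K64F", release_version='5'))
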
def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
        else None


def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for
    """
    result = ""
    if len(result_list) > 0:
        result += build_name + "\n"
        result += "\n".join(["  * %s" % f for f in result_list])
        result += "\n"
    return result

def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results
    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable
    columns_text = ['name', 'target', 'toolchain']
    columns_int = ['static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
    table = PrettyTable(columns_text + columns_int)

    for col in columns_text:
        table.align[col] = 'l'

    for col in columns_int:
        table.align[col] = 'r'

    for target in report:
        for toolchain in report[target]:
            for name in report[target][toolchain]:
                for dlist in report[target][toolchain][name]:
                    for dlistelem in dlist:
                        # Get 'memory_usage' record and build table with
                        # statistics
                        record = dlist[dlistelem]
                        if 'memory_usage' in record and record['memory_usage']:
                            # Note that summary should be in the last record of
                            # 'memory_usage' section. This is why we are
                            # grabbing last "[-1]" record.
                            row = [
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                record['memory_usage'][-1]['summary'][
                                    'static_ram'],
                                record['memory_usage'][-1]['summary']['stack'],
                                record['memory_usage'][-1]['summary']['heap'],
                                record['memory_usage'][-1]['summary'][
                                    'total_ram'],
                                record['memory_usage'][-1]['summary'][
                                    'total_flash'],
                            ]
                            table.add_row(row)

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result

def write_build_report(build_report, template_filename, filename):
    """Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of build
                        report
    filename - the location on disk to write the file to
    """
    build_report_failing = []
    build_report_passing = []

    for report in build_report:
        if len(report["failing"]) > 0:
            build_report_failing.append(report)
        else:
            build_report_passing.append(report)

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(
            failing_builds=build_report_failing,
            passing_builds=build_report_passing))

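# --- Illustrative sketch (not part of the original module) -----------------
# Tying the reporting helpers together after a build. The report dict is
# assumed to come from build_project()/build_library() calls above, and the
# template and output file names are placeholders under ci_templates/.
def _example_report_output(report):
    print(print_build_memory_usage(report))
    write_build_report([{"failing": [], "passing": []}],
                       'build_report.html', 'build_report_output.html')
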