Important changes to repositories hosted on mbed.com
Mbed-hosted Mercurial repositories are deprecated and are due to be permanently deleted in July 2026.
To keep a copy of this software, download the repository's Zip archive or clone it locally using Mercurial.
It is also possible to export all your personal repositories from the account settings page.
build_api.py
00001 """ 00002 mbed SDK 00003 Copyright (c) 2011-2016 ARM Limited 00004 00005 Licensed under the Apache License, Version 2.0 (the "License"); 00006 you may not use this file except in compliance with the License. 00007 You may obtain a copy of the License at 00008 00009 http://www.apache.org/licenses/LICENSE-2.0 00010 00011 Unless required by applicable law or agreed to in writing, software 00012 distributed under the License is distributed on an "AS IS" BASIS, 00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 See the License for the specific language governing permissions and 00015 limitations under the License. 00016 """ 00017 from __future__ import print_function, division, absolute_import 00018 00019 import re 00020 import tempfile 00021 import datetime 00022 import uuid 00023 from shutil import rmtree 00024 from os.path import join, exists, dirname, basename, abspath, normpath, splitext 00025 from os.path import relpath 00026 from os import linesep, remove, makedirs 00027 from time import time 00028 from intelhex import IntelHex 00029 from json import load, dump 00030 from jinja2 import FileSystemLoader 00031 from jinja2.environment import Environment 00032 00033 from .arm_pack_manager import Cache 00034 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException, 00035 ToolException, InvalidReleaseTargetException, 00036 intelhex_offset) 00037 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES, 00038 MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, 00039 MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS, 00040 MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, 00041 BUILD_DIR) 00042 from .targets import TARGET_NAMES, TARGET_MAP 00043 from .libraries import Library 00044 from .toolchains import TOOLCHAIN_CLASSES 00045 from .config import Config 00046 00047 RELEASE_VERSIONS = ['2', '5'] 00048 00049 def prep_report (report, target_name, toolchain_name, id_name): 00050 """Setup report keys 00051 00052 
Positional arguments: 00053 report - the report to fill 00054 target_name - the target being used 00055 toolchain_name - the toolchain being used 00056 id_name - the name of the executable or library being built 00057 """ 00058 if not target_name in report: 00059 report[target_name] = {} 00060 00061 if not toolchain_name in report[target_name]: 00062 report[target_name][toolchain_name] = {} 00063 00064 if not id_name in report[target_name][toolchain_name]: 00065 report[target_name][toolchain_name][id_name] = [] 00066 00067 def prep_properties (properties, target_name, toolchain_name, vendor_label): 00068 """Setup test properties 00069 00070 Positional arguments: 00071 properties - the dict to fill 00072 target_name - the target the test is targeting 00073 toolchain_name - the toolchain that will compile the test 00074 vendor_label - the vendor 00075 """ 00076 if not target_name in properties: 00077 properties[target_name] = {} 00078 00079 if not toolchain_name in properties[target_name]: 00080 properties[target_name][toolchain_name] = {} 00081 00082 properties[target_name][toolchain_name]["target"] = target_name 00083 properties[target_name][toolchain_name]["vendor"] = vendor_label 00084 properties[target_name][toolchain_name]["toolchain"] = toolchain_name 00085 00086 def create_result (target_name, toolchain_name, id_name, description): 00087 """Create a result dictionary 00088 00089 Positional arguments: 00090 target_name - the target being built for 00091 toolchain_name - the toolchain doing the building 00092 id_name - the name of the executable or library being built 00093 description - a human readable description of what's going on 00094 """ 00095 cur_result = {} 00096 cur_result["target_name"] = target_name 00097 cur_result["toolchain_name"] = toolchain_name 00098 cur_result["id"] = id_name 00099 cur_result["description"] = description 00100 cur_result["elapsed_time"] = 0 00101 cur_result["output"] = "" 00102 00103 return cur_result 00104 00105 def 
add_result_to_report (report, result): 00106 """Add a single result to a report dictionary 00107 00108 Positional arguments: 00109 report - the report to append to 00110 result - the result to append 00111 """ 00112 result["date"] = datetime.datetime.utcnow().isoformat() 00113 result["uuid"] = str(uuid.uuid1()) 00114 target = result["target_name"] 00115 toolchain = result["toolchain_name"] 00116 id_name = result['id'] 00117 result_wrap = {0: result} 00118 report[target][toolchain][id_name].append(result_wrap) 00119 00120 def get_config (src_paths, target, toolchain_name): 00121 """Get the configuration object for a target-toolchain combination 00122 00123 Positional arguments: 00124 src_paths - paths to scan for the configuration files 00125 target - the device we are building for 00126 toolchain_name - the string that identifies the build tools 00127 """ 00128 # Convert src_paths to a list if needed 00129 if not isinstance(src_paths, list): 00130 src_paths = [src_paths] 00131 00132 # Pass all params to the unified prepare_resources() 00133 toolchain = prepare_toolchain(src_paths, None, target, toolchain_name) 00134 00135 # Scan src_path for config files 00136 resources = toolchain.scan_resources(src_paths[0]) 00137 for path in src_paths[1:]: 00138 resources.add(toolchain.scan_resources(path)) 00139 00140 # Update configuration files until added features creates no changes 00141 prev_features = set() 00142 while True: 00143 # Update the configuration with any .json files found while scanning 00144 toolchain.config.add_config_files(resources.json_files) 00145 00146 # Add features while we find new ones 00147 features = set(toolchain.config.get_features()) 00148 if features == prev_features: 00149 break 00150 00151 for feature in features: 00152 if feature in resources.features: 00153 resources += resources.features[feature] 00154 00155 prev_features = features 00156 toolchain.config.validate_config() 00157 if toolchain.config.has_regions: 00158 _ = 
list(toolchain.config.regions) 00159 00160 cfg, macros = toolchain.config.get_config_data() 00161 features = toolchain.config.get_features() 00162 return cfg, macros, features 00163 00164 def is_official_target (target_name, version): 00165 """ Returns True, None if a target is part of the official release for the 00166 given version. Return False, 'reason' if a target is not part of the 00167 official release for the given version. 00168 00169 Positional arguments: 00170 target_name - Name if the target (ex. 'K64F') 00171 version - The release version string. Should be a string contained within 00172 RELEASE_VERSIONS 00173 """ 00174 00175 result = True 00176 reason = None 00177 target = TARGET_MAP[target_name] 00178 00179 if hasattr(target, 'release_versions') \ 00180 and version in target.release_versions: 00181 if version == '2': 00182 # For version 2, either ARM or uARM toolchain support is required 00183 required_toolchains = set(['ARM', 'uARM']) 00184 00185 if not len(required_toolchains.intersection( 00186 set(target.supported_toolchains))) > 0: 00187 result = False 00188 reason = ("Target '%s' must support " % target.name) + \ 00189 ("one of the folowing toolchains to be included in the") + \ 00190 ((" mbed 2.0 official release: %s" + linesep) % 00191 ", ".join(required_toolchains)) + \ 00192 ("Currently it is only configured to support the ") + \ 00193 ("following toolchains: %s" % 00194 ", ".join(target.supported_toolchains)) 00195 00196 elif version == '5': 00197 # For version 5, ARM, GCC_ARM, and IAR toolchain support is required 00198 required_toolchains = set(['ARM', 'GCC_ARM', 'IAR']) 00199 required_toolchains_sorted = list(required_toolchains) 00200 required_toolchains_sorted.sort() 00201 supported_toolchains = set(target.supported_toolchains) 00202 supported_toolchains_sorted = list(supported_toolchains) 00203 supported_toolchains_sorted.sort() 00204 00205 if not required_toolchains.issubset(supported_toolchains): 00206 result = False 00207 reason 
= ("Target '%s' must support " % target.name) + \ 00208 ("ALL of the folowing toolchains to be included in the") + \ 00209 ((" mbed OS 5.0 official release: %s" + linesep) % 00210 ", ".join(required_toolchains_sorted)) + \ 00211 ("Currently it is only configured to support the ") + \ 00212 ("following toolchains: %s" % 00213 ", ".join(supported_toolchains_sorted)) 00214 00215 elif not target.default_lib == 'std': 00216 result = False 00217 reason = ("Target '%s' must set the " % target.name) + \ 00218 ("'default_lib' to 'std' to be included in the ") + \ 00219 ("mbed OS 5.0 official release." + linesep) + \ 00220 ("Currently it is set to '%s'" % target.default_lib) 00221 00222 else: 00223 result = False 00224 reason = ("Target '%s' has set an invalid release version of '%s'" % 00225 version) + \ 00226 ("Please choose from the following release versions: %s" % 00227 ', '.join(RELEASE_VERSIONS)) 00228 00229 else: 00230 result = False 00231 if not hasattr(target, 'release_versions'): 00232 reason = "Target '%s' " % target.name 00233 reason += "does not have the 'release_versions' key set" 00234 elif not version in target.release_versions: 00235 reason = "Target '%s' does not contain the version '%s' " % \ 00236 (target.name, version) 00237 reason += "in its 'release_versions' key" 00238 00239 return result, reason 00240 00241 def transform_release_toolchains (toolchains, version): 00242 """ Given a list of toolchains and a release version, return a list of 00243 only the supported toolchains for that release 00244 00245 Positional arguments: 00246 toolchains - The list of toolchains 00247 version - The release version string. 
Should be a string contained within 00248 RELEASE_VERSIONS 00249 """ 00250 if version == '5': 00251 return ['ARM', 'GCC_ARM', 'IAR'] 00252 else: 00253 return toolchains 00254 00255 00256 def get_mbed_official_release (version): 00257 """ Given a release version string, return a tuple that contains a target 00258 and the supported toolchains for that release. 00259 Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')), 00260 ('K64F', ('ARM', 'GCC_ARM')), ...) 00261 00262 Positional arguments: 00263 version - The version string. Should be a string contained within 00264 RELEASE_VERSIONS 00265 """ 00266 00267 mbed_official_release = ( 00268 tuple( 00269 tuple( 00270 [ 00271 TARGET_MAP[target].name, 00272 tuple(transform_release_toolchains( 00273 TARGET_MAP[target].supported_toolchains, version)) 00274 ] 00275 ) for target in TARGET_NAMES \ 00276 if (hasattr(TARGET_MAP[target], 'release_versions') 00277 and version in TARGET_MAP[target].release_versions) 00278 ) 00279 ) 00280 00281 for target in mbed_official_release: 00282 is_official, reason = is_official_target(target[0], version) 00283 00284 if not is_official: 00285 raise InvalidReleaseTargetException(reason) 00286 00287 return mbed_official_release 00288 00289 00290 def prepare_toolchain (src_paths, build_dir, target, toolchain_name, 00291 macros=None, clean=False, jobs=1, 00292 notify=None, silent=False, verbose=False, 00293 extra_verbose=False, config=None, 00294 app_config=None, build_profile=None): 00295 """ Prepares resource related objects - toolchain, target, config 00296 00297 Positional arguments: 00298 src_paths - the paths to source directories 00299 target - ['LPC1768', 'LPC11U24', etc.] 
00300 toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR'] 00301 00302 Keyword arguments: 00303 macros - additional macros 00304 clean - Rebuild everything if True 00305 jobs - how many compilers we can run at once 00306 notify - Notify function for logs 00307 silent - suppress printing of progress indicators 00308 verbose - Write the actual tools command lines used if True 00309 extra_verbose - even more output! 00310 config - a Config object to use instead of creating one 00311 app_config - location of a chosen mbed_app.json file 00312 build_profile - a list of mergeable build profiles 00313 """ 00314 00315 # We need to remove all paths which are repeated to avoid 00316 # multiple compilations and linking with the same objects 00317 src_paths = [src_paths[0]] + list(set(src_paths[1:])) 00318 00319 # If the configuration object was not yet created, create it now 00320 config = config or Config(target, src_paths, app_config=app_config) 00321 target = config.target 00322 try: 00323 cur_tc = TOOLCHAIN_CLASSES[toolchain_name] 00324 except KeyError: 00325 raise KeyError("Toolchain %s not supported" % toolchain_name) 00326 00327 profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []} 00328 for contents in build_profile or []: 00329 for key in profile: 00330 profile[key].extend(contents[toolchain_name][key]) 00331 00332 toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir, 00333 extra_verbose=extra_verbose, build_profile=profile) 00334 00335 toolchain.config = config 00336 toolchain.jobs = jobs 00337 toolchain.build_all = clean 00338 toolchain.VERBOSE = verbose 00339 00340 return toolchain 00341 00342 def merge_region_list (region_list, destination, padding=b'\xFF'): 00343 """Merege the region_list into a single image 00344 00345 Positional Arguments: 00346 region_list - list of regions, which should contain filenames 00347 destination - file name to write all regions to 00348 padding - bytes to fill gapps with 00349 """ 00350 merged = 
IntelHex() 00351 _, format = splitext(destination) 00352 00353 print("Merging Regions:") 00354 00355 for region in region_list: 00356 if region.active and not region.filename: 00357 raise ToolException("Active region has no contents: No file found.") 00358 if region.filename: 00359 print(" Filling region %s with %s" % (region.name, region.filename)) 00360 part = intelhex_offset(region.filename, offset=region.start) 00361 part_size = (part.maxaddr() - part.minaddr()) + 1 00362 if part_size > region.size: 00363 raise ToolException("Contents of region %s does not fit" 00364 % region.name) 00365 merged.merge(part) 00366 pad_size = region.size - part_size 00367 if pad_size > 0 and region != region_list[-1]: 00368 print(" Padding region %s with 0x%x bytes" % (region.name, pad_size)) 00369 if format is ".hex": 00370 """The offset will be in the hex file generated when we're done, 00371 so we can skip padding here""" 00372 else: 00373 merged.puts(merged.maxaddr() + 1, padding * pad_size) 00374 00375 if not exists(dirname(destination)): 00376 makedirs(dirname(destination)) 00377 print("Space used after regions merged: 0x%x" % 00378 (merged.maxaddr() - merged.minaddr() + 1)) 00379 with open(destination, "wb+") as output: 00380 merged.tofile(output, format=format.strip(".")) 00381 00382 def scan_resources (src_paths, toolchain, dependencies_paths=None, 00383 inc_dirs=None, base_path=None, collect_ignores=False): 00384 """ Scan resources using initialized toolcain 00385 00386 Positional arguments 00387 src_paths - the paths to source directories 00388 toolchain - valid toolchain object 00389 dependencies_paths - dependency paths that we should scan for include dirs 00390 inc_dirs - additional include directories which should be added to 00391 the scanner resources 00392 """ 00393 00394 # Scan src_path 00395 resources = toolchain.scan_resources(src_paths[0], base_path=base_path, 00396 collect_ignores=collect_ignores) 00397 for path in src_paths[1:]: 00398 
resources.add(toolchain.scan_resources(path, base_path=base_path, 00399 collect_ignores=collect_ignores)) 00400 00401 # Scan dependency paths for include dirs 00402 if dependencies_paths is not None: 00403 for path in dependencies_paths: 00404 lib_resources = toolchain.scan_resources(path) 00405 resources.inc_dirs.extend(lib_resources.inc_dirs) 00406 00407 # Add additional include directories if passed 00408 if inc_dirs: 00409 if isinstance(inc_dirs, list): 00410 resources.inc_dirs.extend(inc_dirs) 00411 else: 00412 resources.inc_dirs.append(inc_dirs) 00413 00414 # Load resources into the config system which might expand/modify resources 00415 # based on config data 00416 resources = toolchain.config.load_resources(resources) 00417 00418 # Set the toolchain's configuration data 00419 toolchain.set_config_data(toolchain.config.get_config_data()) 00420 00421 if (hasattr(toolchain.target, "release_versions") and 00422 "5" not in toolchain.target.release_versions and 00423 "rtos" in toolchain.config.lib_config_data): 00424 raise NotSupportedException("Target does not support mbed OS 5") 00425 00426 return resources 00427 00428 def build_project (src_paths, build_path, target, toolchain_name, 00429 libraries_paths=None, linker_script=None, 00430 clean=False, notify=None, verbose=False, name=None, 00431 macros=None, inc_dirs=None, jobs=1, silent=False, 00432 report=None, properties=None, project_id=None, 00433 project_description=None, extra_verbose=False, config=None, 00434 app_config=None, build_profile=None, stats_depth=None): 00435 """ Build a project. A project may be a test or a user program. 
00436 00437 Positional arguments: 00438 src_paths - a path or list of paths that contain all files needed to build 00439 the project 00440 build_path - the directory where all of the object files will be placed 00441 target - the MCU or board that the project will compile for 00442 toolchain_name - the name of the build tools 00443 00444 Keyword arguments: 00445 libraries_paths - The location of libraries to include when linking 00446 linker_script - the file that drives the linker to do it's job 00447 clean - Rebuild everything if True 00448 notify - Notify function for logs 00449 verbose - Write the actual tools command lines used if True 00450 name - the name of the project 00451 macros - additional macros 00452 inc_dirs - additional directories where include files may be found 00453 jobs - how many compilers we can run at once 00454 silent - suppress printing of progress indicators 00455 report - a dict where a result may be appended 00456 properties - UUUUHHHHH beats me 00457 project_id - the name put in the report 00458 project_description - the human-readable version of what this thing does 00459 extra_verbose - even more output! 
00460 config - a Config object to use instead of creating one 00461 app_config - location of a chosen mbed_app.json file 00462 build_profile - a dict of flags that will be passed to the compiler 00463 stats_depth - depth level for memap to display file/dirs 00464 """ 00465 00466 # Convert src_path to a list if needed 00467 if not isinstance(src_paths, list): 00468 src_paths = [src_paths] 00469 # Extend src_paths wiht libraries_paths 00470 if libraries_paths is not None: 00471 src_paths.extend(libraries_paths) 00472 inc_dirs.extend(map(dirname, libraries_paths)) 00473 00474 if clean and exists(build_path): 00475 rmtree(build_path) 00476 mkdir(build_path) 00477 00478 toolchain = prepare_toolchain( 00479 src_paths, build_path, target, toolchain_name, macros=macros, 00480 clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose, 00481 extra_verbose=extra_verbose, config=config, app_config=app_config, 00482 build_profile=build_profile) 00483 00484 # The first path will give the name to the library 00485 name = (name or toolchain.config.name or 00486 basename(normpath(abspath(src_paths[0])))) 00487 toolchain.info("Building project %s (%s, %s)" % 00488 (name, toolchain.target.name, toolchain_name)) 00489 00490 # Initialize reporting 00491 if report != None: 00492 start = time() 00493 # If project_id is specified, use that over the default name 00494 id_name = project_id.upper() if project_id else name.upper() 00495 description = project_description if project_description else name 00496 vendor_label = toolchain.target.extra_labels[0] 00497 prep_report(report, toolchain.target.name, toolchain_name, id_name) 00498 cur_result = create_result(toolchain.target.name, toolchain_name, 00499 id_name, description) 00500 if properties != None: 00501 prep_properties(properties, toolchain.target.name, toolchain_name, 00502 vendor_label) 00503 00504 try: 00505 # Call unified scan_resources 00506 resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) 00507 
00508 # Change linker script if specified 00509 if linker_script is not None: 00510 resources.linker_script = linker_script 00511 00512 # Compile Sources 00513 objects = toolchain.compile_sources(resources, resources.inc_dirs) 00514 resources.objects.extend(objects) 00515 00516 # Link Program 00517 if toolchain.config.has_regions: 00518 res, _ = toolchain.link_program(resources, build_path, name + "_application") 00519 region_list = list(toolchain.config.regions) 00520 region_list = [r._replace(filename=res) if r.active else r 00521 for r in region_list] 00522 res = "%s.%s" % (join(build_path, name), 00523 getattr(toolchain.target, "OUTPUT_EXT", "bin")) 00524 merge_region_list(region_list, res) 00525 else: 00526 res, _ = toolchain.link_program(resources, build_path, name) 00527 00528 memap_instance = getattr(toolchain, 'memap_instance', None) 00529 memap_table = '' 00530 if memap_instance: 00531 real_stats_depth = stats_depth if stats_depth is not None else 2 00532 memap_table = memap_instance.generate_output('table', real_stats_depth) 00533 if not silent: 00534 if not stats_depth: 00535 memap_bars = memap_instance.generate_output('bars', 00536 real_stats_depth, None, 00537 getattr(toolchain.target, 'device_name', None)) 00538 print(memap_bars) 00539 else: 00540 print(memap_table) 00541 00542 # Write output to file in JSON format 00543 map_out = join(build_path, name + "_map.json") 00544 memap_instance.generate_output('json', real_stats_depth, map_out) 00545 00546 # Write output to file in CSV format for the CI 00547 map_csv = join(build_path, name + "_map.csv") 00548 memap_instance.generate_output('csv-ci', real_stats_depth, map_csv) 00549 00550 resources.detect_duplicates(toolchain) 00551 00552 if report != None: 00553 end = time() 00554 cur_result["elapsed_time"] = end - start 00555 cur_result["output"] = toolchain.get_output() + memap_table 00556 cur_result["result"] = "OK" 00557 cur_result["memory_usage"] = (memap_instance.mem_report 00558 if memap_instance is 
not None else None) 00559 cur_result["bin"] = res 00560 cur_result["elf"] = splitext(res)[0] + ".elf" 00561 cur_result.update(toolchain.report) 00562 00563 add_result_to_report(report, cur_result) 00564 00565 return res 00566 00567 except Exception as exc: 00568 if report != None: 00569 end = time() 00570 00571 if isinstance(exc, NotSupportedException): 00572 cur_result["result"] = "NOT_SUPPORTED" 00573 else: 00574 cur_result["result"] = "FAIL" 00575 00576 cur_result["elapsed_time"] = end - start 00577 00578 toolchain_output = toolchain.get_output() 00579 if toolchain_output: 00580 cur_result["output"] += toolchain_output 00581 00582 add_result_to_report(report, cur_result) 00583 00584 # Let Exception propagate 00585 raise 00586 00587 def build_library (src_paths, build_path, target, toolchain_name, 00588 dependencies_paths=None, name=None, clean=False, 00589 archive=True, notify=None, verbose=False, macros=None, 00590 inc_dirs=None, jobs=1, silent=False, report=None, 00591 properties=None, extra_verbose=False, project_id=None, 00592 remove_config_header_file=False, app_config=None, 00593 build_profile=None): 00594 """ Build a library 00595 00596 Positional arguments: 00597 src_paths - a path or list of paths that contain all files needed to build 00598 the library 00599 build_path - the directory where all of the object files will be placed 00600 target - the MCU or board that the project will compile for 00601 toolchain_name - the name of the build tools 00602 00603 Keyword arguments: 00604 dependencies_paths - The location of libraries to include when linking 00605 name - the name of the library 00606 clean - Rebuild everything if True 00607 archive - whether the library will create an archive file 00608 notify - Notify function for logs 00609 verbose - Write the actual tools command lines used if True 00610 macros - additional macros 00611 inc_dirs - additional directories where include files may be found 00612 jobs - how many compilers we can run at once 00613 
silent - suppress printing of progress indicators 00614 report - a dict where a result may be appended 00615 properties - UUUUHHHHH beats me 00616 extra_verbose - even more output! 00617 project_id - the name that goes in the report 00618 remove_config_header_file - delete config header file when done building 00619 app_config - location of a chosen mbed_app.json file 00620 build_profile - a dict of flags that will be passed to the compiler 00621 """ 00622 00623 # Convert src_path to a list if needed 00624 if not isinstance(src_paths, list): 00625 src_paths = [src_paths] 00626 00627 # Build path 00628 if archive: 00629 # Use temp path when building archive 00630 tmp_path = join(build_path, '.temp') 00631 mkdir(tmp_path) 00632 else: 00633 tmp_path = build_path 00634 00635 # Clean the build directory 00636 if clean and exists(tmp_path): 00637 rmtree(tmp_path) 00638 mkdir(tmp_path) 00639 00640 # Pass all params to the unified prepare_toolchain() 00641 toolchain = prepare_toolchain( 00642 src_paths, build_path, target, toolchain_name, macros=macros, 00643 clean=clean, jobs=jobs, notify=notify, silent=silent, 00644 verbose=verbose, extra_verbose=extra_verbose, app_config=app_config, 00645 build_profile=build_profile) 00646 00647 # The first path will give the name to the library 00648 if name is None: 00649 name = basename(normpath(abspath(src_paths[0]))) 00650 toolchain.info("Building library %s (%s, %s)" % 00651 (name, toolchain.target.name, toolchain_name)) 00652 00653 # Initialize reporting 00654 if report != None: 00655 start = time() 00656 # If project_id is specified, use that over the default name 00657 id_name = project_id.upper() if project_id else name.upper() 00658 description = name 00659 vendor_label = toolchain.target.extra_labels[0] 00660 prep_report(report, toolchain.target.name, toolchain_name, id_name) 00661 cur_result = create_result(toolchain.target.name, toolchain_name, 00662 id_name, description) 00663 cur_result['type'] = 'library' 00664 if 
properties != None: 00665 prep_properties(properties, toolchain.target.name, toolchain_name, 00666 vendor_label) 00667 00668 for src_path in src_paths: 00669 if not exists(src_path): 00670 error_msg = "The library source folder does not exist: %s", src_path 00671 if report != None: 00672 cur_result["output"] = error_msg 00673 cur_result["result"] = "FAIL" 00674 add_result_to_report(report, cur_result) 00675 raise Exception(error_msg) 00676 00677 try: 00678 # Call unified scan_resources 00679 resources = scan_resources(src_paths, toolchain, 00680 dependencies_paths=dependencies_paths, 00681 inc_dirs=inc_dirs) 00682 00683 00684 # Copy headers, objects and static libraries - all files needed for 00685 # static lib 00686 toolchain.copy_files(resources.headers, build_path, resources=resources) 00687 toolchain.copy_files(resources.objects, build_path, resources=resources) 00688 toolchain.copy_files(resources.libraries, build_path, 00689 resources=resources) 00690 toolchain.copy_files(resources.json_files, build_path, 00691 resources=resources) 00692 if resources.linker_script: 00693 toolchain.copy_files(resources.linker_script, build_path, 00694 resources=resources) 00695 00696 if resources.hex_files: 00697 toolchain.copy_files(resources.hex_files, build_path, 00698 resources=resources) 00699 00700 # Compile Sources 00701 objects = toolchain.compile_sources(resources, resources.inc_dirs) 00702 resources.objects.extend(objects) 00703 00704 if archive: 00705 toolchain.build_library(objects, build_path, name) 00706 00707 if remove_config_header_file: 00708 config_header_path = toolchain.get_config_header() 00709 if config_header_path: 00710 remove(config_header_path) 00711 00712 if report != None: 00713 end = time() 00714 cur_result["elapsed_time"] = end - start 00715 cur_result["output"] = toolchain.get_output() 00716 cur_result["result"] = "OK" 00717 00718 00719 add_result_to_report(report, cur_result) 00720 return True 00721 00722 except Exception as exc: 00723 if report 
!= None: 00724 end = time() 00725 00726 if isinstance(exc, ToolException): 00727 cur_result["result"] = "FAIL" 00728 elif isinstance(exc, NotSupportedException): 00729 cur_result["result"] = "NOT_SUPPORTED" 00730 00731 cur_result["elapsed_time"] = end - start 00732 00733 toolchain_output = toolchain.get_output() 00734 if toolchain_output: 00735 cur_result["output"] += toolchain_output 00736 00737 add_result_to_report(report, cur_result) 00738 00739 # Let Exception propagate 00740 raise 00741 00742 ###################### 00743 ### Legacy methods ### 00744 ###################### 00745 00746 def mbed2_obj_path(target_name, toolchain_name): 00747 real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__ 00748 return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name) 00749 00750 def build_lib (lib_id, target, toolchain_name, verbose=False, 00751 clean=False, macros=None, notify=None, jobs=1, silent=False, 00752 report=None, properties=None, extra_verbose=False, 00753 build_profile=None): 00754 """ Legacy method for building mbed libraries 00755 00756 Positional arguments: 00757 lib_id - the library's unique identifier 00758 target - the MCU or board that the project will compile for 00759 toolchain_name - the name of the build tools 00760 00761 Keyword arguments: 00762 clean - Rebuild everything if True 00763 verbose - Write the actual tools command lines used if True 00764 macros - additional macros 00765 notify - Notify function for logs 00766 jobs - how many compilers we can run at once 00767 silent - suppress printing of progress indicators 00768 report - a dict where a result may be appended 00769 properties - UUUUHHHHH beats me 00770 extra_verbose - even more output! 
00771 build_profile - a dict of flags that will be passed to the compiler 00772 """ 00773 lib = Library(lib_id) 00774 if not lib.is_supported(target, toolchain_name): 00775 print('Library "%s" is not yet supported on target %s with toolchain %s' 00776 % (lib_id, target.name, toolchain_name)) 00777 return False 00778 00779 # We need to combine macros from parameter list with macros from library 00780 # definition 00781 lib_macros = lib.macros if lib.macros else [] 00782 if macros: 00783 macros.extend(lib_macros) 00784 else: 00785 macros = lib_macros 00786 00787 src_paths = lib.source_dir 00788 build_path = lib.build_dir 00789 dependencies_paths = lib.dependencies 00790 inc_dirs = lib.inc_dirs 00791 inc_dirs_ext = lib.inc_dirs_ext 00792 00793 if not isinstance(src_paths, list): 00794 src_paths = [src_paths] 00795 00796 # The first path will give the name to the library 00797 name = basename(src_paths[0]) 00798 00799 if report != None: 00800 start = time() 00801 id_name = name.upper() 00802 description = name 00803 vendor_label = target.extra_labels[0] 00804 cur_result = None 00805 prep_report(report, target.name, toolchain_name, id_name) 00806 cur_result = create_result(target.name, toolchain_name, id_name, 00807 description) 00808 00809 if properties != None: 00810 prep_properties(properties, target.name, toolchain_name, 00811 vendor_label) 00812 00813 for src_path in src_paths: 00814 if not exists(src_path): 00815 error_msg = "The library source folder does not exist: %s", src_path 00816 00817 if report != None: 00818 cur_result["output"] = error_msg 00819 cur_result["result"] = "FAIL" 00820 add_result_to_report(report, cur_result) 00821 00822 raise Exception(error_msg) 00823 00824 try: 00825 # Toolchain instance 00826 # Create the desired build directory structure 00827 bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name)) 00828 mkdir(bin_path) 00829 tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name, 00830 toolchain_name)) 00831 
mkdir(tmp_path) 00832 00833 toolchain = prepare_toolchain( 00834 src_paths, tmp_path, target, toolchain_name, macros=macros, 00835 notify=notify, silent=silent, extra_verbose=extra_verbose, 00836 build_profile=build_profile, jobs=jobs, clean=clean) 00837 00838 toolchain.info("Building library %s (%s, %s)" % 00839 (name.upper(), target.name, toolchain_name)) 00840 00841 # Take into account the library configuration (MBED_CONFIG_FILE) 00842 config = toolchain.config 00843 config.add_config_files([MBED_CONFIG_FILE]) 00844 00845 # Scan Resources 00846 resources = [] 00847 for src_path in src_paths: 00848 resources.append(toolchain.scan_resources(src_path)) 00849 00850 # Add extra include directories / files which are required by library 00851 # This files usually are not in the same directory as source files so 00852 # previous scan will not include them 00853 if inc_dirs_ext is not None: 00854 for inc_ext in inc_dirs_ext: 00855 resources.append(toolchain.scan_resources(inc_ext)) 00856 00857 # Dependencies Include Paths 00858 dependencies_include_dir = [] 00859 if dependencies_paths is not None: 00860 for path in dependencies_paths: 00861 lib_resources = toolchain.scan_resources(path) 00862 dependencies_include_dir.extend(lib_resources.inc_dirs) 00863 dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs)) 00864 00865 if inc_dirs: 00866 dependencies_include_dir.extend(inc_dirs) 00867 00868 # Add other discovered configuration data to the configuration object 00869 for res in resources: 00870 config.load_resources(res) 00871 toolchain.set_config_data(toolchain.config.get_config_data()) 00872 00873 00874 # Copy Headers 00875 for resource in resources: 00876 toolchain.copy_files(resource.headers, build_path, 00877 resources=resource) 00878 00879 dependencies_include_dir.extend( 00880 toolchain.scan_resources(build_path).inc_dirs) 00881 00882 # Compile Sources 00883 objects = [] 00884 for resource in resources: 00885 
objects.extend(toolchain.compile_sources(resource, dependencies_include_dir)) 00886 00887 needed_update = toolchain.build_library(objects, bin_path, name) 00888 00889 if report != None and needed_update: 00890 end = time() 00891 cur_result["elapsed_time"] = end - start 00892 cur_result["output"] = toolchain.get_output() 00893 cur_result["result"] = "OK" 00894 00895 add_result_to_report(report, cur_result) 00896 return True 00897 00898 except Exception: 00899 if report != None: 00900 end = time() 00901 cur_result["result"] = "FAIL" 00902 cur_result["elapsed_time"] = end - start 00903 00904 toolchain_output = toolchain.get_output() 00905 if toolchain_output: 00906 cur_result["output"] += toolchain_output 00907 00908 add_result_to_report(report, cur_result) 00909 00910 # Let Exception propagate 00911 raise 00912 00913 # We do have unique legacy conventions about how we build and package the mbed 00914 # library 00915 def build_mbed_libs (target, toolchain_name, verbose=False, 00916 clean=False, macros=None, notify=None, jobs=1, silent=False, 00917 report=None, properties=None, extra_verbose=False, 00918 build_profile=None): 00919 """ Function returns True is library was built and false if building was 00920 skipped 00921 00922 Positional arguments: 00923 target - the MCU or board that the project will compile for 00924 toolchain_name - the name of the build tools 00925 00926 Keyword arguments: 00927 verbose - Write the actual tools command lines used if True 00928 clean - Rebuild everything if True 00929 macros - additional macros 00930 notify - Notify function for logs 00931 jobs - how many compilers we can run at once 00932 silent - suppress printing of progress indicators 00933 report - a dict where a result may be appended 00934 properties - UUUUHHHHH beats me 00935 extra_verbose - even more output! 
00936 build_profile - a dict of flags that will be passed to the compiler 00937 """ 00938 00939 if report != None: 00940 start = time() 00941 id_name = "MBED" 00942 description = "mbed SDK" 00943 vendor_label = target.extra_labels[0] 00944 cur_result = None 00945 prep_report(report, target.name, toolchain_name, id_name) 00946 cur_result = create_result(target.name, toolchain_name, id_name, 00947 description) 00948 00949 if properties != None: 00950 prep_properties(properties, target.name, toolchain_name, 00951 vendor_label) 00952 00953 # Check toolchain support 00954 if toolchain_name not in target.supported_toolchains: 00955 supported_toolchains_text = ", ".join(target.supported_toolchains) 00956 print('%s target is not yet supported by toolchain %s' % 00957 (target.name, toolchain_name)) 00958 print('%s target supports %s toolchain%s' % 00959 (target.name, supported_toolchains_text, 's' 00960 if len(target.supported_toolchains) > 1 else '')) 00961 00962 if report != None: 00963 cur_result["result"] = "SKIP" 00964 add_result_to_report(report, cur_result) 00965 00966 return False 00967 00968 try: 00969 # Source and Build Paths 00970 build_target = join(MBED_LIBRARIES, "TARGET_" + target.name) 00971 build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name)) 00972 mkdir(build_toolchain) 00973 00974 # Toolchain 00975 tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name)) 00976 mkdir(tmp_path) 00977 00978 toolchain = prepare_toolchain( 00979 [""], tmp_path, target, toolchain_name, macros=macros,verbose=verbose, 00980 notify=notify, silent=silent, extra_verbose=extra_verbose, 00981 build_profile=build_profile, jobs=jobs, clean=clean) 00982 00983 # Take into account the library configuration (MBED_CONFIG_FILE) 00984 config = toolchain.config 00985 config.add_config_files([MBED_CONFIG_FILE]) 00986 toolchain.set_config_data(toolchain.config.get_config_data()) 00987 00988 # mbed 00989 toolchain.info("Building library 
%s (%s, %s)" % 00990 ('MBED', target.name, toolchain_name)) 00991 00992 # Common Headers 00993 toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES) 00994 library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES] 00995 00996 for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS), 00997 (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM), 00998 (MBED_HAL, MBED_LIBRARIES_HAL)]: 00999 resources = toolchain.scan_resources(dir) 01000 toolchain.copy_files(resources.headers, dest) 01001 library_incdirs.append(dest) 01002 01003 cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH) 01004 toolchain.copy_files(cmsis_implementation.headers, build_target) 01005 toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain) 01006 toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain) 01007 01008 hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH) 01009 toolchain.copy_files(hal_implementation.headers + 01010 hal_implementation.hex_files + 01011 hal_implementation.libraries + 01012 [MBED_CONFIG_FILE], 01013 build_target, resources=hal_implementation) 01014 toolchain.copy_files(hal_implementation.linker_script, build_toolchain) 01015 toolchain.copy_files(hal_implementation.bin_files, build_toolchain) 01016 incdirs = toolchain.scan_resources(build_target).inc_dirs 01017 objects = toolchain.compile_sources(cmsis_implementation + hal_implementation, 01018 library_incdirs + incdirs + [tmp_path]) 01019 toolchain.copy_files(objects, build_toolchain) 01020 01021 # Common Sources 01022 mbed_resources = None 01023 for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]: 01024 mbed_resources += toolchain.scan_resources(dir) 01025 01026 objects = toolchain.compile_sources(mbed_resources, 01027 library_incdirs + incdirs) 01028 01029 # A number of compiled files need to be copied as objects as opposed to 01030 # way the linker search for symbols in archives. 
These are: 01031 # - mbed_retarget.o: to make sure that the C standard lib symbols get 01032 # overridden 01033 # - mbed_board.o: mbed_die is weak 01034 # - mbed_overrides.o: this contains platform overrides of various 01035 # weak SDK functions 01036 # - mbed_main.o: this contains main redirection 01037 separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o', 01038 'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], [] 01039 01040 for obj in objects: 01041 for name in separate_names: 01042 if obj.endswith(name): 01043 separate_objects.append(obj) 01044 01045 for obj in separate_objects: 01046 objects.remove(obj) 01047 01048 toolchain.build_library(objects, build_toolchain, "mbed") 01049 01050 for obj in separate_objects: 01051 toolchain.copy_files(obj, build_toolchain) 01052 01053 if report != None: 01054 end = time() 01055 cur_result["elapsed_time"] = end - start 01056 cur_result["output"] = toolchain.get_output() 01057 cur_result["result"] = "OK" 01058 01059 add_result_to_report(report, cur_result) 01060 01061 return True 01062 01063 except Exception as exc: 01064 if report != None: 01065 end = time() 01066 cur_result["result"] = "FAIL" 01067 cur_result["elapsed_time"] = end - start 01068 01069 toolchain_output = toolchain.get_output() 01070 if toolchain_output: 01071 cur_result["output"] += toolchain_output 01072 01073 cur_result["output"] += str(exc) 01074 01075 add_result_to_report(report, cur_result) 01076 01077 # Let Exception propagate 01078 raise 01079 01080 01081 def get_unique_supported_toolchains (release_targets=None): 01082 """ Get list of all unique toolchains supported by targets 01083 01084 Keyword arguments: 01085 release_targets - tuple structure returned from get_mbed_official_release(). 
01086 If release_targets is not specified, then it queries all 01087 known targets 01088 """ 01089 unique_supported_toolchains = [] 01090 01091 if not release_targets: 01092 for target in TARGET_NAMES: 01093 for toolchain in TARGET_MAP[target].supported_toolchains: 01094 if toolchain not in unique_supported_toolchains: 01095 unique_supported_toolchains.append(toolchain) 01096 else: 01097 for target in release_targets: 01098 for toolchain in target[1]: 01099 if toolchain not in unique_supported_toolchains: 01100 unique_supported_toolchains.append(toolchain) 01101 01102 if "ARM" in unique_supported_toolchains: 01103 unique_supported_toolchains.append("ARMC6") 01104 01105 return unique_supported_toolchains 01106 01107 def mcu_toolchain_list (release_version='5'): 01108 """ Shows list of toolchains 01109 01110 """ 01111 01112 if isinstance(release_version, basestring): 01113 # Force release_version to lowercase if it is a string 01114 release_version = release_version.lower() 01115 else: 01116 # Otherwise default to printing all known targets and toolchains 01117 release_version = 'all' 01118 01119 01120 version_release_targets = {} 01121 version_release_target_names = {} 01122 01123 for version in RELEASE_VERSIONS: 01124 version_release_targets[version] = get_mbed_official_release(version) 01125 version_release_target_names[version] = [x[0] for x in 01126 version_release_targets[ 01127 version]] 01128 01129 if release_version in RELEASE_VERSIONS: 01130 release_targets = version_release_targets[release_version] 01131 else: 01132 release_targets = None 01133 01134 unique_supported_toolchains = get_unique_supported_toolchains( 01135 release_targets) 01136 columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + unique_supported_toolchains 01137 return "\n".join(columns) 01138 01139 01140 def mcu_target_list (release_version='5'): 01141 """ Shows target list 01142 01143 """ 01144 01145 if isinstance(release_version, basestring): 01146 # Force release_version to lowercase 
if it is a string 01147 release_version = release_version.lower() 01148 else: 01149 # Otherwise default to printing all known targets and toolchains 01150 release_version = 'all' 01151 01152 01153 version_release_targets = {} 01154 version_release_target_names = {} 01155 01156 for version in RELEASE_VERSIONS: 01157 version_release_targets[version] = get_mbed_official_release(version) 01158 version_release_target_names[version] = [x[0] for x in 01159 version_release_targets[ 01160 version]] 01161 01162 if release_version in RELEASE_VERSIONS: 01163 release_targets = version_release_targets[release_version] 01164 else: 01165 release_targets = None 01166 01167 target_names = [] 01168 01169 if release_targets: 01170 target_names = [x[0] for x in release_targets] 01171 else: 01172 target_names = TARGET_NAMES 01173 01174 return "\n".join(target_names) 01175 01176 01177 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None, 01178 release_version='5'): 01179 """ Shows target map using prettytable 01180 01181 Keyword arguments: 01182 verbose_html - emit html instead of a simple table 01183 platform_filter - remove results that match the string 01184 release_version - get the matrix for this major version number 01185 """ 01186 # Only use it in this function so building works without extra modules 01187 from prettytable import PrettyTable 01188 01189 if isinstance(release_version, basestring): 01190 # Force release_version to lowercase if it is a string 01191 release_version = release_version.lower() 01192 else: 01193 # Otherwise default to printing all known targets and toolchains 01194 release_version = 'all' 01195 01196 01197 version_release_targets = {} 01198 version_release_target_names = {} 01199 01200 for version in RELEASE_VERSIONS: 01201 version_release_targets[version] = get_mbed_official_release(version) 01202 version_release_target_names[version] = [x[0] for x in 01203 version_release_targets[ 01204 version]] 01205 01206 if release_version in 
RELEASE_VERSIONS: 01207 release_targets = version_release_targets[release_version] 01208 else: 01209 release_targets = None 01210 01211 unique_supported_toolchains = get_unique_supported_toolchains( 01212 release_targets) 01213 prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS] 01214 01215 # All tests status table print 01216 columns = prepend_columns + unique_supported_toolchains 01217 table_printer = PrettyTable(columns) 01218 # Align table 01219 for col in columns: 01220 table_printer.align[col] = "c" 01221 table_printer.align["Target"] = "l" 01222 01223 perm_counter = 0 01224 target_counter = 0 01225 01226 target_names = [] 01227 01228 if release_targets: 01229 target_names = [x[0] for x in release_targets] 01230 else: 01231 target_names = TARGET_NAMES 01232 01233 for target in sorted(target_names): 01234 if platform_filter is not None: 01235 # FIlter out platforms using regex 01236 if re.search(platform_filter, target) is None: 01237 continue 01238 target_counter += 1 01239 01240 row = [target] # First column is platform name 01241 01242 for version in RELEASE_VERSIONS: 01243 if target in version_release_target_names[version]: 01244 text = "Supported" 01245 else: 01246 text = "-" 01247 row.append(text) 01248 01249 for unique_toolchain in unique_supported_toolchains: 01250 if (unique_toolchain in TARGET_MAP[target].supported_toolchains or 01251 (unique_toolchain == "ARMC6" and 01252 "ARM" in TARGET_MAP[target].supported_toolchains)): 01253 text = "Supported" 01254 perm_counter += 1 01255 else: 01256 text = "-" 01257 01258 row.append(text) 01259 table_printer.add_row(row) 01260 01261 result = table_printer.get_html_string() if verbose_html \ 01262 else table_printer.get_string() 01263 result += "\n" 01264 result += "Supported targets: %d\n"% (target_counter) 01265 if target_counter == 1: 01266 result += "Supported toolchains: %d"% (perm_counter) 01267 return result 01268 01269 01270 def get_target_supported_toolchains (target): 01271 """ 
Returns target supported toolchains list 01272 01273 Positional arguments: 01274 target - the target to get the supported toolchains of 01275 """ 01276 return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \ 01277 else None 01278 01279 01280 def print_build_results (result_list, build_name): 01281 """ Generate result string for build results 01282 01283 Positional arguments: 01284 result_list - the list of results to print 01285 build_name - the name of the build we are printing result for 01286 """ 01287 result = "" 01288 if len(result_list) > 0: 01289 result += build_name + "\n" 01290 result += "\n".join([" * %s" % f for f in result_list]) 01291 result += "\n" 01292 return result 01293 01294 def print_build_memory_usage (report): 01295 """ Generate result table with memory usage values for build results 01296 Aggregates (puts together) reports obtained from self.get_memory_summary() 01297 01298 Positional arguments: 01299 report - Report generated during build procedure. 01300 """ 01301 from prettytable import PrettyTable 01302 columns_text = ['name', 'target', 'toolchain'] 01303 columns_int = ['static_ram', 'total_flash'] 01304 table = PrettyTable(columns_text + columns_int) 01305 01306 for col in columns_text: 01307 table.align[col] = 'l' 01308 01309 for col in columns_int: 01310 table.align[col] = 'r' 01311 01312 for target in report: 01313 for toolchain in report[target]: 01314 for name in report[target][toolchain]: 01315 for dlist in report[target][toolchain][name]: 01316 for dlistelem in dlist: 01317 # Get 'memory_usage' record and build table with 01318 # statistics 01319 record = dlist[dlistelem] 01320 if 'memory_usage' in record and record['memory_usage']: 01321 # Note that summary should be in the last record of 01322 # 'memory_usage' section. This is why we are 01323 # grabbing last "[-1]" record. 
01324 row = [ 01325 record['description'], 01326 record['target_name'], 01327 record['toolchain_name'], 01328 record['memory_usage'][-1]['summary'][ 01329 'static_ram'], 01330 record['memory_usage'][-1]['summary'][ 01331 'total_flash'], 01332 ] 01333 table.add_row(row) 01334 01335 result = "Memory map breakdown for built projects (values in Bytes):\n" 01336 result += table.get_string(sortby='name') 01337 return result 01338 01339 def write_build_report (build_report, template_filename, filename): 01340 """Write a build report to disk using a template file 01341 01342 Positional arguments: 01343 build_report - a report generated by the build system 01344 template_filename - a file that contains the template for the style of build 01345 report 01346 filename - the location on disk to write the file to 01347 """ 01348 build_report_failing = [] 01349 build_report_passing = [] 01350 01351 for report in build_report: 01352 if len(report["failing"]) > 0: 01353 build_report_failing.append(report) 01354 else: 01355 build_report_passing.append(report) 01356 01357 env = Environment(extensions=['jinja2.ext.with_']) 01358 env.loader = FileSystemLoader('ci_templates') 01359 template = env.get_template(template_filename) 01360 01361 with open(filename, 'w+') as placeholder: 01362 placeholder.write(template.render( 01363 failing_builds=build_report_failing, 01364 passing_builds=build_report_passing)) 01365 01366 01367 def merge_build_data(filename, toolchain_report, app_type): 01368 path_to_file = dirname(abspath(filename)) 01369 try: 01370 build_data = load(open(filename)) 01371 except (IOError, ValueError): 01372 build_data = {'builds': []} 01373 for tgt in toolchain_report.values(): 01374 for tc in tgt.values(): 01375 for project in tc.values(): 01376 for build in project: 01377 try: 01378 build[0]['elf'] = relpath(build[0]['elf'], path_to_file) 01379 build[0]['bin'] = relpath(build[0]['bin'], path_to_file) 01380 except KeyError: 01381 pass 01382 if 'type' not in build[0]: 
01383 build[0]['type'] = app_type 01384 build_data['builds'].append(build[0]) 01385 dump(build_data, open(filename, "wb"), indent=4, separators=(',', ': '))
Generated on Tue Jul 12 2022 13:24:33 by doxygen 1.7.2