# Marco Zecchini / Example_RTOS — RTOS API example
# build_api.py (source listing; the original page's "Embed: (wiki syntax)"
# and "Show/hide line numbers" controls were viewer chrome, not code)
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import re
import tempfile
import datetime
import uuid
from shutil import rmtree
from os.path import join, exists, dirname, basename, abspath, normpath, splitext
from os.path import relpath
from os import linesep, remove, makedirs
from time import time
from intelhex import IntelHex
from json import load, dump

from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\
    ToolException, InvalidReleaseTargetException, intelhex_offset
from tools.paths import MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,\
    MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE,\
    MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,\
    BUILD_DIR
from tools.targets import TARGET_NAMES, TARGET_MAP
from tools.libraries import Library
from tools.toolchains import TOOLCHAIN_CLASSES
from jinja2 import FileSystemLoader
from jinja2.environment import Environment
from tools.config import Config

# NOTE: the Python-2-only `from types import ListType` import was removed;
# all `type(x) != ListType` checks below now use `isinstance(x, list)`.

RELEASE_VERSIONS = ['2', '5']


def prep_report(report, target_name, toolchain_name, id_name):
    """Set up the nested report keys for a target/toolchain/id combination.

    Positional arguments:
    report - the report to fill
    target_name - the target being used
    toolchain_name - the toolchain being used
    id_name - the name of the executable or library being built
    """
    if target_name not in report:
        report[target_name] = {}

    if toolchain_name not in report[target_name]:
        report[target_name][toolchain_name] = {}

    if id_name not in report[target_name][toolchain_name]:
        report[target_name][toolchain_name][id_name] = []


def prep_properties(properties, target_name, toolchain_name, vendor_label):
    """Set up test properties for a target/toolchain combination.

    Positional arguments:
    properties - the dict to fill
    target_name - the target the test is targeting
    toolchain_name - the toolchain that will compile the test
    vendor_label - the vendor
    """
    if target_name not in properties:
        properties[target_name] = {}

    if toolchain_name not in properties[target_name]:
        properties[target_name][toolchain_name] = {}

    properties[target_name][toolchain_name]["target"] = target_name
    properties[target_name][toolchain_name]["vendor"] = vendor_label
    properties[target_name][toolchain_name]["toolchain"] = toolchain_name


def create_result(target_name, toolchain_name, id_name, description):
    """Create a result dictionary with empty output/time placeholders.

    Positional arguments:
    target_name - the target being built for
    toolchain_name - the toolchain doing the building
    id_name - the name of the executable or library being built
    description - a human readable description of what's going on
    """
    cur_result = {}
    cur_result["target_name"] = target_name
    cur_result["toolchain_name"] = toolchain_name
    cur_result["id"] = id_name
    cur_result["description"] = description
    cur_result["elapsed_time"] = 0
    cur_result["output"] = ""

    return cur_result


def add_result_to_report(report, result):
    """Add a single result to a report dictionary, stamping date and uuid.

    Positional arguments:
    report - the report to append to
    result - the result to append
    """
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())
    target = result["target_name"]
    toolchain = result["toolchain_name"]
    id_name = result['id']
    # Results are wrapped in a {0: result} dict for legacy report consumers.
    result_wrap = {0: result}
    report[target][toolchain][id_name].append(result_wrap)


def get_config(src_paths, target, toolchain_name):
    """Get the configuration object for a target-toolchain combination.

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools

    Returns a (config_data, macros, features) triple.
    """
    # Convert src_paths to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)

    # Scan src_path for config files
    resources = toolchain.scan_resources(src_paths[0])
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path))

    # Update configuration files until added features create no changes
    prev_features = set()
    while True:
        # Update the configuration with any .json files found while scanning
        toolchain.config.add_config_files(resources.json_files)

        # Add features while we find new ones
        features = set(toolchain.config.get_features())
        if features == prev_features:
            break

        for feature in features:
            if feature in resources.features:
                resources += resources.features[feature]

        prev_features = features
    toolchain.config.validate_config()
    if toolchain.config.has_regions:
        # Force evaluation of the regions generator so config errors surface here
        _ = list(toolchain.config.regions)

    cfg, macros = toolchain.config.get_config_data()
    features = toolchain.config.get_features()
    return cfg, macros, features


def is_official_target(target_name, version):
    """Return (True, None) if a target is part of the official release for the
    given version, or (False, reason) if it is not.

    Positional arguments:
    target_name - Name of the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the folowing toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = list(required_toolchains)
            required_toolchains_sorted.sort()
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = list(supported_toolchains)
            supported_toolchains_sorted.sort()

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the folowing toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains_sorted)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(supported_toolchains_sorted))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            # BUGFIX: the format string has two '%s' placeholders but was fed
            # only `version`, which raised TypeError instead of producing the
            # intended message.
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif version not in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason


def transform_release_toolchains(toolchains, version):
    """Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release.

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    if version == '5':
        return ['ARM', 'GCC_ARM', 'IAR']
    else:
        return toolchains


def get_mbed_official_release(version):
    """Given a release version string, return a tuple of (target name,
    supported toolchains) pairs for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS

    Raises InvalidReleaseTargetException if any listed target fails the
    official-target check.
    """

    mbed_official_release = (
        tuple(
            tuple(
                [
                    TARGET_MAP[target].name,
                    tuple(transform_release_toolchains(
                        TARGET_MAP[target].supported_toolchains, version))
                ]
            ) for target in TARGET_NAMES \
            if (hasattr(TARGET_MAP[target], 'release_versions')
                and version in TARGET_MAP[target].release_versions)
        )
    )

    for target in mbed_official_release:
        is_official, reason = is_official_target(target[0], version)

        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release


def add_regions_to_profile(profile, config, toolchain_class):
    """Add regions to the build profile, if there are any.

    Positional Arguments:
    profile - the profile to update
    config - the configuration object that owns the region
    toolchain_class - the class of the toolchain being used
    """
    if not profile:
        return
    regions = list(config.regions)
    for region in regions:
        for define in [(region.name.upper() + "_ADDR", region.start),
                       (region.name.upper() + "_SIZE", region.size)]:
            profile["common"].append("-D%s=0x%x" % define)
    # Exactly one region is expected to be active; its bounds become the
    # MBED_APP_START/SIZE linker defines.
    active_region = [r for r in regions if r.active][0]
    for define in [("MBED_APP_START", active_region.start),
                   ("MBED_APP_SIZE", active_region.size)]:
        profile["ld"].append(toolchain_class.make_ld_define(*define))

    print("Using regions in this build:")
    for region in regions:
        print("  Region %s size 0x%x, offset 0x%x"
              % (region.name, region.size, region.start))


def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, silent=False, verbose=False,
                      extra_verbose=False, config=None,
                      app_config=None, build_profile=None):
    """Prepare resource related objects - toolchain, target, config.

    Positional arguments:
    src_paths - the paths to source directories
    build_dir - the directory where build output will be placed
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    silent - suppress printing of progress indicators
    verbose - Write the actual tools command lines used if True
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a list of mergeable build profiles
    """

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target
    try:
        cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
    for contents in build_profile or []:
        for key in profile:
            profile[key].extend(contents[toolchain_name][key])

    if config.has_regions:
        add_regions_to_profile(profile, config, cur_tc)

    toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
                       extra_verbose=extra_verbose, build_profile=profile)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean
    toolchain.VERBOSE = verbose

    return toolchain


def merge_region_list(region_list, destination, padding=b'\xFF'):
    """Merge the region_list into a single image.

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    padding - bytes to fill gaps with

    Raises ToolException if an active region has no file or a region's
    contents overflow its declared size.
    """
    merged = IntelHex()

    print("Merging Regions:")

    for region in region_list:
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if region.filename:
            print("  Filling region %s with %s" % (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part_size = (part.maxaddr() - part.minaddr()) + 1
            if part_size > region.size:
                raise ToolException("Contents of region %s does not fit"
                                    % region.name)
            merged.merge(part)
            pad_size = region.size - part_size
            # The last region is left unpadded so the image is not inflated.
            if pad_size > 0 and region != region_list[-1]:
                print("  Padding region %s with 0x%x bytes" % (region.name, pad_size))
                merged.puts(merged.maxaddr() + 1, padding * pad_size)

    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    print("Space used after regions merged: 0x%x" %
          (merged.maxaddr() - merged.minaddr() + 1))
    with open(destination, "wb+") as output:
        merged.tofile(output, format='bin')


def scan_resources(src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None, collect_ignores=False):
    """Scan resources using an initialized toolchain.

    Positional arguments:
    src_paths - the paths to source directories
    toolchain - valid toolchain object

    Keyword arguments:
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources
    base_path - base path passed through to the toolchain scanner
    collect_ignores - passed through to the toolchain scanner

    Raises NotSupportedException when the target does not support mbed OS 5
    but the RTOS is part of the build.
    """

    # Scan src_path
    resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
                                         collect_ignores=collect_ignores)
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path, base_path=base_path,
                                               collect_ignores=collect_ignores))

    # Scan dependency paths for include dirs
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            resources.inc_dirs.extend(lib_resources.inc_dirs)

    # Add additional include directories if passed
    if inc_dirs:
        if isinstance(inc_dirs, list):
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # Load resources into the config system which might expand/modify resources
    # based on config data
    resources = toolchain.config.load_resources(resources)

    # Set the toolchain's configuration data
    toolchain.set_config_data(toolchain.config.get_config_data())

    if (hasattr(toolchain.target, "release_versions") and
            "5" not in toolchain.target.release_versions and
            "rtos" in toolchain.config.lib_config_data):
        if "Cortex-A" in toolchain.target.core:
            raise NotSupportedException(
                ("%s Will be supported in mbed OS 5.6. "
                 "To use the %s, please checkout the mbed OS 5.4 release branch. "
                 "See https://developer.mbed.org/platforms/Renesas-GR-PEACH/#important-notice "
                 "for more information") % (toolchain.target.name, toolchain.target.name))
        else:
            raise NotSupportedException("Target does not support mbed OS 5")

    return resources


def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None,
                  macros=None, inc_dirs=None, jobs=1, silent=False,
                  report=None, properties=None, project_id=None,
                  project_description=None, extra_verbose=False, config=None,
                  app_config=None, build_profile=None, stats_depth=None):
    """Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do it's job
    clean - Rebuild everything if True
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    extra_verbose - even more output!
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    stats_depth - depth level for memap to display file/dirs
    """

    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        # BUGFIX: inc_dirs defaults to None; extending it unconditionally
        # raised AttributeError whenever libraries_paths was supplied alone.
        if inc_dirs is None:
            inc_dirs = []
        inc_dirs.extend(map(dirname, libraries_paths))

    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, config=config, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    name = (name or toolchain.config.name or
            basename(normpath(abspath(src_paths[0]))))
    toolchain.info("Building project %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        if toolchain.config.has_regions:
            # Region builds link an intermediate "_application" image, then
            # merge all regions into the final .bin.
            res, _ = toolchain.link_program(resources, build_path, name + "_application")
            region_list = list(toolchain.config.regions)
            region_list = [r._replace(filename=res) if r.active else r
                           for r in region_list]
            res = join(build_path, name) + ".bin"
            merge_region_list(region_list, res)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table', stats_depth)

            if not silent:
                # BUGFIX: was a Python 2 `print memap_table` statement,
                # inconsistent with the print() calls used everywhere else
                # in this file and a syntax error under Python 3.
                print(memap_table)

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', stats_depth, map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', stats_depth, map_csv)

        resources.detect_duplicates(toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output() + memap_table
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = (memap_instance.mem_report
                                          if memap_instance is not None else None)
            cur_result["bin"] = res
            cur_result["elf"] = splitext(res)[0] + ".elf"
            cur_result.update(toolchain.report)

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report is not None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise


def build_library(src_paths, build_path, target, toolchain_name,
                  dependencies_paths=None, name=None, clean=False,
                  archive=True, notify=None, verbose=False, macros=None,
                  inc_dirs=None, jobs=1, silent=False, report=None,
                  properties=None, extra_verbose=False, project_id=None,
                  remove_config_header_file=False, app_config=None,
                  build_profile=None):
    """Build a library.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the library
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    dependencies_paths - The location of libraries to include when linking
    name - the name of the library
    clean - Rebuild everything if True
    archive - whether the library will create an archive file
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    project_id - the name that goes in the report
    remove_config_header_file - delete config header file when done building
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    """

    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Build path
    if archive:
        # Use temp path when building archive
        tmp_path = join(build_path, '.temp')
        mkdir(tmp_path)
    else:
        tmp_path = build_path

    # Clean the build directory
    if clean and exists(tmp_path):
        rmtree(tmp_path)
    mkdir(tmp_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent,
        verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))
    toolchain.info("Building library %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        cur_result['type'] = 'library'
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # BUGFIX: was `"...: %s", src_path` (a tuple) so the path was
            # never interpolated into the message.
            error_msg = "The library source folder does not exist: %s" % src_path
            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain,
                                   dependencies_paths=dependencies_paths,
                                   inc_dirs=inc_dirs)

        # Copy headers, objects and static libraries - all files needed for
        # static lib
        toolchain.copy_files(resources.headers, build_path, resources=resources)
        toolchain.copy_files(resources.objects, build_path, resources=resources)
        toolchain.copy_files(resources.libraries, build_path,
                             resources=resources)
        toolchain.copy_files(resources.json_files, build_path,
                             resources=resources)
        if resources.linker_script:
            toolchain.copy_files(resources.linker_script, build_path,
                                 resources=resources)

        if resources.hex_files:
            toolchain.copy_files(resources.hex_files, build_path,
                                 resources=resources)

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if remove_config_header_file:
            config_header_path = toolchain.get_config_header()
            if config_header_path:
                remove(config_header_path)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report is not None:
            end = time()

            if isinstance(exc, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"

            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

######################
### Legacy methods ###
######################


def mbed2_obj_path(target_name, toolchain_name):
    """Return the legacy mbed 2 object subdirectory for a target/toolchain."""
    real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
    return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)


def build_lib(lib_id, target, toolchain_name, verbose=False,
              clean=False, macros=None, notify=None, jobs=1, silent=False,
              report=None, properties=None, extra_verbose=False,
              build_profile=None):
    """Legacy method for building mbed libraries.

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros.extend(lib_macros)
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # BUGFIX: was a tuple (message, path); the path was never
            # interpolated into the message.
            error_msg = "The library source folder does not exist: %s" % src_path

            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        toolchain.info("Building library %s (%s, %s)" %
                       (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library
        # These files usually are not in the same directory as source files so
        # the previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise

# NOTE(review): the source excerpt is truncated partway through the definition
# of build_mbed_libs() (only its signature and the start of its docstring are
# visible: "Function returns True is library was built and false if building
# was skipped"). It is omitted here rather than reconstructed from guesswork;
# restore it from the complete upstream file.
00957 build_profile - a dict of flags that will be passed to the compiler 00958 """ 00959 00960 if report != None: 00961 start = time() 00962 id_name = "MBED" 00963 description = "mbed SDK" 00964 vendor_label = target.extra_labels[0] 00965 cur_result = None 00966 prep_report(report, target.name, toolchain_name, id_name) 00967 cur_result = create_result(target.name, toolchain_name, id_name, 00968 description) 00969 00970 if properties != None: 00971 prep_properties(properties, target.name, toolchain_name, 00972 vendor_label) 00973 00974 # Check toolchain support 00975 if toolchain_name not in target.supported_toolchains: 00976 supported_toolchains_text = ", ".join(target.supported_toolchains) 00977 print('%s target is not yet supported by toolchain %s' % 00978 (target.name, toolchain_name)) 00979 print('%s target supports %s toolchain%s' % 00980 (target.name, supported_toolchains_text, 's' 00981 if len(target.supported_toolchains) > 1 else '')) 00982 00983 if report != None: 00984 cur_result["result"] = "SKIP" 00985 add_result_to_report(report, cur_result) 00986 00987 return False 00988 00989 try: 00990 # Source and Build Paths 00991 build_target = join(MBED_LIBRARIES, "TARGET_" + target.name) 00992 build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name)) 00993 mkdir(build_toolchain) 00994 00995 # Toolchain 00996 tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name)) 00997 mkdir(tmp_path) 00998 00999 toolchain = prepare_toolchain( 01000 [""], tmp_path, target, toolchain_name, macros=macros,verbose=verbose, 01001 notify=notify, silent=silent, extra_verbose=extra_verbose, 01002 build_profile=build_profile, jobs=jobs, clean=clean) 01003 01004 # Take into account the library configuration (MBED_CONFIG_FILE) 01005 config = toolchain.config 01006 config.add_config_files([MBED_CONFIG_FILE]) 01007 toolchain.set_config_data(toolchain.config.get_config_data()) 01008 01009 # CMSIS 01010 toolchain.info("Building library 
%s (%s, %s)" % 01011 ('CMSIS', target.name, toolchain_name)) 01012 cmsis_src = MBED_CMSIS_PATH 01013 resources = toolchain.scan_resources(cmsis_src) 01014 01015 toolchain.copy_files(resources.headers, build_target) 01016 toolchain.copy_files(resources.linker_script, build_toolchain) 01017 toolchain.copy_files(resources.bin_files, build_toolchain) 01018 01019 objects = toolchain.compile_sources(resources, tmp_path) 01020 toolchain.copy_files(objects, build_toolchain) 01021 01022 # mbed 01023 toolchain.info("Building library %s (%s, %s)" % 01024 ('MBED', target.name, toolchain_name)) 01025 01026 # Common Headers 01027 toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES) 01028 library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES] 01029 01030 for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS), 01031 (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM), 01032 (MBED_HAL, MBED_LIBRARIES_HAL)]: 01033 resources = toolchain.scan_resources(dir) 01034 toolchain.copy_files(resources.headers, dest) 01035 library_incdirs.append(dest) 01036 01037 # Target specific sources 01038 hal_src = MBED_TARGETS_PATH 01039 hal_implementation = toolchain.scan_resources(hal_src) 01040 toolchain.copy_files(hal_implementation.headers + 01041 hal_implementation.hex_files + 01042 hal_implementation.libraries + 01043 [MBED_CONFIG_FILE], 01044 build_target, resources=hal_implementation) 01045 toolchain.copy_files(hal_implementation.linker_script, build_toolchain) 01046 toolchain.copy_files(hal_implementation.bin_files, build_toolchain) 01047 incdirs = toolchain.scan_resources(build_target).inc_dirs 01048 objects = toolchain.compile_sources(hal_implementation, 01049 library_incdirs + incdirs) 01050 toolchain.copy_files(objects, build_toolchain) 01051 01052 # Common Sources 01053 mbed_resources = None 01054 for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]: 01055 mbed_resources += toolchain.scan_resources(dir) 01056 01057 objects = toolchain.compile_sources(mbed_resources, 01058 library_incdirs + 
incdirs) 01059 01060 # A number of compiled files need to be copied as objects as opposed to 01061 # way the linker search for symbols in archives. These are: 01062 # - mbed_retarget.o: to make sure that the C standard lib symbols get 01063 # overridden 01064 # - mbed_board.o: mbed_die is weak 01065 # - mbed_overrides.o: this contains platform overrides of various 01066 # weak SDK functions 01067 # - mbed_main.o: this contains main redirection 01068 separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o', 01069 'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], [] 01070 01071 for obj in objects: 01072 for name in separate_names: 01073 if obj.endswith(name): 01074 separate_objects.append(obj) 01075 01076 for obj in separate_objects: 01077 objects.remove(obj) 01078 01079 toolchain.build_library(objects, build_toolchain, "mbed") 01080 01081 for obj in separate_objects: 01082 toolchain.copy_files(obj, build_toolchain) 01083 01084 if report != None: 01085 end = time() 01086 cur_result["elapsed_time"] = end - start 01087 cur_result["output"] = toolchain.get_output() 01088 cur_result["result"] = "OK" 01089 01090 add_result_to_report(report, cur_result) 01091 01092 return True 01093 01094 except Exception as exc: 01095 if report != None: 01096 end = time() 01097 cur_result["result"] = "FAIL" 01098 cur_result["elapsed_time"] = end - start 01099 01100 toolchain_output = toolchain.get_output() 01101 if toolchain_output: 01102 cur_result["output"] += toolchain_output 01103 01104 cur_result["output"] += str(exc) 01105 01106 add_result_to_report(report, cur_result) 01107 01108 # Let Exception propagate 01109 raise 01110 01111 01112 def get_unique_supported_toolchains (release_targets=None): 01113 """ Get list of all unique toolchains supported by targets 01114 01115 Keyword arguments: 01116 release_targets - tuple structure returned from get_mbed_official_release(). 
01117 If release_targets is not specified, then it queries all 01118 known targets 01119 """ 01120 unique_supported_toolchains = [] 01121 01122 if not release_targets: 01123 for target in TARGET_NAMES: 01124 for toolchain in TARGET_MAP[target].supported_toolchains: 01125 if toolchain not in unique_supported_toolchains: 01126 unique_supported_toolchains.append(toolchain) 01127 else: 01128 for target in release_targets: 01129 for toolchain in target[1]: 01130 if toolchain not in unique_supported_toolchains: 01131 unique_supported_toolchains.append(toolchain) 01132 01133 if "ARM" in unique_supported_toolchains: 01134 unique_supported_toolchains.append("ARMC6") 01135 01136 return unique_supported_toolchains 01137 01138 def mcu_toolchain_list (release_version='5'): 01139 """ Shows list of toolchains 01140 01141 """ 01142 01143 if isinstance(release_version, basestring): 01144 # Force release_version to lowercase if it is a string 01145 release_version = release_version.lower() 01146 else: 01147 # Otherwise default to printing all known targets and toolchains 01148 release_version = 'all' 01149 01150 01151 version_release_targets = {} 01152 version_release_target_names = {} 01153 01154 for version in RELEASE_VERSIONS: 01155 version_release_targets[version] = get_mbed_official_release(version) 01156 version_release_target_names[version] = [x[0] for x in 01157 version_release_targets[ 01158 version]] 01159 01160 if release_version in RELEASE_VERSIONS: 01161 release_targets = version_release_targets[release_version] 01162 else: 01163 release_targets = None 01164 01165 unique_supported_toolchains = get_unique_supported_toolchains( 01166 release_targets) 01167 columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + unique_supported_toolchains 01168 return "\n".join(columns) 01169 01170 01171 def mcu_target_list (release_version='5'): 01172 """ Shows target list 01173 01174 """ 01175 01176 if isinstance(release_version, basestring): 01177 # Force release_version to lowercase 
if it is a string 01178 release_version = release_version.lower() 01179 else: 01180 # Otherwise default to printing all known targets and toolchains 01181 release_version = 'all' 01182 01183 01184 version_release_targets = {} 01185 version_release_target_names = {} 01186 01187 for version in RELEASE_VERSIONS: 01188 version_release_targets[version] = get_mbed_official_release(version) 01189 version_release_target_names[version] = [x[0] for x in 01190 version_release_targets[ 01191 version]] 01192 01193 if release_version in RELEASE_VERSIONS: 01194 release_targets = version_release_targets[release_version] 01195 else: 01196 release_targets = None 01197 01198 target_names = [] 01199 01200 if release_targets: 01201 target_names = [x[0] for x in release_targets] 01202 else: 01203 target_names = TARGET_NAMES 01204 01205 return "\n".join(target_names) 01206 01207 01208 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None, 01209 release_version='5'): 01210 """ Shows target map using prettytable 01211 01212 Keyword arguments: 01213 verbose_html - emit html instead of a simple table 01214 platform_filter - remove results that match the string 01215 release_version - get the matrix for this major version number 01216 """ 01217 # Only use it in this function so building works without extra modules 01218 from prettytable import PrettyTable 01219 01220 if isinstance(release_version, basestring): 01221 # Force release_version to lowercase if it is a string 01222 release_version = release_version.lower() 01223 else: 01224 # Otherwise default to printing all known targets and toolchains 01225 release_version = 'all' 01226 01227 01228 version_release_targets = {} 01229 version_release_target_names = {} 01230 01231 for version in RELEASE_VERSIONS: 01232 version_release_targets[version] = get_mbed_official_release(version) 01233 version_release_target_names[version] = [x[0] for x in 01234 version_release_targets[ 01235 version]] 01236 01237 if release_version in 
RELEASE_VERSIONS: 01238 release_targets = version_release_targets[release_version] 01239 else: 01240 release_targets = None 01241 01242 unique_supported_toolchains = get_unique_supported_toolchains( 01243 release_targets) 01244 prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS] 01245 01246 # All tests status table print 01247 columns = prepend_columns + unique_supported_toolchains 01248 table_printer = PrettyTable(columns) 01249 # Align table 01250 for col in columns: 01251 table_printer.align[col] = "c" 01252 table_printer.align["Target"] = "l" 01253 01254 perm_counter = 0 01255 target_counter = 0 01256 01257 target_names = [] 01258 01259 if release_targets: 01260 target_names = [x[0] for x in release_targets] 01261 else: 01262 target_names = TARGET_NAMES 01263 01264 for target in sorted(target_names): 01265 if platform_filter is not None: 01266 # FIlter out platforms using regex 01267 if re.search(platform_filter, target) is None: 01268 continue 01269 target_counter += 1 01270 01271 row = [target] # First column is platform name 01272 01273 for version in RELEASE_VERSIONS: 01274 if target in version_release_target_names[version]: 01275 text = "Supported" 01276 else: 01277 text = "-" 01278 row.append(text) 01279 01280 for unique_toolchain in unique_supported_toolchains: 01281 if (unique_toolchain in TARGET_MAP[target].supported_toolchains or 01282 (unique_toolchain == "ARMC6" and 01283 "ARM" in TARGET_MAP[target].supported_toolchains)): 01284 text = "Supported" 01285 perm_counter += 1 01286 else: 01287 text = "-" 01288 01289 row.append(text) 01290 table_printer.add_row(row) 01291 01292 result = table_printer.get_html_string() if verbose_html \ 01293 else table_printer.get_string() 01294 result += "\n" 01295 result += "Supported targets: %d\n"% (target_counter) 01296 if target_counter == 1: 01297 result += "Supported toolchains: %d"% (perm_counter) 01298 return result 01299 01300 01301 def get_target_supported_toolchains (target): 01302 """ 
Returns target supported toolchains list 01303 01304 Positional arguments: 01305 target - the target to get the supported toolchains of 01306 """ 01307 return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \ 01308 else None 01309 01310 01311 def print_build_results (result_list, build_name): 01312 """ Generate result string for build results 01313 01314 Positional arguments: 01315 result_list - the list of results to print 01316 build_name - the name of the build we are printing result for 01317 """ 01318 result = "" 01319 if len(result_list) > 0: 01320 result += build_name + "\n" 01321 result += "\n".join([" * %s" % f for f in result_list]) 01322 result += "\n" 01323 return result 01324 01325 def print_build_memory_usage (report): 01326 """ Generate result table with memory usage values for build results 01327 Aggregates (puts together) reports obtained from self.get_memory_summary() 01328 01329 Positional arguments: 01330 report - Report generated during build procedure. 01331 """ 01332 from prettytable import PrettyTable 01333 columns_text = ['name', 'target', 'toolchain'] 01334 columns_int = ['static_ram', 'total_flash'] 01335 table = PrettyTable(columns_text + columns_int) 01336 01337 for col in columns_text: 01338 table.align[col] = 'l' 01339 01340 for col in columns_int: 01341 table.align[col] = 'r' 01342 01343 for target in report: 01344 for toolchain in report[target]: 01345 for name in report[target][toolchain]: 01346 for dlist in report[target][toolchain][name]: 01347 for dlistelem in dlist: 01348 # Get 'memory_usage' record and build table with 01349 # statistics 01350 record = dlist[dlistelem] 01351 if 'memory_usage' in record and record['memory_usage']: 01352 # Note that summary should be in the last record of 01353 # 'memory_usage' section. This is why we are 01354 # grabbing last "[-1]" record. 
01355 row = [ 01356 record['description'], 01357 record['target_name'], 01358 record['toolchain_name'], 01359 record['memory_usage'][-1]['summary'][ 01360 'static_ram'], 01361 record['memory_usage'][-1]['summary'][ 01362 'total_flash'], 01363 ] 01364 table.add_row(row) 01365 01366 result = "Memory map breakdown for built projects (values in Bytes):\n" 01367 result += table.get_string(sortby='name') 01368 return result 01369 01370 def write_build_report (build_report, template_filename, filename): 01371 """Write a build report to disk using a template file 01372 01373 Positional arguments: 01374 build_report - a report generated by the build system 01375 template_filename - a file that contains the template for the style of build 01376 report 01377 filename - the location on disk to write the file to 01378 """ 01379 build_report_failing = [] 01380 build_report_passing = [] 01381 01382 for report in build_report: 01383 if len(report["failing"]) > 0: 01384 build_report_failing.append(report) 01385 else: 01386 build_report_passing.append(report) 01387 01388 env = Environment(extensions=['jinja2.ext.with_']) 01389 env.loader = FileSystemLoader('ci_templates') 01390 template = env.get_template(template_filename) 01391 01392 with open(filename, 'w+') as placeholder: 01393 placeholder.write(template.render( 01394 failing_builds=build_report_failing, 01395 passing_builds=build_report_passing)) 01396 01397 01398 def merge_build_data(filename, toolchain_report, app_type): 01399 path_to_file = dirname(abspath(filename)) 01400 try: 01401 build_data = load(open(filename)) 01402 except (IOError, ValueError): 01403 build_data = {'builds': []} 01404 for tgt in toolchain_report.values(): 01405 for tc in tgt.values(): 01406 for project in tc.values(): 01407 for build in project: 01408 try: 01409 build[0]['elf'] = relpath(build[0]['elf'], path_to_file) 01410 build[0]['bin'] = relpath(build[0]['bin'], path_to_file) 01411 except KeyError: 01412 pass 01413 if 'type' not in build[0]: 
01414 build[0]['type'] = app_type 01415 build_data['builds'].append(build[0]) 01416 dump(build_data, open(filename, "wb"), indent=4, separators=(',', ': '))
Generated on Sun Jul 17 2022 08:25:20 by doxygen 1.7.2