Nicolas Borla / Mbed OS BBR_1Ebene
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers build_api.py Source File

build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .targets import TARGET_NAMES, TARGET_MAP
00046 from .libraries import Library
00047 from .toolchains import TOOLCHAIN_CLASSES
00048 from .config import Config
00049 
# Mbed OS release trains a target may declare support for ('2' = mbed 2.0,
# '5' = mbed OS 5); used by is_official_target() below.
RELEASE_VERSIONS = ['2', '5']
00051 
def prep_report (report, target_name, toolchain_name, id_name):
    """Ensure the nested report structure exists for one build.

    Creates report[target_name][toolchain_name][id_name] = [] for any
    missing level of the nesting; existing entries are left untouched.

    Positional arguments:
    report - the report to fill
    target_name - the target being used
    toolchain_name - the toolchain being used
    id_name - the name of the executable or library being built
    """
    # setdefault creates each nesting level only when it is missing,
    # replacing the previous three `if not x in y` checks
    report.setdefault(target_name, {}) \
          .setdefault(toolchain_name, {}) \
          .setdefault(id_name, [])
00069 
def prep_properties (properties, target_name, toolchain_name, vendor_label):
    """Setup test properties for one target/toolchain combination.

    Positional arguments:
    properties - the dict to fill
    target_name - the target the test is targeting
    toolchain_name - the toolchain that will compile the test
    vendor_label - the vendor
    """
    # setdefault creates the nesting levels only when missing, replacing
    # the previous `if not x in y` checks
    tc_props = properties.setdefault(target_name, {}) \
                         .setdefault(toolchain_name, {})
    tc_props["target"] = target_name
    tc_props["vendor"] = vendor_label
    tc_props["toolchain"] = toolchain_name
00088 
def create_result (target_name, toolchain_name, id_name, description):
    """Create a fresh result dictionary for one build.

    Positional arguments:
    target_name - the target being built for
    toolchain_name - the toolchain doing the building
    id_name - the name of the executable or library being built
    description - a human readable description of what's going on

    Returns the new result dict with elapsed_time/output zeroed out.
    """
    # A single dict literal replaces the key-by-key assignments
    return {
        "target_name": target_name,
        "toolchain_name": toolchain_name,
        "id": id_name,
        "description": description,
        "elapsed_time": 0,
        "output": "",
    }
00107 
def add_result_to_report (report, result):
    """Append a single result to a report dictionary.

    Positional arguments:
    report - the report to append to (nested target/toolchain/id structure)
    result - the result to append
    """
    # Stamp the result with the moment it was recorded and a unique id
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())
    # Locate the per-id bucket and append the result wrapped in a
    # one-element dict keyed by 0 (the format downstream consumers expect)
    bucket = report[result["target_name"]][result["toolchain_name"]]
    bucket[result["id"]].append({0: result})
00122 
def get_config (src_paths, target, toolchain_name, app_config=None):
    """Get the configuration object for a target-toolchain combination

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools

    Keyword arguments:
    app_config - location of a chosen mbed_app.json file

    Returns a 3-tuple (config data, macros, features) as reported by the
    Config object after all configuration files have been discovered.
    """
    # Convert src_paths to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Pass all params to the unified prepare_resources()
    # (build_dir is None: we only need config discovery, not a build)
    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
                                  app_config=app_config)

    # Scan src_path for config files
    resources = toolchain.scan_resources(src_paths[0])
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path))

    # Update configuration files until added features creates no changes.
    # Enabling a feature can pull in new source trees that carry their own
    # .json config files, which in turn can enable more features, so this
    # iterates to a fixpoint.
    prev_features = set()
    while True:
        # Update the configuration with any .json files found while scanning
        toolchain.config.add_config_files(resources.json_files)

        # Add features while we find new ones
        features = set(toolchain.config.get_features())
        if features == prev_features:
            # Feature set converged - no new config can appear
            break

        for feature in features:
            if feature in resources.features:
                resources += resources.features[feature]

        prev_features = features
    toolchain.config.validate_config()
    if toolchain.config.has_regions:
        # Force evaluation of the regions generator so that region
        # configuration errors surface here rather than later
        _ = list(toolchain.config.regions)

    cfg, macros = toolchain.config.get_config_data()
    features = toolchain.config.get_features()
    return cfg, macros, features
00167 
def is_official_target (target_name, version):
    """ Returns True, None if a target is part of the official release for the
    given version. Return False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name of the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the folowing toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = list(required_toolchains)
            required_toolchains_sorted.sort()
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = list(supported_toolchains)
            supported_toolchains_sorted.sort()

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the folowing toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains_sorted)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(supported_toolchains_sorted))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            # BUG FIX: the format string has two placeholders but was
            # formatted with the bare `version` value, which raised
            # TypeError instead of producing the reason message.
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif version not in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason
00244 
def transform_release_toolchains (toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    # Every release other than '5' keeps the supplied list unchanged
    if version != '5':
        return toolchains
    # mbed OS 5 pins the official toolchain set
    return ['ARM', 'GCC_ARM', 'IAR']
00258 
00259 
def get_mbed_official_release (version):
    """ Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS

    Raises InvalidReleaseTargetException if any target in the release
    fails the is_official_target() check.
    """
    # Collect every target that declares support for this release version
    release_targets = []
    for target in TARGET_NAMES:
        tgt = TARGET_MAP[target]
        if hasattr(tgt, 'release_versions') and version in tgt.release_versions:
            toolchains = transform_release_toolchains(
                tgt.supported_toolchains, version)
            release_targets.append((tgt.name, tuple(toolchains)))
    mbed_official_release = tuple(release_targets)

    # Sanity-check each collected target against the official criteria
    for name, _ in mbed_official_release:
        is_official, reason = is_official_target(name, version)
        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release
00292 
# Toolchain names that all compile with an ARM-family compiler
ARM_COMPILERS = ("ARM", "ARMC6", "uARM")
def target_supports_toolchain(target, toolchain_name):
    """Return whether *target* can be built with *toolchain_name*.

    An ARM-family toolchain is accepted as long as the target supports at
    least one ARM-family compiler; other toolchains must match exactly.
    """
    supported = target.supported_toolchains
    if toolchain_name not in ARM_COMPILERS:
        return toolchain_name in supported
    return any(tc in supported for tc in ARM_COMPILERS)
00299 
00300 
def prepare_toolchain (src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, config=None, app_config=None,
                      build_profile=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    build_dir - the directory where build output will be placed
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a list of mergeable build profiles

    Raises NotSupportedException when the target does not support the
    requested toolchain, and KeyError when the toolchain name is unknown.
    """

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target
    if not target_supports_toolchain(target, toolchain_name):
        raise NotSupportedException(
            "Target {} is not supported by toolchain {}".format(
                target.name, toolchain_name))

    try:
        cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    # Merge every supplied build profile into a single set of flags,
    # grouped by compiler flavour
    profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
    for contents in build_profile or []:
        for key in profile:
            profile[key].extend(contents[toolchain_name].get(key, []))

    toolchain = cur_tc(
        target, notify, macros, build_dir=build_dir, build_profile=profile)

    # Attach the shared config and the build parallelism/clean settings
    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean

    return toolchain
00352 
00353 def _printihex(ihex):
00354     import pprint
00355     pprint.PrettyPrinter().pprint(ihex.todict())
00356 
def _real_region_size(region):
    """Return the number of bytes a region actually occupies.

    When the region is backed by a file, the size is the address span of
    that file's contents; otherwise fall back to the declared region size.
    """
    try:
        contents = intelhex_offset(region.filename, offset=region.start)
        return contents.maxaddr() - contents.minaddr() + 1
    except AttributeError:
        # No file backing this region - use its configured size
        return region.size
00363 
00364 
def _fill_header(region_list, current_region):
    """Fill an application header region

    This is done in three steps:
     * Fill the whole region with zeros
     * Fill const, timestamp and size entries with their data
     * Fill the digests using this header as the header region

    Positional arguments:
    region_list - all regions; used to resolve size/digest member targets
    current_region - the header region; its `filename` attribute holds the
                     list of header members rather than an actual file name

    Returns an IntelHex object containing the filled header.
    """
    region_dict = {r.name: r for r in region_list}
    header = IntelHex()
    header.puts(current_region.start, b'\x00' * current_region.size)
    start = current_region.start
    for member in current_region.filename:
        # Renamed from `type`/`hash` to avoid shadowing builtins
        _, member_type, subtype, data = member
        member_size = Config.header_member_size(member)
        if member_type == "const":
            fmt = {
                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
            }[subtype]
            header.puts(start, struct.pack(fmt, integer(data, 0)))
        elif member_type == "timestamp":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            # BUG FIX: time() returns a float, but struct's L/Q formats
            # require an integer (struct.error on Python 3) - truncate.
            header.puts(start, struct.pack(fmt, int(time())))
        elif member_type == "size":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            # Sum the real (file-backed) sizes of the referenced regions
            size = sum(_real_region_size(region_dict[r]) for r in data)
            header.puts(start, struct.pack(fmt, size))
        elif member_type == "digest":
            if data == "header":
                # Digest everything written to the header so far
                ih = header[:start]
            else:
                ih = intelhex_offset(region_dict[data].filename,
                                     offset=region_dict[data].start)
            if subtype.startswith("CRCITT32"):
                fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
                header.puts(start,
                            struct.pack(fmt, zlib.crc32(ih.tobinarray())))
            elif subtype.startswith("SHA"):
                if subtype == "SHA256":
                    digest = hashlib.sha256()
                elif subtype == "SHA512":
                    digest = hashlib.sha512()
                digest.update(ih.tobinarray())
                header.puts(start, digest.digest())
        start += member_size
    return header
00412 
def merge_region_list (region_list, destination, notify, padding=b'\xFF'):
    """Merge the region_list into a single image

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    notify - notifier object used for progress/info messages
    padding - bytes to fill gaps with
    """
    merged = IntelHex()
    # Renamed from `format` to avoid shadowing the builtin
    _, img_format = splitext(destination)

    notify.info("Merging Regions")

    for region in region_list:
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if isinstance(region.filename, list):
            # A list means this region is a generated header: build it and
            # substitute the generated hex file for the member list
            header_basename, _ = splitext(destination)
            header_filename = header_basename + "_header.hex"
            _fill_header(region_list, region).tofile(header_filename,
                                                     format='hex')
            region = region._replace(filename=header_filename)
        if region.filename:
            notify.info("  Filling region %s with %s"
                        % (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part_size = (part.maxaddr() - part.minaddr()) + 1
            if part_size > region.size:
                raise ToolException("Contents of region %s does not fit"
                                    % region.name)
            merged.merge(part)
            pad_size = region.size - part_size
            # Pad the gap up to the next region, except after the last one
            if pad_size > 0 and region != region_list[-1]:
                notify.info("  Padding region %s with 0x%x bytes" %
                            (region.name, pad_size))
                # Hex files carry explicit addresses, so the gap needs no
                # fill bytes; binary formats need explicit padding.
                # BUG FIX: this used `format is ".hex"`, an identity check
                # against a literal that is never true for a computed
                # string, so hex output was (incorrectly) padded as well.
                if img_format != ".hex":
                    merged.puts(merged.maxaddr() + 1, padding * pad_size)

    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    notify.info("Space used after regions merged: 0x%x" %
                (merged.maxaddr() - merged.minaddr() + 1))
    with open(destination, "wb+") as output:
        merged.tofile(output, format=img_format.strip("."))
00458 
def scan_resources (src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None, collect_ignores=False):
    """ Scan resources using initialized toolchain

    Positional arguments
    src_paths - the paths to source directories
    toolchain - valid toolchain object
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources
    base_path - base path passed through to the toolchain's scanner
    collect_ignores - when True, the scanner records ignored directories

    Returns the populated resources object after the config system has had
    a chance to expand/modify it.
    """

    # Scan src_path
    resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
                                         collect_ignores=collect_ignores)
    for path in src_paths[1:]:
        resources.add(toolchain.scan_resources(path, base_path=base_path,
                                               collect_ignores=collect_ignores))

    # Scan dependency paths for include dirs
    # (only their include dirs are kept - dependencies are not compiled here)
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            resources.inc_dirs.extend(lib_resources.inc_dirs)

    # Add additional include directories if passed
    # (accepts either a single path or a list of paths)
    if inc_dirs:
        if isinstance(inc_dirs, list):
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # Load resources into the config system which might expand/modify resources
    # based on config data
    resources = toolchain.config.load_resources(resources)

    # Set the toolchain's configuration data
    toolchain.set_config_data(toolchain.config.get_config_data())

    return resources
00499 
def build_project (src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None, clean=False,
                  notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
                  report=None, properties=None, project_id=None,
                  project_description=None, config=None,
                  app_config=None, build_profile=None, stats_depth=None):
    """ Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do it's job
    clean - Rebuild everything if True
    notify - Notify function for logs
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict of test properties filled via prep_properties
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    stats_depth - depth level for memap to display file/dirs

    Returns the path of the built image; re-raises any build exception after
    recording it in the report.
    """

    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        # BUG FIX: inc_dirs may legitimately be None here; create the list
        # instead of calling .extend() on None (AttributeError before)
        if inc_dirs is None:
            inc_dirs = []
        inc_dirs.extend(map(dirname, libraries_paths))

    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, config=config,
        app_config=app_config, build_profile=build_profile)

    # The first path will give the name to the library
    name = (name or toolchain.config.name or
            basename(normpath(abspath(src_paths[0]))))
    notify.info("Building project %s (%s, %s)" %
                (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        if toolchain.config.has_regions:
            # Region-based targets: link the application image first, then
            # merge all configured regions into the final output image
            res, _ = toolchain.link_program(resources, build_path,
                                            name + "_application")
            region_list = list(toolchain.config.regions)
            region_list = [r._replace(filename=res) if r.active else r
                           for r in region_list]
            res = "%s.%s" % (join(build_path, name),
                             getattr(toolchain.target, "OUTPUT_EXT", "bin"))
            merge_region_list(region_list, res, notify)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table', stats_depth)
            notify.info(memap_table)

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', stats_depth, map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', stats_depth, map_csv)

        resources.detect_duplicates(toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = (memap_instance.mem_report
                                          if memap_instance is not None
                                          else None)
            cur_result["bin"] = res
            cur_result["elf"] = splitext(res)[0] + ".elf"
            cur_result.update(toolchain.report)

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report is not None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
00640 
00641 def build_library (src_paths, build_path, target, toolchain_name,
00642                   dependencies_paths=None, name=None, clean=False,
00643                   archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
00644                   report=None, properties=None, project_id=None,
00645                   remove_config_header_file=False, app_config=None,
00646                   build_profile=None):
00647     """ Build a library
00648 
00649     Positional arguments:
00650     src_paths - a path or list of paths that contain all files needed to build
00651                 the library
00652     build_path - the directory where all of the object files will be placed
00653     target - the MCU or board that the project will compile for
00654     toolchain_name - the name of the build tools
00655 
00656     Keyword arguments:
00657     dependencies_paths - The location of libraries to include when linking
00658     name - the name of the library
00659     clean - Rebuild everything if True
00660     archive - whether the library will create an archive file
00661     notify - Notify function for logs
00662     macros - additional macros
00663     inc_dirs - additional directories where include files may be found
00664     jobs - how many compilers we can run at once
00665     report - a dict where a result may be appended
00666     properties - UUUUHHHHH beats me
00667     project_id - the name that goes in the report
00668     remove_config_header_file - delete config header file when done building
00669     app_config - location of a chosen mbed_app.json file
00670     build_profile - a dict of flags that will be passed to the compiler
00671     """
00672 
00673     # Convert src_path to a list if needed
00674     if not isinstance(src_paths, list):
00675         src_paths = [src_paths]
00676 
00677     # Build path
00678     if archive:
00679         # Use temp path when building archive
00680         tmp_path = join(build_path, '.temp')
00681         mkdir(tmp_path)
00682     else:
00683         tmp_path = build_path
00684 
00685     # Clean the build directory
00686     if clean and exists(tmp_path):
00687         rmtree(tmp_path)
00688     mkdir(tmp_path)
00689 
00690     # Pass all params to the unified prepare_toolchain()
00691     toolchain = prepare_toolchain(
00692         src_paths, build_path, target, toolchain_name, macros=macros,
00693         clean=clean, jobs=jobs, notify=notify, app_config=app_config,
00694         build_profile=build_profile)
00695 
00696     # The first path will give the name to the library
00697     if name is None:
00698         name = basename(normpath(abspath(src_paths[0])))
00699     notify.info("Building library %s (%s, %s)" %
00700                    (name, toolchain.target.name, toolchain_name))
00701 
00702     # Initialize reporting
00703     if report != None:
00704         start = time()
00705         # If project_id is specified, use that over the default name
00706         id_name = project_id.upper() if project_id else name.upper()
00707         description = name
00708         vendor_label = toolchain.target.extra_labels[0]
00709         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00710         cur_result = create_result(toolchain.target.name, toolchain_name,
00711                                    id_name, description)
00712         cur_result['type'] = 'library'
00713         if properties != None:
00714             prep_properties(properties, toolchain.target.name, toolchain_name,
00715                             vendor_label)
00716 
00717     for src_path in src_paths:
00718         if not exists(src_path):
00719             error_msg = "The library source folder does not exist: %s", src_path
00720             if report != None:
00721                 cur_result["output"] = error_msg
00722                 cur_result["result"] = "FAIL"
00723                 add_result_to_report(report, cur_result)
00724             raise Exception(error_msg)
00725 
00726     try:
00727         # Call unified scan_resources
00728         resources = scan_resources(src_paths, toolchain,
00729                                    dependencies_paths=dependencies_paths,
00730                                    inc_dirs=inc_dirs)
00731 
00732 
00733         # Copy headers, objects and static libraries - all files needed for
00734         # static lib
00735         toolchain.copy_files(resources.headers, build_path, resources=resources)
00736         toolchain.copy_files(resources.objects, build_path, resources=resources)
00737         toolchain.copy_files(resources.libraries, build_path,
00738                              resources=resources)
00739         toolchain.copy_files(resources.json_files, build_path,
00740                              resources=resources)
00741         if resources.linker_script:
00742             toolchain.copy_files(resources.linker_script, build_path,
00743                                  resources=resources)
00744 
00745         if resources.hex_files:
00746             toolchain.copy_files(resources.hex_files, build_path,
00747                                  resources=resources)
00748 
00749         # Compile Sources
00750         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00751         resources.objects.extend(objects)
00752 
00753         if archive:
00754             toolchain.build_library(objects, build_path, name)
00755 
00756         if remove_config_header_file:
00757             config_header_path = toolchain.get_config_header()
00758             if config_header_path:
00759                 remove(config_header_path)
00760 
00761         if report != None:
00762             end = time()
00763             cur_result["elapsed_time"] = end - start
00764             cur_result["result"] = "OK"
00765 
00766 
00767             add_result_to_report(report, cur_result)
00768         return True
00769 
00770     except Exception as exc:
00771         if report != None:
00772             end = time()
00773 
00774             if isinstance(exc, ToolException):
00775                 cur_result["result"] = "FAIL"
00776             elif isinstance(exc, NotSupportedException):
00777                 cur_result["result"] = "NOT_SUPPORTED"
00778 
00779             cur_result["elapsed_time"] = end - start
00780 
00781             add_result_to_report(report, cur_result)
00782 
00783         # Let Exception propagate
00784         raise
00785 
00786 ######################
00787 ### Legacy methods ###
00788 ######################
00789 
def mbed2_obj_path(target_name, toolchain_name):
    """Return the legacy (mbed 2) object-directory path fragment.

    Joins "TARGET_<target_name>" with "TOOLCHAIN_<class name>", where the
    class name is taken from the toolchain class registered under
    toolchain_name in TOOLCHAIN_CLASSES.
    """
    toolchain_cls_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
    return join("TARGET_%s" % target_name, "TOOLCHAIN_%s" % toolchain_cls_name)
00793 
def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
              notify=None, jobs=1, report=None, properties=None,
              build_profile=None):
    """ Legacy method for building mbed libraries

    Returns True on success (a build result is only recorded when the
    toolchain reports that an update was actually needed) and False when the
    library is not supported on the requested target/toolchain combination.

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict passed through to prep_properties() for report metadata
    build_profile - a dict of flags that will be passed to the compiler
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # Combine macros from the parameter list with macros from the library
    # definition.  Build a new list so the caller's list is not mutated.
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros = macros + lib_macros
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    # Initialize reporting before any work so failures below can be recorded
    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # %-format the path (the original code accidentally built a tuple)
            error_msg = ("The library source folder does not exist: %s"
                         % src_path)

            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Create the desired build directory structure: final binaries in
        # bin_path, intermediate object files in tmp_path
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)

        # Toolchain instance
        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs, clean=clean)

        notify.info("Building library %s (%s, %s)" %
                    (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library.
        # These files usually are not in the same directory as the source
        # files, so the previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(
                    map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy headers into the build directory so dependents can include them
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource,
                                                     dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        # Only record an OK result when the archive was actually rebuilt
        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
00946 
00947 # We do have unique legacy conventions about how we build and package the mbed
00948 # library
def build_mbed_libs (target, toolchain_name, clean=False, macros=None,
                    notify=None, jobs=1, report=None, properties=None,
                    build_profile=None):
    """ Build the legacy (mbed 2) mbed SDK library for a target/toolchain.

    Returns True if the library was built and False if building was skipped
    (i.e. the toolchain is not supported by the target).

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict passed through to prep_properties() for report metadata
    build_profile - a dict of flags that will be passed to the compiler
    """

    # Set up reporting bookkeeping first so any failure below can be recorded
    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support: record a SKIP and bail out when the target
    # does not list this toolchain
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' %
              (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' %
              (target.name, supported_toolchains_text, 's'
               if len(target.supported_toolchains) > 1 else ''))

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths: per-target headers under build_target,
        # per-toolchain objects/archives under build_toolchain
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        # Toolchain: intermediate object files live in a .temp subdirectory
        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
            build_profile=build_profile, jobs=jobs, clean=clean)

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # mbed
        notify.info("Building library %s (%s, %s)" %
                       ('MBED', target.name, toolchain_name))

        # Common Headers: copy the API headers next to MBED_LIBRARIES and the
        # drivers/platform/hal headers into their dedicated subdirectories
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        # CMSIS: headers go to the per-target dir; linker scripts and binary
        # blobs go to the per-toolchain dir
        cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH)
        toolchain.copy_files(cmsis_implementation.headers, build_target)
        toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain)
        toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain)

        # HAL / target-specific implementation, split the same way
        hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(cmsis_implementation + hal_implementation,
                                            library_incdirs + incdirs + [tmp_path])
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources
        # NOTE(review): starting from None and using += relies on the
        # Resources object handling None in its right-hand addition
        # (__radd__); confirm against the Resources class before changing.
        mbed_resources = None
        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(dir)

        objects = toolchain.compile_sources(mbed_resources,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as objects as opposed to
        # way the linker search for symbols in archives. These are:
        #   - mbed_retarget.o: to make sure that the C standard lib symbols get
        #                 overridden
        #   - mbed_board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various
        #                       weak SDK functions
        #   - mbed_main.o: this contains main redirection
        separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o',
                                            'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        # Keep the special objects out of the archive; they are shipped as
        # loose .o files alongside it instead
        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            # Append the exception text to whatever output was collected
            cur_result["output"] += str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
01103 
01104 
def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    unique_supported_toolchains = []

    if not release_targets:
        # No release restriction: collect from every known target
        for target in TARGET_NAMES:
            for toolchain in TARGET_MAP[target].supported_toolchains:
                if toolchain not in unique_supported_toolchains:
                    unique_supported_toolchains.append(toolchain)
    else:
        # Each release target is a (name, [toolchains, ...]) tuple
        for target in release_targets:
            for toolchain in target[1]:
                if toolchain not in unique_supported_toolchains:
                    unique_supported_toolchains.append(toolchain)

    # Targets that build with ARM can also build with ARMC6; guard against
    # adding a duplicate when ARMC6 is already listed explicitly (a duplicate
    # breaks consumers such as the PrettyTable column list)
    if ("ARM" in unique_supported_toolchains and
            "ARMC6" not in unique_supported_toolchains):
        unique_supported_toolchains.append("ARMC6")

    return unique_supported_toolchains
01130 
def mcu_toolchain_list(release_version='5'):
    """ Return a newline-separated listing of the release versions followed
    by every toolchain supported in the given release.

    Keyword arguments:
    release_version - major release version (e.g. '2' or '5') to restrict
                      the toolchain list to; any non-string value lists all
    """
    # Force release_version to lowercase if it is a string; anything else
    # means "all known targets and toolchains".  EAFP here replaces the old
    # isinstance(release_version, basestring) check, which raised NameError
    # under Python 3 (basestring is Python 2 only).
    try:
        release_version = release_version.lower()
    except AttributeError:
        release_version = 'all'

    # Query the official release contents for every known version
    version_release_targets = {}
    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    columns = (["mbed OS %s" % x for x in RELEASE_VERSIONS] +
               unique_supported_toolchains)
    return "\n".join(columns)
01162 
01163 
def mcu_target_list(release_version='5'):
    """ Return a newline-separated listing of target names.

    Keyword arguments:
    release_version - major release version (e.g. '2' or '5') to restrict
                      the target list to; any non-string value lists all
    """
    # Force release_version to lowercase if it is a string; anything else
    # means "all known targets".  EAFP here replaces the old
    # isinstance(release_version, basestring) check, which raised NameError
    # under Python 3 (basestring is Python 2 only).
    try:
        release_version = release_version.lower()
    except AttributeError:
        release_version = 'all'

    # Query the official release contents for every known version
    version_release_targets = {}
    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    # Release tuples are (name, [toolchains]); fall back to all target names
    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    return "\n".join(target_names)
01199 
01200 
def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """ Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - remove results that match the string
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable

    # Force release_version to lowercase if it is a string; anything else
    # means "all known targets and toolchains".  EAFP here replaces the old
    # isinstance(release_version, basestring) check, which raised NameError
    # under Python 3 (basestring is Python 2 only).
    try:
        release_version = release_version.lower()
    except AttributeError:
        release_version = 'all'

    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [x[0] for x in
                                                 version_release_targets[
                                                     version]]

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns)
    # Align table: center everything except the left-aligned target names
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        # One column per release version: is this target in that release?
        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        # One column per toolchain; ARMC6 support piggybacks on ARM support
        for unique_toolchain in unique_supported_toolchains:
            if (unique_toolchain in TARGET_MAP[target].supported_toolchains or
                (unique_toolchain == "ARMC6" and
                 "ARM" in TARGET_MAP[target].supported_toolchains)):
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
             else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    # Toolchain count is only shown when the filter narrowed to one target
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result
01292 
01293 
def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of

    Returns None when the target is unknown.
    """
    if target not in TARGET_MAP:
        return None
    return TARGET_MAP[target].supported_toolchains
01302 
01303 
def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for

    Returns an empty string when there are no results.
    """
    if not result_list:
        return ""
    bullet_lines = ["  * %s" % entry for entry in result_list]
    return "%s\n%s\n" % (build_name, "\n".join(bullet_lines))
01317 
def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results
    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    # Imported locally so building works without extra modules
    from prettytable import PrettyTable

    text_columns = ['name', 'target', 'toolchain']
    numeric_columns = ['static_ram', 'total_flash']
    table = PrettyTable(text_columns + numeric_columns)

    for column in text_columns:
        table.align[column] = 'l'
    for column in numeric_columns:
        table.align[column] = 'r'

    # Walk target -> toolchain -> project -> list of per-build dicts and
    # pull statistics out of every record that carries a 'memory_usage'
    for per_target in report.values():
        for per_toolchain in per_target.values():
            for build_list in per_toolchain.values():
                for dlist in build_list:
                    for record in dlist.values():
                        if 'memory_usage' in record and record['memory_usage']:
                            # The summary lives in the last entry of the
                            # 'memory_usage' section, hence the [-1]
                            summary = record['memory_usage'][-1]['summary']
                            table.add_row([
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                summary['static_ram'],
                                summary['total_flash'],
                            ])

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result
01362 
def write_build_report(build_report, template_filename, filename):
    """Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of build
                        report
    filename - the location on disk to write the file to
    """
    # Partition reports into failing (at least one failure) and passing
    build_report_failing = [rep for rep in build_report
                            if len(rep["failing"]) > 0]
    build_report_passing = [rep for rep in build_report
                            if len(rep["failing"]) == 0]

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    rendered = template.render(failing_builds=build_report_failing,
                               passing_builds=build_report_passing)
    with open(filename, 'w+') as placeholder:
        placeholder.write(rendered)
01389 
01390 
def merge_build_data(filename, toolchain_report, app_type):
    """Merge the results of a build into an existing build-data JSON file.

    Each per-project build record has its 'elf' and 'bin' paths rewritten
    relative to the JSON file's directory and is tagged with app_type (when
    it has no 'type' yet) before being appended to the file's 'builds' list.

    Positional arguments:
    filename - path of the build data JSON file to read and update
    toolchain_report - nested report dict: target -> toolchain -> project ->
                       list of build records
    app_type - value stored in each record's 'type' field when missing
    """
    path_to_file = dirname(abspath(filename))
    try:
        # Load previous build data if present; start fresh otherwise.
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(filename) as in_file:
            build_data = load(in_file)
    except (IOError, ValueError):
        build_data = {'builds': []}
    for tgt in toolchain_report.values():
        for tc in tgt.values():
            for project in tc.values():
                for build in project:
                    try:
                        build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
                        build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
                    except KeyError:
                        # Builds with no binaries keep their record unchanged
                        pass
                    if 'type' not in build[0]:
                        build[0]['type'] = app_type
                    build_data['builds'].append(build[0])
    # Write in text mode: json.dump emits str, so the original "wb" mode
    # raised TypeError under Python 3 (and the handle was never closed)
    with open(filename, "w") as out_file:
        dump(build_data, out_file, indent=4, separators=(',', ': '))