Anders Blomdell / mbed-sdk-tools

build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .resources import Resources, FileType, FileRef
00046 from .notifier.mock import MockNotifier
00047 from .targets import TARGET_NAMES, TARGET_MAP, CORE_ARCH, set_targets_json_location
00048 from .libraries import Library
00049 from .toolchains import TOOLCHAIN_CLASSES, mbedToolchain
00050 from .config import Config
00051 from .build_profiles import find_build_profile, get_toolchain_profile, find_targets_json
00052 
00053 RELEASE_VERSIONS = ['2', '5']
00054 
00055 def prep_report (report, target_name, toolchain_name, id_name):
00056     """Setup report keys
00057 
00058     Positional arguments:
00059     report - the report to fill
00060     target_name - the target being used
00061     toolchain_name - the toolchain being used
00062     id_name - the name of the executable or library being built
00063     """
00064     if target_name not in report:
00065         report[target_name] = {}
00066 
00067     if toolchain_name not in report[target_name]:
00068         report[target_name][toolchain_name] = {}
00069 
00070     if id_name not in report[target_name][toolchain_name]:
00071         report[target_name][toolchain_name][id_name] = []
00072 
00073 def prep_properties (properties, target_name, toolchain_name, vendor_label):
00074     """Setup test properties
00075 
00076     Positional arguments:
00077     properties - the dict to fill
00078     target_name - the target the test is targeting
00079     toolchain_name - the toolchain that will compile the test
00080     vendor_label - the vendor
00081     """
00082     if target_name not in properties:
00083         properties[target_name] = {}
00084 
00085     if toolchain_name not in properties[target_name]:
00086         properties[target_name][toolchain_name] = {}
00087 
00088     properties[target_name][toolchain_name]["target"] = target_name
00089     properties[target_name][toolchain_name]["vendor"] = vendor_label
00090     properties[target_name][toolchain_name]["toolchain"] = toolchain_name
00091 
00092 def create_result (target_name, toolchain_name, id_name, description):
00093     """Create a result dictionary
00094 
00095     Positional arguments:
00096     target_name - the target being built for
00097     toolchain_name - the toolchain doing the building
00098     id_name - the name of the executable or library being built
00099     description - a human readable description of what's going on
00100     """
00101     cur_result = {}
00102     cur_result["target_name"] = target_name
00103     cur_result["toolchain_name"] = toolchain_name
00104     cur_result["id"] = id_name
00105     cur_result["description"] = description
00106     cur_result["elapsed_time"] = 0
00107     cur_result["output"] = ""
00108 
00109     return cur_result
00110 
00111 def add_result_to_report (report, result):
00112     """Add a single result to a report dictionary
00113 
00114     Positional arguments:
00115     report - the report to append to
00116     result - the result to append
00117     """
00118     result["date"] = datetime.datetime.utcnow().isoformat()
00119     result["uuid"] = str(uuid.uuid1())
00120     target = result["target_name"]
00121     toolchain = result["toolchain_name"]
00122     id_name = result['id']
00123     result_wrap = {0: result}
00124     report[target][toolchain][id_name].append(result_wrap)
00125 
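# Usage sketch (hypothetical example, not taken from the original file): the
# three helpers above are meant to be used together -- prep_report() creates
# the nested report keys, create_result() builds one entry and
# add_result_to_report() appends it. The target, toolchain and id names below
# are placeholders.
def _example_report_flow():
    report = {}
    prep_report(report, "K64F", "GCC_ARM", "MY_APP")
    result = create_result("K64F", "GCC_ARM", "MY_APP", "example build")
    result["result"] = "OK"
    add_result_to_report(report, result)
    return report
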
00126 def get_config (src_paths, target, toolchain_name=None, app_config=None):
00127     """Get the configuration object for a target-toolchain combination
00128 
00129     Positional arguments:
00130     src_paths - paths to scan for the configuration files
00131     target - the device we are building for
00132     toolchain_name - the string that identifies the build tools
00133     """
00134     # Convert src_paths to a list if needed
00135     if not isinstance(src_paths, list):
00136         src_paths = [src_paths]
00137 
00138     res = Resources(MockNotifier())
00139     if toolchain_name:
00140         toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
00141                                       app_config=app_config)
00142         config = toolchain.config
00143         res.scan_with_toolchain(src_paths, toolchain, exclude=False)
00144     else:
00145         config = Config(target, src_paths, app_config=app_config)
00146         res.scan_with_config(src_paths, config)
00147     if config.has_regions:
00148         _ = list(config.regions)
00149 
00150     cfg, macros = config.get_config_data()
00151     features = config.get_features()
00152     return cfg, macros, features
00153 
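# Usage sketch (hypothetical example): get_config() can be used to inspect the
# effective configuration without building anything. The source path and the
# "K64F"/"GCC_ARM" names are placeholders, not values fixed by this file.
def _example_get_config():
    cfg, macros, features = get_config(".", "K64F", toolchain_name="GCC_ARM")
    return cfg, macros, features
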
00154 def is_official_target (target_name, version):
00155     """ Returns True, None if a target is part of the official release for the
00156     given version. Returns False, 'reason' if a target is not part of the
00157     official release for the given version.
00158 
00159     Positional arguments:
00160     target_name - Name of the target (ex. 'K64F')
00161     version - The release version string. Should be a string contained within
00162               RELEASE_VERSIONS
00163     """
00164 
00165     result = True
00166     reason = None
00167     target = TARGET_MAP[target_name]
00168 
00169     if hasattr(target, 'release_versions') \
00170        and version in target.release_versions:
00171         if version == '2':
00172             # For version 2, either ARM or uARM toolchain support is required
00173             required_toolchains = set(['ARM', 'uARM'])
00174 
00175             if not required_toolchains.intersection(
00176                     set(target.supported_toolchains)):
00177                 result = False
00178                 reason = ("Target '%s' must support " % target.name) + \
00179                     ("one of the following toolchains to be included in the") + \
00180                     ((" mbed 2.0 official release: %s" + linesep) %
00181                      ", ".join(required_toolchains)) + \
00182                     ("Currently it is only configured to support the ") + \
00183                     ("following toolchains: %s" %
00184                      ", ".join(target.supported_toolchains))
00185 
00186         elif version == '5':
00187             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
00188             required_toolchains = [
00189                 set(['ARM', 'GCC_ARM', 'IAR']),
00190                 set(['ARMC6'])
00191             ]
00192             supported_toolchains = set(target.supported_toolchains)
00193 
00194             if not any(r.issubset(supported_toolchains)
00195                        for r in required_toolchains):
00196                 result = False
00197                 reason = ("Target '%s' must support " % target.name) + \
00198                     ("ALL of the following toolchains to be included in the") + \
00199                     ((" mbed OS 5.0 official release: %s" + linesep) %
00200                      ", ".join(sorted(required_toolchains[0]))) + \
00201                     ("Currently it is only configured to support the ") + \
00202                     ("following toolchains: %s" %
00203                      ", ".join(sorted(supported_toolchains)))
00204 
00205             elif target.default_lib != 'std':
00206                 result = False
00207                 reason = ("Target '%s' must set the " % target.name) + \
00208                     ("'default_lib' to 'std' to be included in the ") + \
00209                     ("mbed OS 5.0 official release." + linesep) + \
00210                     ("Currently it is set to '%s'" % target.default_lib)
00211 
00212         else:
00213             result = False
00214             reason = ("Target '%s' has set an invalid release version of '%s'"
00215                       % (target.name, version)) + \
00216                 (" Please choose from the following release versions: %s" %
00217                  ', '.join(RELEASE_VERSIONS))
00218 
00219     else:
00220         result = False
00221         if not hasattr(target, 'release_versions'):
00222             reason = "Target '%s' " % target.name
00223             reason += "does not have the 'release_versions' key set"
00224         elif version not in target.release_versions:
00225             reason = "Target '%s' does not contain the version '%s' " % \
00226                      (target.name, version)
00227             reason += "in its 'release_versions' key"
00228 
00229     return result, reason
00230 
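# Usage sketch (hypothetical example): checking whether a target belongs to an
# official release; "K64F" is an arbitrary example name.
def _example_release_check():
    is_official, reason = is_official_target("K64F", "5")
    if not is_official:
        print(reason)
    return is_official
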
00231 def transform_release_toolchains (toolchains, version):
00232     """ Given a list of toolchains and a release version, return a list of
00233     only the supported toolchains for that release
00234 
00235     Positional arguments:
00236     toolchains - The list of toolchains
00237     version - The release version string. Should be a string contained within
00238               RELEASE_VERSIONS
00239     """
00240     if version == '5':
00241         return ['ARM', 'GCC_ARM', 'IAR']
00242     else:
00243         return toolchains
00244 
00245 
00246 def get_mbed_official_release (version):
00247     """ Given a release version string, return a tuple that contains a target
00248     and the supported toolchains for that release.
00249     Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
00250                            ('K64F', ('ARM', 'GCC_ARM')), ...)
00251 
00252     Positional arguments:
00253     version - The version string. Should be a string contained within
00254               RELEASE_VERSIONS
00255     """
00256 
00257     mbed_official_release = (
00258         tuple(
00259             tuple(
00260                 [
00261                     TARGET_MAP[target].name,
00262                     tuple(transform_release_toolchains(
00263                         TARGET_MAP[target].supported_toolchains, version))
00264                 ]
00265             ) for target in TARGET_NAMES \
00266             if (hasattr(TARGET_MAP[target], 'release_versions')
00267                 and version in TARGET_MAP[target].release_versions)
00268         )
00269     )
00270 
00271     for target in mbed_official_release:
00272         is_official, reason = is_official_target(target[0], version)
00273 
00274         if not is_official:
00275             raise InvalidReleaseTargetException(reason)
00276 
00277     return mbed_official_release
00278 
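# Usage sketch (hypothetical example): iterating over the
# (target name, toolchains) pairs returned by get_mbed_official_release().
def _example_list_release():
    for name, toolchains in get_mbed_official_release('5'):
        print("%s: %s" % (name, ", ".join(toolchains)))
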
00279 ARM_COMPILERS = ("ARM", "ARMC6", "uARM")
00280 def target_supports_toolchain(target, toolchain_name):
00281     if toolchain_name in ARM_COMPILERS:
00282         return any(tc in target.supported_toolchains for tc in ARM_COMPILERS)
00283     else:
00284         return toolchain_name in target.supported_toolchains
00285 
00286 
00287 def prepare_toolchain (src_paths, build_dir, target, toolchain_name,
00288                       macros=None, clean=False, jobs=1,
00289                       notify=None, config=None, app_config=None,
00290                       build_profile=None, ignore=None):
00291     """ Prepares resource related objects - toolchain, target, config
00292 
00293     Positional arguments:
00294     src_paths - the paths to source directories
00295     target - ['LPC1768', 'LPC11U24', etc.]
00296     toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
00297 
00298     Keyword arguments:
00299     macros - additional macros
00300     clean - Rebuild everything if True
00301     jobs - how many compilers we can run at once
00302     notify - Notify function for logs
00303     config - a Config object to use instead of creating one
00304     app_config - location of a chosen mbed_app.json file
00305     build_profile - a list of mergeable build profiles
00306     ignore - list of paths to add to mbedignore
00307     """
00308 
00309     # We need to remove all paths which are repeated to avoid
00310     # multiple compilations and linking with the same objects
00311     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
00312 
00313     # If the configuration object was not yet created, create it now
00314     config = config or Config(target, src_paths, app_config=app_config)
00315     target = config.target
00316     if not target_supports_toolchain(target, toolchain_name):
00317         raise NotSupportedException(
00318             "Target {} is not supported by toolchain {}".format(
00319                 target.name, toolchain_name))
00320     if (toolchain_name == "ARM" and CORE_ARCH[target.core] == 8):
00321         toolchain_name = "ARMC6"
00322 
00323     try:
00324         cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
00325     except KeyError:
00326         raise KeyError("Toolchain %s not supported" % toolchain_name)
00327 
00328     profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
00329     for contents in build_profile or []:
00330         for key in profile:
00331             profile[key].extend(contents[toolchain_name].get(key, []))
00332 
00333     toolchain = cur_tc(
00334         target, notify, macros, build_dir=build_dir, build_profile=profile)
00335 
00336     toolchain.config = config
00337     toolchain.jobs = jobs
00338     toolchain.build_all = clean
00339 
00340     if ignore:
00341         toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)
00342 
00343     return toolchain
00344 
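# Usage sketch (hypothetical example): constructing a toolchain object
# directly. The paths and the "K64F"/"GCC_ARM" names are placeholders;
# MockNotifier is used here only to keep the call quiet.
def _example_prepare_toolchain():
    return prepare_toolchain(
        ["."], "BUILD", "K64F", "GCC_ARM",
        notify=MockNotifier(), jobs=2)
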
00345 def _printihex(ihex):
00346     import pprint
00347     pprint.PrettyPrinter().pprint(ihex.todict())
00348 
00349 def _real_region_size(region):
00350     try:
00351         part = intelhex_offset(region.filename, offset=region.start)
00352         return (part.maxaddr() - part.minaddr()) + 1
00353     except AttributeError:
00354         return region.size
00355 
00356 
00357 def _fill_header(region_list, current_region):
00358     """Fill an application header region
00359 
00360     This is done in three steps:
00361      * Fill the whole region with zeros
00362      * Fill const, timestamp and size entries with their data
00363      * Fill the digests using this header as the header region
00364     """
00365     region_dict = {r.name: r for r in region_list}
00366     header = IntelHex()
00367     header.puts(current_region.start, b'\x00' * current_region.size)
00368     start = current_region.start
00369     for member in current_region.filename:
00370         _, type, subtype, data = member
00371         member_size = Config.header_member_size(member)
00372         if type == "const":
00373             fmt = {
00374                 "8le": "<B", "16le": "<H", "32le": "<L", "64le": "<Q",
00375                 "8be": ">B", "16be": ">H", "32be": ">L", "64be": ">Q"
00376             }[subtype]
00377             header.puts(start, struct.pack(fmt, integer(data, 0)))
00378         elif type == "timestamp":
00379             fmt = {"32le": "<L", "64le": "<Q",
00380                    "32be": ">L", "64be": ">Q"}[subtype]
00381             header.puts(start, struct.pack(fmt, int(time())))
00382         elif type == "size":
00383             fmt = {"32le": "<L", "64le": "<Q",
00384                    "32be": ">L", "64be": ">Q"}[subtype]
00385             size = sum(_real_region_size(region_dict[r]) for r in data)
00386             header.puts(start, struct.pack(fmt, size))
00387         elif type == "digest":
00388             if data == "header":
00389                 ih = header[:start]
00390             else:
00391                 ih = intelhex_offset(region_dict[data].filename, offset=region_dict[data].start)
00392             if subtype.startswith("CRCITT32"):
00393                 fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
00394                 header.puts(start, struct.pack(fmt, zlib.crc32(ih.tobinarray())))
00395             elif subtype.startswith("SHA"):
00396                 if subtype == "SHA256":
00397                     hash = hashlib.sha256()
00398                 elif subtype == "SHA512":
00399                     hash = hashlib.sha512()
00400                 hash.update(ih.tobinarray())
00401                 header.puts(start, hash.digest())
00402         start += Config.header_member_size(member)
00403     return header
00404 
00405 def merge_region_list (region_list, destination, notify, padding=b'\xFF'):
00406     """Merge the region_list into a single image
00407 
00408     Positional arguments:
00409     region_list - list of regions, which should contain filenames
00410     destination - file name to write all regions to
00411     padding - bytes to fill gaps with
00412     """
00413     merged = IntelHex()
00414     _, format = splitext(destination)
00415 
00416     notify.info("Merging Regions")
00417 
00418     for region in region_list:
00419         if region.active and not region.filename:
00420             raise ToolException("Active region has no contents: No file found.")
00421         if isinstance(region.filename, list):
00422             header_basename, _ = splitext(destination)
00423             header_filename = header_basename + "_header.hex"
00424             _fill_header(region_list, region).tofile(header_filename, format='hex')
00425             region = region._replace(filename=header_filename)
00426         if region.filename:
00427             notify.info("  Filling region %s with %s" % (region.name, region.filename))
00428             part = intelhex_offset(region.filename, offset=region.start)
00429             part.start_addr = None
00430             part_size = (part.maxaddr() - part.minaddr()) + 1
00431             if part_size > region.size:
00432                 raise ToolException("Contents of region %s do not fit"
00433                                     % region.name)
00434             merged.merge(part)
00435             pad_size = region.size - part_size
00436             if pad_size > 0 and region != region_list[-1]:
00437                 notify.info("  Padding region %s with 0x%x bytes" %
00438                             (region.name, pad_size))
00439                 # The offset will be preserved in the generated hex
00440                 # file when we're done, so we can skip padding for hex
00441                 # output and only pad binary output formats.
00442                 if format != ".hex":
00443                     merged.puts(merged.maxaddr() + 1, padding * pad_size)
00444 
00445     if not exists(dirname(destination)):
00446         makedirs(dirname(destination))
00447     notify.info("Space used after regions merged: 0x%x" %
00448                 (merged.maxaddr() - merged.minaddr() + 1))
00449     merged.tofile(destination, format=format.strip("."))
00450 
00451 
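# Usage sketch (hypothetical example): merge_region_list() works on region
# objects exposing the name/start/size/active/filename attributes used above.
# The namedtuple below is only a stand-in for the real region type from the
# config module, and the addresses and file names are placeholders that would
# have to exist on disk for the merge to succeed.
def _example_merge_regions():
    from collections import namedtuple
    ExampleRegion = namedtuple(
        "ExampleRegion", "name start size active filename")
    regions = [
        ExampleRegion("bootloader", 0x0, 0x8000, False, "bootloader.hex"),
        ExampleRegion("application", 0x8000, 0x78000, True, "app.hex"),
    ]
    merge_region_list(regions, join("BUILD", "combined.hex"), MockNotifier())
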
00452 UPDATE_WHITELIST = (
00453     "application",
00454 )
00455 
00456 
00457 def build_project (src_paths, build_path, target, toolchain_name,
00458                   libraries_paths=None, linker_script=None, clean=False,
00459                   notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
00460                   report=None, properties=None, project_id=None,
00461                   project_description=None, config=None,
00462                   app_config=None, build_profile=None, stats_depth=None, ignore=None):
00463     """ Build a project. A project may be a test or a user program.
00464 
00465     Positional arguments:
00466     src_paths - a path or list of paths that contain all files needed to build
00467                 the project
00468     build_path - the directory where all of the object files will be placed
00469     target - the MCU or board that the project will compile for
00470     toolchain_name - the name of the build tools
00471 
00472     Keyword arguments:
00473     libraries_paths - The location of libraries to include when linking
00474     linker_script - the file that drives the linker to do its job
00475     clean - Rebuild everything if True
00476     notify - Notify function for logs
00477     name - the name of the project
00478     macros - additional macros
00479     inc_dirs - additional directories where include files may be found
00480     jobs - how many compilers we can run at once
00481     report - a dict where a result may be appended
00482     properties - a dict to fill with test properties (see prep_properties)
00483     project_id - the name put in the report
00484     project_description - the human-readable version of what this thing does
00485     config - a Config object to use instead of creating one
00486     app_config - location of a chosen mbed_app.json file
00487     build_profile - a dict of flags that will be passed to the compiler
00488     stats_depth - depth level for memap to display file/dirs
00489     ignore - list of paths to add to mbedignore
00490     """
00491     # Convert src_path to a list if needed
00492     if not isinstance(src_paths, list):
00493         src_paths = [src_paths]
00494     # Extend src_paths with libraries_paths
00495     if libraries_paths is not None:
00496         src_paths.extend(libraries_paths)
00497         inc_dirs.extend(map(dirname, libraries_paths))
00498 
00499     if clean and exists(build_path):
00500         rmtree(build_path)
00501     mkdir(build_path)
00502 
00503     ###################################
00504     # mbed Classic/2.0/library support #
00505 
00506     # Find build system profile
00507     profile = None
00508     targets_json = None
00509     for path in src_paths:
00510         profile = find_build_profile(path) or profile
00511         if profile:
00512             targets_json = join(dirname(abspath(__file__)), 'legacy_targets.json')
00513         else:
00514             targets_json = find_targets_json(path) or targets_json
00515 
00516     # Apply targets.json to active targets
00517     if targets_json:
00518         notify.info("Using targets from %s" % targets_json)
00519         set_targets_json_location(targets_json)
00520 
00521     # Apply profile to toolchains
00522     if profile:
00523         def init_hook(self):
00524             profile_data = get_toolchain_profile(self.name, profile)
00525             if not profile_data:
00526                 return
00527             notify.info("Using toolchain %s profile %s" % (self.name, profile))
00528 
00529             for k, v in profile_data.items():
00530                 if k in self.flags:
00531                     self.flags[k] = v
00532                 else:
00533                     setattr(self, k, v)
00534 
00535         mbedToolchain.init = init_hook
00536 
00537     # mbed Classic/2.0/library support #
00538     ###################################
00539 
00540     toolchain = prepare_toolchain(
00541         src_paths, build_path, target, toolchain_name, macros=macros,
00542         clean=clean, jobs=jobs, notify=notify, config=config,
00543         app_config=app_config, build_profile=build_profile, ignore=ignore)
00544     toolchain.version_check()
00545 
00546     # The first path will give the name to the library
00547     name = (name or toolchain.config.name or
00548             basename(normpath(abspath(src_paths[0]))))
00549     notify.info("Building project %s (%s, %s)" %
00550                 (name, toolchain.target.name, toolchain_name))
00551 
00552     # Initialize reporting
00553     if report != None:
00554         start = time()
00555         # If project_id is specified, use that over the default name
00556         id_name = project_id.upper() if project_id else name.upper()
00557         description = project_description if project_description else name
00558         vendor_label = toolchain.target.extra_labels[0]
00559         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00560         cur_result = create_result(toolchain.target.name, toolchain_name,
00561                                    id_name, description)
00562         if properties != None:
00563             prep_properties(properties, toolchain.target.name, toolchain_name,
00564                             vendor_label)
00565 
00566     try:
00567         resources = Resources(notify).scan_with_toolchain(
00568             src_paths, toolchain, inc_dirs=inc_dirs)
00569 
00570         # Change linker script if specified
00571         if linker_script is not None:
00572             resources.add_file_ref(linker_script, linker_script)
00573 
00574         # Compile Sources
00575         objects = toolchain.compile_sources(resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
00576         resources.add_files_to_type(FileType.OBJECT, objects)
00577 
00578         # Link Program
00579         if toolchain.config.has_regions:
00580             binary, _ = toolchain.link_program(resources, build_path, name + "_application")
00581             region_list = list(toolchain.config.regions)
00582             region_list = [r._replace(filename=binary) if r.active else r
00583                            for r in region_list]
00584             res = "%s.%s" % (join(build_path, name),
00585                              getattr(toolchain.target, "OUTPUT_EXT", "bin"))
00586             merge_region_list(region_list, res, notify)
00587             update_regions = [
00588                 r for r in region_list if r.name in UPDATE_WHITELIST
00589             ]
00590             if update_regions:
00591                 update_res = "%s_update.%s" % (
00592                     join(build_path, name),
00593                     getattr(toolchain.target, "OUTPUT_EXT", "bin")
00594                 )
00595                 merge_region_list(update_regions, update_res, notify)
00596                 res = (res, update_res)
00597             else:
00598                 res = (res, None)
00599         else:
00600             res, _ = toolchain.link_program(resources, build_path, name)
00601             res = (res, None)
00602 
00603         memap_instance = getattr(toolchain, 'memap_instance', None)
00604         memap_table = ''
00605         if memap_instance:
00606             # Write output to stdout in text (pretty table) format
00607             memap_table = memap_instance.generate_output('table', stats_depth)
00608             notify.info(memap_table)
00609 
00610             # Write output to file in JSON format
00611             map_out = join(build_path, name + "_map.json")
00612             memap_instance.generate_output('json', stats_depth, map_out)
00613 
00614             # Write output to file in CSV format for the CI
00615             map_csv = join(build_path, name + "_map.csv")
00616             memap_instance.generate_output('csv-ci', stats_depth, map_csv)
00617 
00618             map_html = join(build_path, name + "_map.html")
00619             memap_instance.generate_output('html', stats_depth, map_html)
00620 
00621         resources.detect_duplicates()
00622 
00623         if report != None:
00624             end = time()
00625             cur_result["elapsed_time"] = end - start
00626             cur_result["result"] = "OK"
00627             cur_result["memory_usage"] = (memap_instance.mem_report
00628                                           if memap_instance is not None else None)
00629             cur_result["bin"] = res[0]
00630             cur_result["elf"] = splitext(res[0])[0] + ".elf"
00631             cur_result.update(toolchain.report)
00632 
00633             add_result_to_report(report, cur_result)
00634 
00635         return res
00636 
00637     except Exception as exc:
00638         if report != None:
00639             end = time()
00640 
00641             if isinstance(exc, NotSupportedException):
00642                 cur_result["result"] = "NOT_SUPPORTED"
00643             else:
00644                 cur_result["result"] = "FAIL"
00645 
00646             cur_result["elapsed_time"] = end - start
00647 
00648             add_result_to_report(report, cur_result)
00649         # Let Exception propagate
00650         raise
00651 
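# Usage sketch (hypothetical example): a minimal build_project() invocation.
# The source directory, build directory and the "K64F"/"GCC_ARM" names are
# placeholders; MockNotifier keeps the call quiet.
def _example_build_project():
    return build_project(
        ["source"], join("BUILD", "K64F", "GCC_ARM"), "K64F", "GCC_ARM",
        notify=MockNotifier(), jobs=4)
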
00652 def build_library (src_paths, build_path, target, toolchain_name,
00653                   dependencies_paths=None, name=None, clean=False,
00654                   archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
00655                   report=None, properties=None, project_id=None,
00656                   remove_config_header_file=False, app_config=None,
00657                   build_profile=None, ignore=None):
00658     """ Build a library
00659 
00660     Positional arguments:
00661     src_paths - a path or list of paths that contain all files needed to build
00662                 the library
00663     build_path - the directory where all of the object files will be placed
00664     target - the MCU or board that the project will compile for
00665     toolchain_name - the name of the build tools
00666 
00667     Keyword arguments:
00668     dependencies_paths - The location of libraries to include when linking
00669     name - the name of the library
00670     clean - Rebuild everything if True
00671     archive - whether the library will create an archive file
00672     notify - Notify function for logs
00673     macros - additional macros
00674     inc_dirs - additional directories where include files may be found
00675     jobs - how many compilers we can run at once
00676     report - a dict where a result may be appended
00677     properties - a dict to fill with test properties (see prep_properties)
00678     project_id - the name that goes in the report
00679     remove_config_header_file - delete config header file when done building
00680     app_config - location of a chosen mbed_app.json file
00681     build_profile - a dict of flags that will be passed to the compiler
00682     ignore - list of paths to add to mbedignore
00683     """
00684 
00685     # Convert src_path to a list if needed
00686     if not isinstance(src_paths, list):
00687         src_paths = [src_paths]
00688     src_paths = [relpath(s) for s in src_paths]
00689 
00690     # Build path
00691     if archive:
00692         # Use temp path when building archive
00693         tmp_path = join(build_path, '.temp')
00694         mkdir(tmp_path)
00695     else:
00696         tmp_path = build_path
00697 
00698     # Clean the build directory
00699     if clean and exists(tmp_path):
00700         rmtree(tmp_path)
00701     mkdir(tmp_path)
00702 
00703     # Pass all params to the unified prepare_toolchain()
00704     toolchain = prepare_toolchain(
00705         src_paths, build_path, target, toolchain_name, macros=macros,
00706         clean=clean, jobs=jobs, notify=notify, app_config=app_config,
00707         build_profile=build_profile, ignore=ignore)
00708 
00709     # The first path will give the name to the library
00710     if name is None:
00711         name = basename(normpath(abspath(src_paths[0])))
00712     notify.info("Building library %s (%s, %s)" %
00713                    (name, toolchain.target.name, toolchain_name))
00714 
00715     # Initialize reporting
00716     if report != None:
00717         start = time()
00718         # If project_id is specified, use that over the default name
00719         id_name = project_id.upper() if project_id else name.upper()
00720         description = name
00721         vendor_label = toolchain.target.extra_labels[0]
00722         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00723         cur_result = create_result(toolchain.target.name, toolchain_name,
00724                                    id_name, description)
00725         cur_result['type'] = 'library'
00726         if properties != None:
00727             prep_properties(properties, toolchain.target.name, toolchain_name,
00728                             vendor_label)
00729 
00730     for src_path in src_paths:
00731         if not exists(src_path):
00732             error_msg = "The library source folder does not exist: %s" % src_path
00733             if report != None:
00734                 cur_result["output"] = error_msg
00735                 cur_result["result"] = "FAIL"
00736                 add_result_to_report(report, cur_result)
00737             raise Exception(error_msg)
00738 
00739     try:
00740         res = Resources(notify).scan_with_toolchain(
00741             src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)
00742 
00743         # Copy headers, objects and static libraries - all files needed for
00744         # static lib
00745         to_copy = (
00746             res.get_file_refs(FileType.HEADER) +
00747             res.get_file_refs(FileType.OBJECT) +
00748             res.get_file_refs(FileType.LIB) +
00749             res.get_file_refs(FileType.JSON) +
00750             res.get_file_refs(FileType.LD_SCRIPT) +
00751             res.get_file_refs(FileType.HEX) +
00752             res.get_file_refs(FileType.BIN)
00753         )
00754         toolchain.copy_files(to_copy, build_path)
00755         # Compile Sources
00756         objects = toolchain.compile_sources(
00757             res, res.get_file_paths(FileType.INC_DIR))
00758         res.add_files_to_type(FileType.OBJECT, objects)
00759 
00760         if archive:
00761             toolchain.build_library(objects, build_path, name)
00762 
00763         if remove_config_header_file:
00764             config_header_path = toolchain.get_config_header()
00765             if config_header_path:
00766                 remove(config_header_path)
00767 
00768         if report != None:
00769             end = time()
00770             cur_result["elapsed_time"] = end - start
00771             cur_result["result"] = "OK"
00772             add_result_to_report(report, cur_result)
00773         return True
00774 
00775     except Exception as exc:
00776         if report != None:
00777             end = time()
00778 
00779             if isinstance(exc, ToolException):
00780                 cur_result["result"] = "FAIL"
00781             elif isinstance(exc, NotSupportedException):
00782                 cur_result["result"] = "NOT_SUPPORTED"
00783 
00784             cur_result["elapsed_time"] = end - start
00785 
00786             add_result_to_report(report, cur_result)
00787 
00788         # Let Exception propagate
00789         raise
00790 
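# Usage sketch (hypothetical example): building a static library out of a
# source tree. The paths and the target/toolchain names are placeholders.
def _example_build_library():
    return build_library(
        ["mylib"], join("BUILD", "libs"), "K64F", "GCC_ARM",
        name="mylib", archive=True, notify=MockNotifier(), jobs=2)
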
00791 ######################
00792 ### Legacy methods ###
00793 ######################
00794 
00795 def mbed2_obj_path(target_name, toolchain_name):
00796     real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
00797     return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)
00798 
00799 def build_lib (lib_id, target, toolchain_name, clean=False, macros=None,
00800               notify=None, jobs=1, report=None, properties=None,
00801               build_profile=None, ignore=None):
00802     """ Legacy method for building mbed libraries
00803 
00804     Positional arguments:
00805     lib_id - the library's unique identifier
00806     target - the MCU or board that the project will compile for
00807     toolchain_name - the name of the build tools
00808 
00809     Keyword arguments:
00810     clean - Rebuild everything if True
00811     macros - additional macros
00812     notify - Notify function for logs
00813     jobs - how many compilers we can run at once
00814     report - a dict where a result may be appended
00815     properties - a dict to fill with test properties (see prep_properties)
00816     build_profile - a dict of flags that will be passed to the compiler
00817     ignore - list of paths to add to mbedignore
00818     """
00819     lib = Library(lib_id)
00820     if not lib.is_supported(target, toolchain_name):
00821         print('Library "%s" is not yet supported on target %s with toolchain %s'
00822               % (lib_id, target.name, toolchain_name))
00823         return False
00824 
00825     # We need to combine macros from parameter list with macros from library
00826     # definition
00827     lib_macros = lib.macros if lib.macros else []
00828     if macros:
00829         macros.extend(lib_macros)
00830     else:
00831         macros = lib_macros
00832 
00833     src_paths = lib.source_dir
00834     build_path = lib.build_dir
00835     dependencies_paths = lib.dependencies
00836     inc_dirs = lib.inc_dirs
00837 
00838     if not isinstance(src_paths, list):
00839         src_paths = [src_paths]
00840 
00841     # The first path will give the name to the library
00842     name = basename(src_paths[0])
00843 
00844     if report is not None:
00845         start = time()
00846         id_name = name.upper()
00847         description = name
00848         vendor_label = target.extra_labels[0]
00849         cur_result = None
00850         prep_report(report, target.name, toolchain_name, id_name)
00851         cur_result = create_result(target.name, toolchain_name, id_name,
00852                                    description)
00853 
00854         if properties != None:
00855             prep_properties(properties, target.name, toolchain_name,
00856                             vendor_label)
00857 
00858     for src_path in src_paths:
00859         if not exists(src_path):
00860             error_msg = "The library source folder does not exist: %s" % src_path
00861 
00862             if report != None:
00863                 cur_result["output"] = error_msg
00864                 cur_result["result"] = "FAIL"
00865                 add_result_to_report(report, cur_result)
00866 
00867             raise Exception(error_msg)
00868 
00869     try:
00870         # Toolchain instance
00871         # Create the desired build directory structure
00872         bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
00873         mkdir(bin_path)
00874         tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
00875                                                             toolchain_name))
00876         mkdir(tmp_path)
00877 
00878         toolchain = prepare_toolchain(
00879             src_paths, tmp_path, target, toolchain_name, macros=macros,
00880             notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
00881             ignore=ignore)
00882 
00883         notify.info("Building library %s (%s, %s)" %
00884                     (name.upper(), target.name, toolchain_name))
00885 
00886         # Take into account the library configuration (MBED_CONFIG_FILE)
00887         config = toolchain.config
00888         config.add_config_files([MBED_CONFIG_FILE])
00889 
00890         # Scan Resources
00891         resources = Resources(notify).scan_with_toolchain(
00892             src_paths + (lib.inc_dirs_ext or []), toolchain,
00893             inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)
00894 
00895         # Copy Headers
00896         toolchain.copy_files(
00897             resources.get_file_refs(FileType.HEADER), build_path)
00898 
00899         dependencies_include_dir = Resources(notify).scan_with_toolchain([build_path], toolchain).inc_dirs
00900 
00901         # Compile Sources
00902         objects = []
00903         for resource in resources:
00904             objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))
00905 
00906         needed_update = toolchain.build_library(objects, bin_path, name)
00907 
00908         if report != None and needed_update:
00909             end = time()
00910             cur_result["elapsed_time"] = end - start
00911             cur_result["result"] = "OK"
00912 
00913             add_result_to_report(report, cur_result)
00914         return True
00915 
00916     except Exception:
00917         if report != None:
00918             end = time()
00919             cur_result["result"] = "FAIL"
00920             cur_result["elapsed_time"] = end - start
00921 
00922             add_result_to_report(report, cur_result)
00923 
00924         # Let Exception propagate
00925         raise
00926 
00927 
00928 # A number of compiled files need to be copied as objects as the linker
00929 # will not search for weak symbol overrides in archives. These are:
00930 #   - mbed_retarget.o: to make sure that the C standard lib symbols get
00931 #                      overridden
00932 #   - mbed_board.o: `mbed_die` is weak
00933 #   - mbed_overrides.o: this contains platform overrides of various
00934 #                       weak SDK functions
00935 #   - mbed_main.o: this contains main redirection
00936 #   - mbed_sdk_boot.o: this contains the main boot code in
00937 #   - PeripheralPins.o: PinMap can be weak
00938 SEPARATE_NAMES = [
00939     'PeripheralPins.o',
00940     'mbed_retarget.o',
00941     'mbed_board.o',
00942     'mbed_overrides.o',
00943     'mbed_main.o',
00944     'mbed_sdk_boot.o',
00945 ]
00946 
00947 
00948 def build_mbed_libs (target, toolchain_name, clean=False, macros=None,
00949                     notify=None, jobs=1, report=None, properties=None,
00950                     build_profile=None, ignore=None):
00951     """ Build legacy libraries for a target and toolchain pair
00952 
00953     Positional arguments:
00954     target - the MCU or board that the project will compile for
00955     toolchain_name - the name of the build tools
00956 
00957     Keyword arguments:
00958     clean - Rebuild everything if True
00959     macros - additional macros
00960     notify - Notify function for logs
00961     jobs - how many compilers we can run at once
00962     report - a dict where a result may be appended
00963     properties - a dict to fill with test properties (see prep_properties)
00964     build_profile - a dict of flags that will be passed to the compiler
00965     ignore - list of paths to add to mbedignore
00966 
00967     Return - True if target + toolchain built correctly, False if not supported
00968     """
00969 
00970     if report is not None:
00971         start = time()
00972         id_name = "MBED"
00973         description = "mbed SDK"
00974         vendor_label = target.extra_labels[0]
00975         cur_result = None
00976         prep_report(report, target.name, toolchain_name, id_name)
00977         cur_result = create_result(
00978             target.name, toolchain_name, id_name, description)
00979         if properties is not None:
00980             prep_properties(
00981                 properties, target.name, toolchain_name, vendor_label)
00982 
00983     if toolchain_name not in target.supported_toolchains:
00984         supported_toolchains_text = ", ".join(target.supported_toolchains)
00985         notify.info('The target {} does not support the toolchain {}'.format(
00986             target.name,
00987             toolchain_name
00988         ))
00989         notify.info('{} supports {} toolchain{}'.format(
00990             target.name,
00991             supported_toolchains_text,
00992             's' if len(target.supported_toolchains) > 1 else ''
00993         ))
00994 
00995         if report is not None:
00996             cur_result["result"] = "SKIP"
00997             add_result_to_report(report, cur_result)
00998 
00999         return False
01000 
01001     try:
01002         # Source and Build Paths
01003         build_toolchain = join(
01004             MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
01005         mkdir(build_toolchain)
01006 
01007         tmp_path = join(
01008             MBED_LIBRARIES,
01009             '.temp',
01010             mbed2_obj_path(target.name, toolchain_name)
01011         )
01012         mkdir(tmp_path)
01013 
01014         # Toolchain and config
01015         toolchain = prepare_toolchain(
01016             [""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
01017             build_profile=build_profile, jobs=jobs, clean=clean, ignore=ignore)
01018 
01019         config = toolchain.config
01020         config.add_config_files([MBED_CONFIG_FILE])
01021         toolchain.set_config_data(toolchain.config.get_config_data())
01022 
01023         # distribute header files
01024         toolchain.copy_files(
01025             [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
01026         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
01027 
01028         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
01029                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
01030                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
01031             resources = Resources(notify).scan_with_toolchain([dir], toolchain)
01032             toolchain.copy_files(
01033                 [FileRef(basename(p), p) for p
01034                  in resources.get_file_paths(FileType.HEADER)],
01035                 dest)
01036             library_incdirs.append(dest)
01037 
01038         # collect resources of the libs to compile
01039         cmsis_res = Resources(notify).scan_with_toolchain(
01040             [MBED_CMSIS_PATH], toolchain)
01041         hal_res = Resources(notify).scan_with_toolchain(
01042             [MBED_TARGETS_PATH], toolchain)
01043         mbed_resources = Resources(notify).scan_with_toolchain(
01044             [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain)
01045 
01046         incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs
01047 
01048         # Build Things
01049         notify.info("Building library %s (%s, %s)" %
01050                     ('MBED', target.name, toolchain_name))
01051         objects = toolchain.compile_sources(mbed_resources, incdirs)
01052         separate_objects = []
01053 
01054         for obj in objects:
01055             for name in SEPARATE_NAMES:
01056                 if obj.endswith(name):
01057                     separate_objects.append(obj)
01058 
01059         for obj in separate_objects:
01060             objects.remove(obj)
01061 
01062         toolchain.build_library(objects, build_toolchain, "mbed")
01063         notify.info("Building library %s (%s, %s)" %
01064                     ('CMSIS', target.name, toolchain_name))
01065         cmsis_objects = toolchain.compile_sources(cmsis_res, incdirs + [tmp_path])
01066         notify.info("Building library %s (%s, %s)" %
01067                     ('HAL', target.name, toolchain_name))
01068         hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])
01069 
01070         # Copy everything into the build directory
01071         to_copy_paths = [
01072             hal_res.get_file_paths(FileType.HEADER),
01073             hal_res.get_file_paths(FileType.HEX),
01074             hal_res.get_file_paths(FileType.BIN),
01075             hal_res.get_file_paths(FileType.LIB),
01076             cmsis_res.get_file_paths(FileType.HEADER),
01077             cmsis_res.get_file_paths(FileType.BIN),
01078             cmsis_res.get_file_paths(FileType.LD_SCRIPT),
01079             hal_res.get_file_paths(FileType.LD_SCRIPT),
01080             [MBED_CONFIG_FILE],
01081             cmsis_objects,
01082             hal_objects,
01083             separate_objects,
01084         ]
01085         to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
01086         toolchain.copy_files(to_copy, build_toolchain)
01087 
01088         if report is not None:
01089             end = time()
01090             cur_result["elapsed_time"] = end - start
01091             cur_result["result"] = "OK"
01092             add_result_to_report(report, cur_result)
01093 
01094         return True
01095 
01096     except Exception as exc:
01097         if report is not None:
01098             end = time()
01099             cur_result["result"] = "FAIL"
01100             cur_result["elapsed_time"] = end - start
01101 
01102             cur_result["output"] += str(exc)
01103 
01104             add_result_to_report(report, cur_result)
01105         raise
01106 
01107 
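# Usage sketch (hypothetical example): building the legacy mbed 2 libraries
# for one target/toolchain pair. The "K64F"/"GCC_ARM" choices are
# placeholders.
def _example_build_mbed_libs():
    target = TARGET_MAP["K64F"]
    return build_mbed_libs(target, "GCC_ARM", notify=MockNotifier(), jobs=4)
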
01108 def get_unique_supported_toolchains (release_targets=None):
01109     """ Get list of all unique toolchains supported by targets
01110 
01111     Keyword arguments:
01112     release_targets - tuple structure returned from get_mbed_official_release().
01113                       If release_targets is not specified, then it queries all
01114                       known targets
01115     """
01116     unique_supported_toolchains = []
01117 
01118     if not release_targets:
01119         for target in TARGET_NAMES:
01120             for toolchain in TARGET_MAP[target].supported_toolchains:
01121                 if toolchain not in unique_supported_toolchains:
01122                     unique_supported_toolchains.append(toolchain)
01123     else:
01124         for target in release_targets:
01125             for toolchain in target[1]:
01126                 if toolchain not in unique_supported_toolchains:
01127                     unique_supported_toolchains.append(toolchain)
01128 
01129     return unique_supported_toolchains
01130 
01131 
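# Usage sketch (hypothetical example): collecting the toolchains used anywhere
# in the mbed OS 5 official release.
def _example_unique_toolchains():
    release = get_mbed_official_release('5')
    return get_unique_supported_toolchains(release)
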
01132 def _lowercase_release_version(release_version):
01133     try:
01134         return release_version.lower()
01135     except AttributeError:
01136         return 'all'
01137 
01138 def mcu_toolchain_list (release_version='5'):
01139     """ Return a newline-separated list of toolchains for the given release
01140 
01141     """
01142     release_version = _lowercase_release_version(release_version)
01143     version_release_targets = {}
01144     version_release_target_names = {}
01145 
01146     for version in RELEASE_VERSIONS:
01147         version_release_targets[version] = get_mbed_official_release(version)
01148         version_release_target_names[version] = [x[0] for x in
01149                                                  version_release_targets[
01150                                                      version]]
01151 
01152     if release_version in RELEASE_VERSIONS:
01153         release_targets = version_release_targets[release_version]
01154     else:
01155         release_targets = None
01156 
01157     unique_supported_toolchains = get_unique_supported_toolchains(
01158         release_targets)
01159     columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + unique_supported_toolchains
01160     return "\n".join(columns)
01161 
01162 
01163 def mcu_target_list (release_version='5'):
01164     """ Return a newline-separated list of target names for the given release
01165 
01166     """
01167     release_version = _lowercase_release_version(release_version)
01168     version_release_targets = {}
01169     version_release_target_names = {}
01170 
01171     for version in RELEASE_VERSIONS:
01172         version_release_targets[version] = get_mbed_official_release(version)
01173         version_release_target_names[version] = [x[0] for x in
01174                                                  version_release_targets[
01175                                                      version]]
01176 
01177     if release_version in RELEASE_VERSIONS:
01178         release_targets = version_release_targets[release_version]
01179     else:
01180         release_targets = None
01181 
01182     target_names = []
01183 
01184     if release_targets:
01185         target_names = [x[0] for x in release_targets]
01186     else:
01187         target_names = TARGET_NAMES
01188 
01189     return "\n".join(target_names)
01190 
01191 
01192 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None,
01193                          release_version='5'):
01194     """  Shows target map using prettytable
01195 
01196     Keyword arguments:
01197     verbose_html - emit html instead of a simple table
01198     platform_filter - remove results that match the string
01199     release_version - get the matrix for this major version number
01200     """
01201     # Only use it in this function so building works without extra modules
01202     from prettytable import PrettyTable, HEADER
01203     release_version = _lowercase_release_version(release_version)
01204     version_release_targets = {}
01205     version_release_target_names = {}
01206 
01207     for version in RELEASE_VERSIONS:
01208         version_release_targets[version] = get_mbed_official_release(version)
01209         version_release_target_names[version] = [x[0] for x in
01210                                                  version_release_targets[
01211                                                      version]]
01212 
01213     if release_version in RELEASE_VERSIONS:
01214         release_targets = version_release_targets[release_version]
01215     else:
01216         release_targets = None
01217 
01218     unique_supported_toolchains = get_unique_supported_toolchains(
01219         release_targets)
01220     prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
01221 
01222     # All tests status table print
01223     columns = prepend_columns + unique_supported_toolchains
01224     table_printer = PrettyTable(columns, junction_char="|", hrules=HEADER)
01225     # Align table
01226     for col in columns:
01227         table_printer.align[col] = "c"
01228     table_printer.align["Target"] = "l"
01229 
01230     perm_counter = 0
01231     target_counter = 0
01232 
01233     target_names = []
01234 
01235     if release_targets:
01236         target_names = [x[0] for x in release_targets]
01237     else:
01238         target_names = TARGET_NAMES
01239 
01240     for target in sorted(target_names):
01241         if platform_filter is not None:
01242             # Filter out platforms using regex
01243             if re.search(platform_filter, target) is None:
01244                 continue
01245         target_counter += 1
01246 
01247         row = [target]  # First column is platform name
01248 
01249         for version in RELEASE_VERSIONS:
01250             if target in version_release_target_names[version]:
01251                 text = "Supported"
01252             else:
01253                 text = "-"
01254             row.append(text)
01255 
01256         for unique_toolchain in unique_supported_toolchains:
01257             tgt_obj = TARGET_MAP[target]
01258             if (unique_toolchain in tgt_obj.supported_toolchains or
01259                 (unique_toolchain == "ARMC6" and
01260                  "ARM" in tgt_obj.supported_toolchains) or
01261                 (unique_toolchain == "ARM" and
01262                  "ARMC6" in tgt_obj.supported_toolchains and
01263                  CORE_ARCH[tgt_obj.core] == 8)):
01264                 text = "Supported"
01265                 perm_counter += 1
01266             else:
01267                 text = "-"
01268 
01269             row.append(text)
01270         table_printer.add_row(row)
01271 
01272     result = table_printer.get_html_string() if verbose_html \
01273              else table_printer.get_string()
01274     result += "\n"
01275     result += "Supported targets: %d\n" % (target_counter)
01276     if target_counter == 1:
01277         result += "Supported toolchains: %d" % (perm_counter)
01278     return result
01279 
01280 
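# Usage sketch (hypothetical example): printing the support matrix for targets
# whose name matches a filter (requires the prettytable package).
def _example_print_matrix():
    print(mcu_toolchain_matrix(platform_filter="K64F", release_version='5'))
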
01281 def get_target_supported_toolchains (target):
01282     """ Returns target supported toolchains list
01283 
01284     Positional arguments:
01285     target - the target to get the supported toolchains of
01286     """
01287     return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
01288         else None
01289 
01290 
01291 def print_build_results (result_list, build_name):
01292     """ Generate result string for build results
01293 
01294     Positional arguments:
01295     result_list - the list of results to print
01296     build_name - the name of the build we are printing result for
01297     """
01298     result = ""
01299     if len(result_list) > 0:
01300         result += build_name + "\n"
01301         result += "\n".join(["  * %s" % f for f in result_list])
01302         result += "\n"
01303     return result
01304 
01305 def print_build_memory_usage (report):
01306     """ Generate result table with memory usage values for build results
01307     Aggregates the memory usage records collected during the build procedure
01308 
01309     Positional arguments:
01310     report - Report generated during build procedure.
01311     """
01312     from prettytable import PrettyTable, HEADER
01313     columns_text = ['name', 'target', 'toolchain']
01314     columns_int = ['static_ram', 'total_flash']
01315     table = PrettyTable(columns_text + columns_int, junction_char="|", hrules=HEADER)
01316 
01317     for col in columns_text:
01318         table.align[col] = 'l'
01319 
01320     for col in columns_int:
01321         table.align[col] = 'r'
01322 
01323     for target in report:
01324         for toolchain in report[target]:
01325             for name in report[target][toolchain]:
01326                 for dlist in report[target][toolchain][name]:
01327                     for dlistelem in dlist:
01328                         # Get 'memory_usage' record and build table with
01329                         # statistics
01330                         record = dlist[dlistelem]
01331                         if 'memory_usage' in record and record['memory_usage']:
01332                             # Note that summary should be in the last record of
01333                             # 'memory_usage' section. This is why we are
01334                             # grabbing last "[-1]" record.
01335                             row = [
01336                                 record['description'],
01337                                 record['target_name'],
01338                                 record['toolchain_name'],
01339                                 record['memory_usage'][-1]['summary'][
01340                                     'static_ram'],
01341                                 record['memory_usage'][-1]['summary'][
01342                                     'total_flash'],
01343                             ]
01344                             table.add_row(row)
01345 
01346     result = "Memory map breakdown for built projects (values in Bytes):\n"
01347     result += table.get_string(sortby='name')
01348     return result
01349 
01350 def write_build_report (build_report, template_filename, filename):
01351     """Write a build report to disk using a template file
01352 
01353     Positional arguments:
01354     build_report - a report generated by the build system
01355     template_filename - a file that contains the template for the style of build
01356                         report
01357     filename - the location on disk to write the file to
01358     """
01359     build_report_failing = []
01360     build_report_passing = []
01361 
01362     for report in build_report:
01363         if len(report["failing"]) > 0:
01364             build_report_failing.append(report)
01365         else:
01366             build_report_passing.append(report)
01367 
01368     env = Environment(extensions=['jinja2.ext.with_'])
01369     env.loader = FileSystemLoader('ci_templates')
01370     template = env.get_template(template_filename)
01371 
01372     with open(filename, 'w+') as placeholder:
01373         placeholder.write(template.render(
01374             failing_builds=build_report_failing,
01375             passing_builds=build_report_passing))
01376 
01377 
01378 def merge_build_data(filename, toolchain_report, app_type):
01379     path_to_file = dirname(abspath(filename))
01380     try:
01381         build_data = load(open(filename))
01382     except (IOError, ValueError):
01383         build_data = {'builds': []}
01384     for tgt in toolchain_report.values():
01385         for tc in tgt.values():
01386             for project in tc.values():
01387                 for build in project:
01388                     try:
01389                         build[0]['bin_fullpath'] = build[0]['bin']
01390                         build[0]['elf_fullpath'] = build[0]['elf']
01391                         build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
01392                         build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
01393                     except KeyError:
01394                         pass
01395                     if 'type' not in build[0]:
01396                         build[0]['type'] = app_type
01397                     build_data['builds'].insert(0, build[0])
01398     dump(build_data, open(filename, "w"), indent=4, separators=(',', ': '))