takashi kadono / Mbed OS Nucleo_446

Dependencies: ssd1331

build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer, generate_update_filename)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .resources import Resources, FileType, FileRef
00046 from .notifier.mock import MockNotifier
00047 from .targets import TARGET_NAMES, TARGET_MAP, CORE_ARCH
00048 from .libraries import Library
00049 from .toolchains import TOOLCHAIN_CLASSES
00050 from .config import Config
00051 
00052 RELEASE_VERSIONS = ['2', '5']
00053 
00054 def prep_report (report, target_name, toolchain_name, id_name):
00055     """Setup report keys
00056 
00057     Positional arguments:
00058     report - the report to fill
00059     target_name - the target being used
00060     toolchain_name - the toolchain being used
00061     id_name - the name of the executable or library being built
00062     """
00063     if not target_name in report:
00064         report[target_name] = {}
00065 
00066     if not toolchain_name in report[target_name]:
00067         report[target_name][toolchain_name] = {}
00068 
00069     if not id_name in report[target_name][toolchain_name]:
00070         report[target_name][toolchain_name][id_name] = []
00071 
00072 def prep_properties (properties, target_name, toolchain_name, vendor_label):
00073     """Setup test properties
00074 
00075     Positional arguments:
00076     properties - the dict to fill
00077     target_name - the target the test is targeting
00078     toolchain_name - the toolchain that will compile the test
00079     vendor_label - the vendor
00080     """
00081     if not target_name in properties:
00082         properties[target_name] = {}
00083 
00084     if not toolchain_name in properties[target_name]:
00085         properties[target_name][toolchain_name] = {}
00086 
00087     properties[target_name][toolchain_name]["target"] = target_name
00088     properties[target_name][toolchain_name]["vendor"] = vendor_label
00089     properties[target_name][toolchain_name]["toolchain"] = toolchain_name
00090 
00091 def create_result (target_name, toolchain_name, id_name, description):
00092     """Create a result dictionary
00093 
00094     Positional arguments:
00095     target_name - the target being built for
00096     toolchain_name - the toolchain doing the building
00097     id_name - the name of the executable or library being built
00098     description - a human readable description of what's going on
00099     """
00100     cur_result = {}
00101     cur_result["target_name"] = target_name
00102     cur_result["toolchain_name"] = toolchain_name
00103     cur_result["id"] = id_name
00104     cur_result["description"] = description
00105     cur_result["elapsed_time"] = 0
00106     cur_result["output"] = ""
00107 
00108     return cur_result
00109 
00110 def add_result_to_report (report, result):
00111     """Add a single result to a report dictionary
00112 
00113     Positional arguments:
00114     report - the report to append to
00115     result - the result to append
00116     """
00117     result["date"] = datetime.datetime.utcnow().isoformat()
00118     result["uuid"] = str(uuid.uuid1())
00119     target = result["target_name"]
00120     toolchain = result["toolchain_name"]
00121     id_name = result['id']
00122     result_wrap = {0: result}
00123     report[target][toolchain][id_name].append(result_wrap)
00124 
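# Illustrative sketch (not called anywhere in this module): how the three
# report helpers above are meant to be used together. The target, toolchain
# and project names below are hypothetical examples.
def _example_report_usage():
    """Show the report plumbing only; the values are placeholders."""
    report = {}
    prep_report(report, "K64F", "GCC_ARM", "MY_APP")
    result = create_result("K64F", "GCC_ARM", "MY_APP", "example application")
    result["result"] = "OK"
    add_result_to_report(report, result)
    # report["K64F"]["GCC_ARM"]["MY_APP"] now ends with {0: result}
    return report
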
00125 def get_config (src_paths, target, toolchain_name=None, app_config=None):
00126     """Get the configuration object for a target-toolchain combination
00127 
00128     Positional arguments:
00129     src_paths - paths to scan for the configuration files
00130     target - the device we are building for
00131     toolchain_name - the string that identifies the build tools
00132     """
00133     # Convert src_paths to a list if needed
00134     if not isinstance(src_paths, list):
00135         src_paths = [src_paths]
00136 
00137     res = Resources(MockNotifier())
00138     if toolchain_name:
00139         toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
00140                                       app_config=app_config)
00141         config = toolchain.config
00142         res.scan_with_toolchain(src_paths, toolchain, exclude=False)
00143     else:
00144         config = Config(target, src_paths, app_config=app_config)
00145         res.scan_with_config(src_paths, config)
00146     if config.has_regions:
00147         _ = list(config.regions)
00148 
00149     cfg, macros = config.get_config_data()
00150     features = config.get_features()
00151     return cfg, macros, features
00152 
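# Illustrative sketch: what a call to get_config() above looks like. The source
# path and target name are hypothetical; nothing here is executed on import.
def _example_get_config_usage():
    """Show the shape of get_config()'s return value only."""
    cfg, macros, features = get_config(".", "K64F", toolchain_name="GCC_ARM")
    # cfg and macros describe the collected configuration parameters and
    # macros; features is the list of enabled feature names.
    return cfg, macros, features
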
00153 def is_official_target (target_name, version):
00154     """ Returns True, None if a target is part of the official release for the
00155     given version. Returns False, 'reason' if a target is not part of the
00156     official release for the given version.
00157 
00158     Positional arguments:
00159     target_name - Name of the target (ex. 'K64F')
00160     version - The release version string. Should be a string contained within
00161               RELEASE_VERSIONS
00162     """
00163 
00164     result = True
00165     reason = None
00166     target = TARGET_MAP[target_name]
00167 
00168     if hasattr(target, 'release_versions') \
00169        and version in target.release_versions:
00170         if version == '2':
00171             # For version 2, either ARM or uARM toolchain support is required
00172             required_toolchains = set(['ARM', 'uARM'])
00173 
00174             if not len(required_toolchains.intersection(
00175                     set(target.supported_toolchains))) > 0:
00176                 result = False
00177                 reason = ("Target '%s' must support " % target.name) + \
00178                     ("one of the following toolchains to be included in the") + \
00179                     ((" mbed 2.0 official release: %s" + linesep) %
00180                      ", ".join(required_toolchains)) + \
00181                     ("Currently it is only configured to support the ") + \
00182                     ("following toolchains: %s" %
00183                      ", ".join(target.supported_toolchains))
00184 
00185         elif version == '5':
00186             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
00187             required_toolchains = [
00188                 set(['ARM', 'GCC_ARM', 'IAR']),
00189                 set(['ARMC6'])
00190             ]
00191             supported_toolchains = set(target.supported_toolchains)
00192 
00193             if not any(r.issubset(supported_toolchains)
00194                        for r in required_toolchains):
00195                 result = False
00196                 reason = ("Target '%s' must support " % target.name) + \
00197                     ("ALL of the following toolchains to be included in the") + \
00198                     ((" mbed OS 5.0 official release: %s" + linesep) %
00199                      ", ".join(sorted(required_toolchains[0]))) + \
00200                     ("Currently it is only configured to support the ") + \
00201                     ("following toolchains: %s" %
00202                      ", ".join(sorted(supported_toolchains)))
00203 
00204             elif not target.default_lib == 'std':
00205                 result = False
00206                 reason = ("Target '%s' must set the " % target.name) + \
00207                     ("'default_lib' to 'std' to be included in the ") + \
00208                     ("mbed OS 5.0 official release." + linesep) + \
00209                     ("Currently it is set to '%s'" % target.default_lib)
00210 
00211         else:
00212             result = False
00213             reason = ("Target '%s' has set an invalid release version of '%s'. " %
00214                       (target.name, version)) + \
00215                 ("Please choose from the following release versions: %s" %
00216                  ', '.join(RELEASE_VERSIONS))
00217 
00218     else:
00219         result = False
00220         if not hasattr(target, 'release_versions'):
00221             reason = "Target '%s' " % target.name
00222             reason += "does not have the 'release_versions' key set"
00223         elif not version in target.release_versions:
00224             reason = "Target '%s' does not contain the version '%s' " % \
00225                      (target.name, version)
00226             reason += "in its 'release_versions' key"
00227 
00228     return result, reason
00229 
00230 def transform_release_toolchains (toolchains, version):
00231     """ Given a list of toolchains and a release version, return a list of
00232     only the supported toolchains for that release
00233 
00234     Positional arguments:
00235     toolchains - The list of toolchains
00236     version - The release version string. Should be a string contained within
00237               RELEASE_VERSIONS
00238     """
00239     if version == '5':
00240         return ['ARM', 'GCC_ARM', 'IAR']
00241     else:
00242         return toolchains
00243 
00244 
00245 def get_mbed_official_release (version):
00246     """ Given a release version string, return a tuple of (target name,
00247     supported toolchains) pairs for that release.
00248     Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
00249                            ('K64F', ('ARM', 'GCC_ARM')), ...)
00250 
00251     Positional arguments:
00252     version - The version string. Should be a string contained within
00253               RELEASE_VERSIONS
00254     """
00255 
00256     mbed_official_release = (
00257         tuple(
00258             tuple(
00259                 [
00260                     TARGET_MAP[target].name,
00261                     tuple(transform_release_toolchains(
00262                         TARGET_MAP[target].supported_toolchains, version))
00263                 ]
00264             ) for target in TARGET_NAMES \
00265             if (hasattr(TARGET_MAP[target], 'release_versions')
00266                 and version in TARGET_MAP[target].release_versions)
00267         )
00268     )
00269 
00270     for target in mbed_official_release:
00271         is_official, reason = is_official_target(target[0], version)
00272 
00273         if not is_official:
00274             raise InvalidReleaseTargetException(reason)
00275 
00276     return mbed_official_release
00277 
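# Illustrative sketch: iterating the tuple returned by get_mbed_official_release().
def _example_official_release_usage():
    """Print each official release target with its release toolchains."""
    for name, toolchains in get_mbed_official_release('5'):
        print("%s: %s" % (name, ", ".join(toolchains)))
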
00278 ARM_COMPILERS = ("ARM", "ARMC6", "uARM")
00279 def target_supports_toolchain(target, toolchain_name):
00280     if toolchain_name in ARM_COMPILERS:
00281         return any(tc in target.supported_toolchains for tc in ARM_COMPILERS)
00282     else:
00283         return toolchain_name in target.supported_toolchains
00284 
00285 
00286 def prepare_toolchain (src_paths, build_dir, target, toolchain_name,
00287                       macros=None, clean=False, jobs=1,
00288                       notify=None, config=None, app_config=None,
00289                       build_profile=None, ignore=None):
00290     """ Prepares resource related objects - toolchain, target, config
00291 
00292     Positional arguments:
00293     src_paths - the paths to source directories
00294     target - ['LPC1768', 'LPC11U24', etc.]
00295     toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
00296 
00297     Keyword arguments:
00298     macros - additional macros
00299     clean - Rebuild everything if True
00300     jobs - how many compilers we can run at once
00301     notify - Notify function for logs
00302     config - a Config object to use instead of creating one
00303     app_config - location of a chosen mbed_app.json file
00304     build_profile - a list of mergeable build profiles
00305     ignore - list of paths to add to mbedignore
00306     """
00307 
00308     # We need to remove all paths which are repeated to avoid
00309     # multiple compilations and linking with the same objects
00310     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
00311 
00312     # If the configuration object was not yet created, create it now
00313     config = config or Config(target, src_paths, app_config=app_config)
00314     target = config.target
00315     if not target_supports_toolchain(target, toolchain_name):
00316         raise NotSupportedException(
00317             "Target {} is not supported by toolchain {}".format(
00318                 target.name, toolchain_name))
00319     if (toolchain_name == "ARM" and CORE_ARCH[target.core] == 8):
00320         toolchain_name = "ARMC6"
00321 
00322     try:
00323         cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
00324     except KeyError:
00325         raise KeyError("Toolchain %s not supported" % toolchain_name)
00326 
00327     profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
00328     for contents in build_profile or []:
00329         for key in profile:
00330             profile[key].extend(contents[toolchain_name].get(key, []))
00331 
00332     toolchain = cur_tc(
00333         target, notify, macros, build_dir=build_dir, build_profile=profile)
00334 
00335     toolchain.config = config
00336     toolchain.jobs = jobs
00337     toolchain.build_all = clean
00338 
00339     if ignore:
00340         toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)
00341 
00342     return toolchain
00343 
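# Illustrative sketch: a minimal prepare_toolchain() call. The source path,
# build directory and target name are hypothetical; a real build passes the
# values computed by build_project()/build_library() below.
def _example_prepare_toolchain_usage():
    """Show the arguments prepare_toolchain() expects, nothing more."""
    return prepare_toolchain(
        ["."], "BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
        jobs=4, notify=MockNotifier())
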
00344 def _printihex(ihex):
00345     import pprint
00346     pprint.PrettyPrinter().pprint(ihex.todict())
00347 
00348 def _real_region_size(region):
00349     try:
00350         part = intelhex_offset(region.filename, offset=region.start)
00351         return (part.maxaddr() - part.minaddr()) + 1
00352     except AttributeError:
00353         return region.size
00354 
00355 
00356 def _fill_header(region_list, current_region):
00357     """Fill an application header region
00358 
00359     This is done in three steps:
00360      * Fill the whole region with zeros
00361      * Fill const, timestamp and size entries with their data
00362      * Fill the digests using this header as the header region
00363     """
00364     region_dict = {r.name: r for r in region_list}
00365     header = IntelHex()
00366     header.puts(current_region.start, b'\x00' * current_region.size)
00367     start = current_region.start
00368     for member in current_region.filename:
00369         _, type, subtype, data = member
00370         member_size = Config.header_member_size(member)
00371         if type == "const":
00372             fmt = {
00373                 "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
00374                 "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
00375             }[subtype]
00376             header.puts(start, struct.pack(fmt, integer(data, 0)))
00377         elif type == "timestamp":
00378             fmt = {"32le": "<L", "64le": "<Q",
00379                    "32be": ">L", "64be": ">Q"}[subtype]
00380             header.puts(start, struct.pack(fmt, int(time())))
00381         elif type == "size":
00382             fmt = {"32le": "<L", "64le": "<Q",
00383                    "32be": ">L", "64be": ">Q"}[subtype]
00384             size = sum(_real_region_size(region_dict[r]) for r in data)
00385             header.puts(start, struct.pack(fmt, size))
00386         elif type == "digest":
00387             if data == "header":
00388                 ih = header[:start]
00389             else:
00390                 ih = intelhex_offset(region_dict[data].filename, offset=region_dict[data].start)
00391             if subtype.startswith("CRCITT32"):
00392                 fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
00393                 header.puts(start, struct.pack(fmt, zlib.crc32(ih.tobinarray())))
00394             elif subtype.startswith("SHA"):
00395                 if subtype == "SHA256":
00396                     hash = hashlib.sha256()
00397                 elif subtype == "SHA512":
00398                     hash = hashlib.sha512()
00399                 hash.update(ih.tobinarray())
00400                 header.puts(start, hash.digest())
00401         start += Config.header_member_size(member)
00402     return header
00403 
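# Illustrative sketch of the packing performed by _fill_header() above: a
# "const" member with subtype "32le" is written with struct.pack("<L", ...),
# a "64be" timestamp with struct.pack(">Q", ...). The constant is hypothetical.
def _example_header_packing():
    """Show the byte layouts produced for two header member types."""
    const_32le = struct.pack("<L", 0x5A5A5A5A)       # 4 bytes, little-endian
    timestamp_64be = struct.pack(">Q", int(time()))  # 8 bytes, big-endian
    return const_32le, timestamp_64be
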
00404 def merge_region_list (region_list, destination, notify, padding=b'\xFF'):
00405     """Merge the region_list into a single image
00406 
00407     Positional Arguments:
00408     region_list - list of regions, which should contain filenames
00409     destination - file name to write all regions to
00410     padding - bytes to fill gaps with
00411     """
00412     merged = IntelHex()
00413     _, format = splitext(destination)
00414 
00415     notify.info("Merging Regions")
00416 
00417     for region in region_list:
00418         if region.active and not region.filename:
00419             raise ToolException("Active region has no contents: No file found.")
00420         if isinstance(region.filename, list):
00421             header_basename, _ = splitext(destination)
00422             header_filename = header_basename + "_header.hex"
00423             _fill_header(region_list, region).tofile(header_filename, format='hex')
00424             region = region._replace(filename=header_filename)
00425         if region.filename:
00426             notify.info("  Filling region %s with %s" % (region.name, region.filename))
00427             part = intelhex_offset(region.filename, offset=region.start)
00428             part.start_addr = None
00429             part_size = (part.maxaddr() - part.minaddr()) + 1
00430             if part_size > region.size:
00431                 raise ToolException("Contents of region %s do not fit"
00432                                     % region.name)
00433             merged.merge(part)
00434             pad_size = region.size - part_size
00435             if pad_size > 0 and region != region_list[-1]:
00436                 notify.info("  Padding region %s with 0x%x bytes" %
00437                             (region.name, pad_size))
00438                 if format == ".hex":
00439                     """The offset will be in the hex file generated when we're done,
00440                     so we can skip padding here"""
00441                 else:
00442                     merged.puts(merged.maxaddr() + 1, padding * pad_size)
00443 
00444     if not exists(dirname(destination)):
00445         makedirs(dirname(destination))
00446     notify.info("Space used after regions merged: 0x%x" %
00447                 (merged.maxaddr() - merged.minaddr() + 1))
00448     merged.tofile(destination, format=format.strip("."))
00449 
00450 
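# Illustrative sketch of the intelhex primitives used by merge_region_list()
# above: fragments are written with puts() and the gap up to the next region
# is filled with the padding byte. Addresses and payloads are hypothetical.
def _example_intelhex_padding(padding=b'\xFF'):
    """Merge two fragments and pad the gap between them."""
    merged = IntelHex()
    merged.puts(0x0000, b'\x01\x02\x03\x04')        # first region's contents
    merged.puts(merged.maxaddr() + 1, padding * 4)  # pad up to the next region
    merged.puts(0x0008, b'\x05\x06\x07\x08')        # second region's contents
    return merged
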
00451 UPDATE_WHITELIST = (
00452     "application",
00453 )
00454 
00455 
00456 def build_project (src_paths, build_path, target, toolchain_name,
00457                   libraries_paths=None, linker_script=None, clean=False,
00458                   notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
00459                   report=None, properties=None, project_id=None,
00460                   project_description=None, config=None,
00461                   app_config=None, build_profile=None, stats_depth=None, ignore=None):
00462     """ Build a project. A project may be a test or a user program.
00463 
00464     Positional arguments:
00465     src_paths - a path or list of paths that contain all files needed to build
00466                 the project
00467     build_path - the directory where all of the object files will be placed
00468     target - the MCU or board that the project will compile for
00469     toolchain_name - the name of the build tools
00470 
00471     Keyword arguments:
00472     libraries_paths - The location of libraries to include when linking
00473     linker_script - the file that drives the linker to do its job
00474     clean - Rebuild everything if True
00475     notify - Notify function for logs
00476     name - the name of the project
00477     macros - additional macros
00478     inc_dirs - additional directories where include files may be found
00479     jobs - how many compilers we can run at once
00480     report - a dict where a result may be appended
00481     properties - a dict to fill with test properties (see prep_properties)
00482     project_id - the name put in the report
00483     project_description - the human-readable version of what this thing does
00484     config - a Config object to use instead of creating one
00485     app_config - location of a chosen mbed_app.json file
00486     build_profile - a dict of flags that will be passed to the compiler
00487     stats_depth - depth level for memap to display file/dirs
00488     ignore - list of paths to add to mbedignore
00489     """
00490     # Convert src_path to a list if needed
00491     if not isinstance(src_paths, list):
00492         src_paths = [src_paths]
00493     # Extend src_paths with libraries_paths
00494     if libraries_paths is not None:
00495         src_paths.extend(libraries_paths)
00496         inc_dirs.extend(map(dirname, libraries_paths))
00497 
00498     if clean and exists(build_path):
00499         rmtree(build_path)
00500     mkdir(build_path)
00501 
00502     toolchain = prepare_toolchain(
00503         src_paths, build_path, target, toolchain_name, macros=macros,
00504         clean=clean, jobs=jobs, notify=notify, config=config,
00505         app_config=app_config, build_profile=build_profile, ignore=ignore)
00506     toolchain.version_check()
00507 
00508     # The first path will give the name to the library
00509     name = (name or toolchain.config.name or
00510             basename(normpath(abspath(src_paths[0]))))
00511     notify.info("Building project %s (%s, %s)" %
00512                 (name, toolchain.target.name, toolchain_name))
00513 
00514     # Initialize reporting
00515     if report != None:
00516         start = time()
00517         # If project_id is specified, use that over the default name
00518         id_name = project_id.upper() if project_id else name.upper()
00519         description = project_description if project_description else name
00520         vendor_label = toolchain.target.extra_labels[0]
00521         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00522         cur_result = create_result(toolchain.target.name, toolchain_name,
00523                                    id_name, description)
00524         if properties != None:
00525             prep_properties(properties, toolchain.target.name, toolchain_name,
00526                             vendor_label)
00527 
00528     try:
00529         resources = Resources(notify).scan_with_toolchain(
00530             src_paths, toolchain, inc_dirs=inc_dirs)
00531 
00532         # Change linker script if specified
00533         if linker_script is not None:
00534             resources.add_file_ref(linker_script, linker_script)
00535 
00536         # Compile Sources
00537         objects = toolchain.compile_sources(resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
00538         resources.add_files_to_type(FileType.OBJECT, objects)
00539 
00540         # Link Program
00541         if toolchain.config.has_regions:
00542             binary, _ = toolchain.link_program(resources, build_path, name + "_application")
00543             region_list = list(toolchain.config.regions)
00544             region_list = [r._replace(filename=binary) if r.active else r
00545                            for r in region_list]
00546             res = "%s.%s" % (join(build_path, name),
00547                              getattr(toolchain.target, "OUTPUT_EXT", "bin"))
00548             merge_region_list(region_list, res, notify)
00549             update_regions = [
00550                 r for r in region_list if r.name in UPDATE_WHITELIST
00551             ]
00552             if update_regions:
00553                 update_res = join(build_path, generate_update_filename(name, toolchain.target))
00554                 merge_region_list(update_regions, update_res, notify)
00555                 res = (res, update_res)
00556             else:
00557                 res = (res, None)
00558         else:
00559             res, _ = toolchain.link_program(resources, build_path, name)
00560             res = (res, None)
00561 
00562         memap_instance = getattr(toolchain, 'memap_instance', None)
00563         memap_table = ''
00564         if memap_instance:
00565             # Write output to stdout in text (pretty table) format
00566             memap_table = memap_instance.generate_output('table', stats_depth)
00567             notify.info(memap_table)
00568 
00569             # Write output to file in JSON format
00570             map_out = join(build_path, name + "_map.json")
00571             memap_instance.generate_output('json', stats_depth, map_out)
00572 
00573             # Write output to file in CSV format for the CI
00574             map_csv = join(build_path, name + "_map.csv")
00575             memap_instance.generate_output('csv-ci', stats_depth, map_csv)
00576 
00577             map_html = join(build_path, name + "_map.html")
00578             memap_instance.generate_output('html', stats_depth, map_html)
00579 
00580         resources.detect_duplicates()
00581 
00582         if report != None:
00583             end = time()
00584             cur_result["elapsed_time"] = end - start
00585             cur_result["result"] = "OK"
00586             cur_result["memory_usage"] = (memap_instance.mem_report
00587                                           if memap_instance is not None else None)
00588             cur_result["bin"] = res[0]
00589             cur_result["elf"] = splitext(res[0])[0] + ".elf"
00590             cur_result.update(toolchain.report)
00591 
00592             add_result_to_report(report, cur_result)
00593 
00594         return res
00595 
00596     except Exception as exc:
00597         if report != None:
00598             end = time()
00599 
00600             if isinstance(exc, NotSupportedException):
00601                 cur_result["result"] = "NOT_SUPPORTED"
00602             else:
00603                 cur_result["result"] = "FAIL"
00604 
00605             cur_result["elapsed_time"] = end - start
00606 
00607             add_result_to_report(report, cur_result)
00608         # Let Exception propagate
00609         raise
00610 
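# Illustrative sketch: the smallest realistic build_project() call. The source
# path, build path and target are hypothetical; notify must be a notifier
# instance (e.g. MockNotifier) because notify.info() is called unconditionally.
def _example_build_project_usage():
    """Show a minimal build_project() invocation, nothing more."""
    return build_project(
        ["."], "BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
        notify=MockNotifier(), jobs=4)
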
00611 def build_library (src_paths, build_path, target, toolchain_name,
00612                   dependencies_paths=None, name=None, clean=False,
00613                   archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
00614                   report=None, properties=None, project_id=None,
00615                   remove_config_header_file=False, app_config=None,
00616                   build_profile=None, ignore=None):
00617     """ Build a library
00618 
00619     Positional arguments:
00620     src_paths - a path or list of paths that contain all files needed to build
00621                 the library
00622     build_path - the directory where all of the object files will be placed
00623     target - the MCU or board that the project will compile for
00624     toolchain_name - the name of the build tools
00625 
00626     Keyword arguments:
00627     dependencies_paths - The location of libraries to include when linking
00628     name - the name of the library
00629     clean - Rebuild everything if True
00630     archive - whether the library will create an archive file
00631     notify - Notify function for logs
00632     macros - additional macros
00633     inc_dirs - additional directories where include files may be found
00634     jobs - how many compilers we can run at once
00635     report - a dict where a result may be appended
00636     properties - a dict to fill with test properties (see prep_properties)
00637     project_id - the name that goes in the report
00638     remove_config_header_file - delete config header file when done building
00639     app_config - location of a chosen mbed_app.json file
00640     build_profile - a dict of flags that will be passed to the compiler
00641     ignore - list of paths to add to mbedignore
00642     """
00643 
00644     # Convert src_path to a list if needed
00645     if not isinstance(src_paths, list):
00646         src_paths = [src_paths]
00647     src_paths = [relpath(s) for s in src_paths]
00648 
00649     # Build path
00650     if archive:
00651         # Use temp path when building archive
00652         tmp_path = join(build_path, '.temp')
00653         mkdir(tmp_path)
00654     else:
00655         tmp_path = build_path
00656 
00657     # Clean the build directory
00658     if clean and exists(tmp_path):
00659         rmtree(tmp_path)
00660     mkdir(tmp_path)
00661 
00662     # Pass all params to the unified prepare_toolchain()
00663     toolchain = prepare_toolchain(
00664         src_paths, build_path, target, toolchain_name, macros=macros,
00665         clean=clean, jobs=jobs, notify=notify, app_config=app_config,
00666         build_profile=build_profile, ignore=ignore)
00667 
00668     # The first path will give the name to the library
00669     if name is None:
00670         name = basename(normpath(abspath(src_paths[0])))
00671     notify.info("Building library %s (%s, %s)" %
00672                    (name, toolchain.target.name, toolchain_name))
00673 
00674     # Initialize reporting
00675     if report != None:
00676         start = time()
00677         # If project_id is specified, use that over the default name
00678         id_name = project_id.upper() if project_id else name.upper()
00679         description = name
00680         vendor_label = toolchain.target.extra_labels[0]
00681         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00682         cur_result = create_result(toolchain.target.name, toolchain_name,
00683                                    id_name, description)
00684         cur_result['type'] = 'library'
00685         if properties != None:
00686             prep_properties(properties, toolchain.target.name, toolchain_name,
00687                             vendor_label)
00688 
00689     for src_path in src_paths:
00690         if not exists(src_path):
00691             error_msg = "The library source folder does not exist: %s" % src_path
00692             if report != None:
00693                 cur_result["output"] = error_msg
00694                 cur_result["result"] = "FAIL"
00695                 add_result_to_report(report, cur_result)
00696             raise Exception(error_msg)
00697 
00698     try:
00699         res = Resources(notify).scan_with_toolchain(
00700             src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)
00701 
00702         # Copy headers, objects and static libraries - all files needed for
00703         # static lib
00704         to_copy = (
00705             res.get_file_refs(FileType.HEADER) +
00706             res.get_file_refs(FileType.OBJECT) +
00707             res.get_file_refs(FileType.LIB) +
00708             res.get_file_refs(FileType.JSON) +
00709             res.get_file_refs(FileType.LD_SCRIPT) +
00710             res.get_file_refs(FileType.HEX) +
00711             res.get_file_refs(FileType.BIN)
00712         )
00713         toolchain.copy_files(to_copy, build_path)
00714         # Compile Sources
00715         objects = toolchain.compile_sources(
00716             res, res.get_file_paths(FileType.INC_DIR))
00717         res.add_files_to_type(FileType.OBJECT, objects)
00718 
00719         if archive:
00720             toolchain.build_library(objects, build_path, name)
00721 
00722         if remove_config_header_file:
00723             config_header_path = toolchain.get_config_header()
00724             if config_header_path:
00725                 remove(config_header_path)
00726 
00727         if report != None:
00728             end = time()
00729             cur_result["elapsed_time"] = end - start
00730             cur_result["result"] = "OK"
00731             add_result_to_report(report, cur_result)
00732         return True
00733 
00734     except Exception as exc:
00735         if report != None:
00736             end = time()
00737 
00738             if isinstance(exc, ToolException):
00739                 cur_result["result"] = "FAIL"
00740             elif isinstance(exc, NotSupportedException):
00741                 cur_result["result"] = "NOT_SUPPORTED"
00742 
00743             cur_result["elapsed_time"] = end - start
00744 
00745             add_result_to_report(report, cur_result)
00746 
00747         # Let Exception propagate
00748         raise
00749 
00750 ######################
00751 ### Legacy methods ###
00752 ######################
00753 
00754 def mbed2_obj_path(target_name, toolchain_name):
00755     real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
00756     return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)
00757 
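# Illustrative sketch: the legacy object path produced by mbed2_obj_path().
# The suffix comes from the toolchain class name, which for "GCC_ARM" is
# assumed to match the toolchain name itself.
def _example_mbed2_obj_path():
    """Return e.g. 'TARGET_K64F/TOOLCHAIN_GCC_ARM' (separator is OS-specific)."""
    return mbed2_obj_path("K64F", "GCC_ARM")
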
00758 def build_lib (lib_id, target, toolchain_name, clean=False, macros=None,
00759               notify=None, jobs=1, report=None, properties=None,
00760               build_profile=None, ignore=None):
00761     """ Legacy method for building mbed libraries
00762 
00763     Positional arguments:
00764     lib_id - the library's unique identifier
00765     target - the MCU or board that the project will compile for
00766     toolchain_name - the name of the build tools
00767 
00768     Keyword arguments:
00769     clean - Rebuild everything if True
00770     macros - additional macros
00771     notify - Notify function for logs
00772     jobs - how many compilers we can run at once
00773     report - a dict where a result may be appended
00774     properties - a dict to fill with test properties (see prep_properties)
00775     build_profile - a dict of flags that will be passed to the compiler
00776     ignore - list of paths to add to mbedignore
00777     """
00778     lib = Library(lib_id)
00779     if not lib.is_supported(target, toolchain_name):
00780         print('Library "%s" is not yet supported on target %s with toolchain %s'
00781               % (lib_id, target.name, toolchain_name))
00782         return False
00783 
00784     # We need to combine macros from parameter list with macros from library
00785     # definition
00786     lib_macros = lib.macros if lib.macros else []
00787     if macros:
00788         macros.extend(lib_macros)
00789     else:
00790         macros = lib_macros
00791 
00792     src_paths = lib.source_dir
00793     build_path = lib.build_dir
00794     dependencies_paths = lib.dependencies
00795     inc_dirs = lib.inc_dirs
00796 
00797     if not isinstance(src_paths, list):
00798         src_paths = [src_paths]
00799 
00800     # The first path will give the name to the library
00801     name = basename(src_paths[0])
00802 
00803     if report is not None:
00804         start = time()
00805         id_name = name.upper()
00806         description = name
00807         vendor_label = target.extra_labels[0]
00808         cur_result = None
00809         prep_report(report, target.name, toolchain_name, id_name)
00810         cur_result = create_result(target.name, toolchain_name, id_name,
00811                                    description)
00812 
00813         if properties != None:
00814             prep_properties(properties, target.name, toolchain_name,
00815                             vendor_label)
00816 
00817     for src_path in src_paths:
00818         if not exists(src_path):
00819             error_msg = "The library source folder does not exist: %s" % src_path
00820 
00821             if report != None:
00822                 cur_result["output"] = error_msg
00823                 cur_result["result"] = "FAIL"
00824                 add_result_to_report(report, cur_result)
00825 
00826             raise Exception(error_msg)
00827 
00828     try:
00829         # Toolchain instance
00830         # Create the desired build directory structure
00831         bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
00832         mkdir(bin_path)
00833         tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
00834                                                             toolchain_name))
00835         mkdir(tmp_path)
00836 
00837         toolchain = prepare_toolchain(
00838             src_paths, tmp_path, target, toolchain_name, macros=macros,
00839             notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
00840             ignore=ignore)
00841 
00842         notify.info("Building library %s (%s, %s)" %
00843                     (name.upper(), target.name, toolchain_name))
00844 
00845         # Take into account the library configuration (MBED_CONFIG_FILE)
00846         config = toolchain.config
00847         config.add_config_files([MBED_CONFIG_FILE])
00848 
00849         # Scan Resources
00850         resources = Resources(notify).scan_with_toolchain(
00851             src_paths + (lib.inc_dirs_ext or []), toolchain,
00852             inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)
00853 
00854         # Copy Headers
00855         toolchain.copy_files(
00856             resources.get_file_refs(FileType.HEADER), build_path)
00857 
00858         dependencies_include_dir = Resources(notify).scan_with_toolchain([build_path], toolchain).inc_dirs
00859 
00860         # Compile Sources
00861         objects = []
00862         for resource in resources:
00863             objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))
00864 
00865         needed_update = toolchain.build_library(objects, bin_path, name)
00866 
00867         if report != None and needed_update:
00868             end = time()
00869             cur_result["elapsed_time"] = end - start
00870             cur_result["result"] = "OK"
00871 
00872             add_result_to_report(report, cur_result)
00873         return True
00874 
00875     except Exception:
00876         if report != None:
00877             end = time()
00878             cur_result["result"] = "FAIL"
00879             cur_result["elapsed_time"] = end - start
00880 
00881             add_result_to_report(report, cur_result)
00882 
00883         # Let Exception propagate
00884         raise
00885 
00886 
00887 # A number of compiled files need to be copied as objects as the linker
00888 # will not search for weak symbol overrides in archives. These are:
00889 #   - mbed_retarget.o: to make sure that the C standard lib symbols get
00890 #                      overridden
00891 #   - mbed_board.o: `mbed_die` is weak
00892 #   - mbed_overrides.o: this contains platform overrides of various
00893 #                       weak SDK functions
00894 #   - mbed_main.o: this contains main redirection
00895 #   - mbed_sdk_boot.o: this contains the main boot code in
00896 #   - PeripheralPins.o: PinMap can be weak
00897 SEPARATE_NAMES = [
00898     'PeripheralPins.o',
00899     'mbed_retarget.o',
00900     'mbed_board.o',
00901     'mbed_overrides.o',
00902     'mbed_main.o',
00903     'mbed_sdk_boot.o',
00904 ]
00905 
00906 
00907 def build_mbed_libs (target, toolchain_name, clean=False, macros=None,
00908                     notify=None, jobs=1, report=None, properties=None,
00909                     build_profile=None, ignore=None):
00910     """ Build legacy libraries for a target and toolchain pair
00911 
00912     Positional arguments:
00913     target - the MCU or board that the project will compile for
00914     toolchain_name - the name of the build tools
00915 
00916     Keyword arguments:
00917     clean - Rebuild everything if True
00918     macros - additional macros
00919     notify - Notify function for logs
00920     jobs - how many compilers we can run at once
00921     report - a dict where a result may be appended
00922     properties - a dict to fill with test properties (see prep_properties)
00923     build_profile - a dict of flags that will be passed to the compiler
00924     ignore - list of paths to add to mbedignore
00925 
00926     Return - True if target + toolchain built correctly, False if not supported
00927     """
00928 
00929     if report is not None:
00930         start = time()
00931         id_name = "MBED"
00932         description = "mbed SDK"
00933         vendor_label = target.extra_labels[0]
00934         cur_result = None
00935         prep_report(report, target.name, toolchain_name, id_name)
00936         cur_result = create_result(
00937             target.name, toolchain_name, id_name, description)
00938         if properties is not None:
00939             prep_properties(
00940                 properties, target.name, toolchain_name, vendor_label)
00941 
00942     if toolchain_name not in target.supported_toolchains:
00943         supported_toolchains_text = ", ".join(target.supported_toolchains)
00944         notify.info('The target {} does not support the toolchain {}'.format(
00945             target.name,
00946             toolchain_name
00947         ))
00948         notify.info('{} supports {} toolchain{}'.format(
00949             target.name,
00950             supported_toolchains_text,
00951             's' if len(target.supported_toolchains) > 1 else ''
00952         ))
00953 
00954         if report is not None:
00955             cur_result["result"] = "SKIP"
00956             add_result_to_report(report, cur_result)
00957 
00958         return False
00959 
00960     try:
00961         # Source and Build Paths
00962         build_toolchain = join(
00963             MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
00964         mkdir(build_toolchain)
00965 
00966         tmp_path = join(
00967             MBED_LIBRARIES,
00968             '.temp',
00969             mbed2_obj_path(target.name, toolchain_name)
00970         )
00971         mkdir(tmp_path)
00972 
00973         # Toolchain and config
00974         toolchain = prepare_toolchain(
00975             [""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
00976             build_profile=build_profile, jobs=jobs, clean=clean, ignore=ignore)
00977 
00978         config = toolchain.config
00979         config.add_config_files([MBED_CONFIG_FILE])
00980         toolchain.set_config_data(toolchain.config.get_config_data())
00981 
00982         # distribute header files
00983         toolchain.copy_files(
00984             [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
00985         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
00986 
00987         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
00988                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
00989                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
00990             resources = Resources(notify).scan_with_toolchain([dir], toolchain)
00991             toolchain.copy_files(
00992                 [FileRef(basename(p), p) for p
00993                  in resources.get_file_paths(FileType.HEADER)],
00994                 dest)
00995             library_incdirs.append(dest)
00996 
00997         # collect resources of the libs to compile
00998         cmsis_res = Resources(notify).scan_with_toolchain(
00999             [MBED_CMSIS_PATH], toolchain)
01000         hal_res = Resources(notify).scan_with_toolchain(
01001             [MBED_TARGETS_PATH], toolchain)
01002         mbed_resources = Resources(notify).scan_with_toolchain(
01003             [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain)
01004 
01005         incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs
01006 
01007         # Build Things
01008         notify.info("Building library %s (%s, %s)" %
01009                     ('MBED', target.name, toolchain_name))
01010         objects = toolchain.compile_sources(mbed_resources, incdirs)
01011         separate_objects = []
01012 
01013         for obj in objects:
01014             for name in SEPARATE_NAMES:
01015                 if obj.endswith(name):
01016                     separate_objects.append(obj)
01017 
01018         for obj in separate_objects:
01019             objects.remove(obj)
01020 
01021         toolchain.build_library(objects, build_toolchain, "mbed")
01022         notify.info("Building library %s (%s, %s)" %
01023                     ('CMSIS', target.name, toolchain_name))
01024         cmsis_objects = toolchain.compile_sources(cmsis_res, incdirs + [tmp_path])
01025         notify.info("Building library %s (%s, %s)" %
01026                     ('HAL', target.name, toolchain_name))
01027         hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])
01028 
01029         # Copy everything into the build directory
01030         to_copy_paths = [
01031             hal_res.get_file_paths(FileType.HEADER),
01032             hal_res.get_file_paths(FileType.HEX),
01033             hal_res.get_file_paths(FileType.BIN),
01034             hal_res.get_file_paths(FileType.LIB),
01035             cmsis_res.get_file_paths(FileType.HEADER),
01036             cmsis_res.get_file_paths(FileType.BIN),
01037             cmsis_res.get_file_paths(FileType.LD_SCRIPT),
01038             hal_res.get_file_paths(FileType.LD_SCRIPT),
01039             [MBED_CONFIG_FILE],
01040             cmsis_objects,
01041             hal_objects,
01042             separate_objects,
01043         ]
01044         to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
01045         toolchain.copy_files(to_copy, build_toolchain)
01046 
01047         if report is not None:
01048             end = time()
01049             cur_result["elapsed_time"] = end - start
01050             cur_result["result"] = "OK"
01051             add_result_to_report(report, cur_result)
01052 
01053         return True
01054 
01055     except Exception as exc:
01056         if report is not None:
01057             end = time()
01058             cur_result["result"] = "FAIL"
01059             cur_result["elapsed_time"] = end - start
01060 
01061             cur_result["output"] += str(exc)
01062 
01063             add_result_to_report(report, cur_result)
01064         raise
01065 
01066 
01067 def get_unique_supported_toolchains (release_targets=None):
01068     """ Get list of all unique toolchains supported by targets
01069 
01070     Keyword arguments:
01071     release_targets - tuple structure returned from get_mbed_official_release().
01072                       If release_targets is not specified, then it queries all
01073                       known targets
01074     """
01075     unique_supported_toolchains = []
01076 
01077     if not release_targets:
01078         for target in TARGET_NAMES:
01079             for toolchain in TARGET_MAP[target].supported_toolchains:
01080                 if toolchain not in unique_supported_toolchains:
01081                     unique_supported_toolchains.append(toolchain)
01082     else:
01083         for target in release_targets:
01084             for toolchain in target[1]:
01085                 if toolchain not in unique_supported_toolchains:
01086                     unique_supported_toolchains.append(toolchain)
01087 
01088     return unique_supported_toolchains
01089 
01090 
01091 def _lowercase_release_version(release_version):
01092     try:
01093         return release_version.lower()
01094     except AttributeError:
01095         return 'all'
01096 
01097 def mcu_toolchain_list (release_version='5'):
01098     """  Shows list of toolchains
01099 
01100     """
01101     release_version = _lowercase_release_version(release_version)
01102     version_release_targets = {}
01103     version_release_target_names = {}
01104 
01105     for version in RELEASE_VERSIONS:
01106         version_release_targets[version] = get_mbed_official_release(version)
01107         version_release_target_names[version] = [x[0] for x in
01108                                                  version_release_targets[
01109                                                      version]]
01110 
01111     if release_version in RELEASE_VERSIONS:
01112         release_targets = version_release_targets[release_version]
01113     else:
01114         release_targets = None
01115 
01116     unique_supported_toolchains = get_unique_supported_toolchains(
01117         release_targets)
01118     columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + unique_supported_toolchains
01119     return "\n".join(columns)
01120 
01121 
01122 def mcu_target_list (release_version='5'):
01123     """  Shows target list
01124 
01125     """
01126     release_version = _lowercase_release_version(release_version)
01127     version_release_targets = {}
01128     version_release_target_names = {}
01129 
01130     for version in RELEASE_VERSIONS:
01131         version_release_targets[version] = get_mbed_official_release(version)
01132         version_release_target_names[version] = [x[0] for x in
01133                                                  version_release_targets[
01134                                                      version]]
01135 
01136     if release_version in RELEASE_VERSIONS:
01137         release_targets = version_release_targets[release_version]
01138     else:
01139         release_targets = None
01140 
01141     target_names = []
01142 
01143     if release_targets:
01144         target_names = [x[0] for x in release_targets]
01145     else:
01146         target_names = TARGET_NAMES
01147 
01148     return "\n".join(target_names)
01149 
01150 
01151 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None,
01152                          release_version='5'):
01153     """  Shows target map using prettytable
01154 
01155     Keyword arguments:
01156     verbose_html - emit html instead of a simple table
01157     platform_filter - remove results that match the string
01158     release_version - get the matrix for this major version number
01159     """
01160     # Only use it in this function so building works without extra modules
01161     from prettytable import PrettyTable, HEADER
01162     release_version = _lowercase_release_version(release_version)
01163     version_release_targets = {}
01164     version_release_target_names = {}
01165 
01166     for version in RELEASE_VERSIONS:
01167         version_release_targets[version] = get_mbed_official_release(version)
01168         version_release_target_names[version] = [x[0] for x in
01169                                                  version_release_targets[
01170                                                      version]]
01171 
01172     if release_version in RELEASE_VERSIONS:
01173         release_targets = version_release_targets[release_version]
01174     else:
01175         release_targets = None
01176 
01177     unique_supported_toolchains = get_unique_supported_toolchains(
01178         release_targets)
01179     prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
01180 
01181     # All tests status table print
01182     columns = prepend_columns + unique_supported_toolchains
01183     table_printer = PrettyTable(columns, junction_char="|", hrules=HEADER)
01184     # Align table
01185     for col in columns:
01186         table_printer.align[col] = "c"
01187     table_printer.align["Target"] = "l"
01188 
01189     perm_counter = 0
01190     target_counter = 0
01191 
01192     target_names = []
01193 
01194     if release_targets:
01195         target_names = [x[0] for x in release_targets]
01196     else:
01197         target_names = TARGET_NAMES
01198 
01199     for target in sorted(target_names):
01200         if platform_filter is not None:
01201             # Filter out platforms using regex
01202             if re.search(platform_filter, target) is None:
01203                 continue
01204         target_counter += 1
01205 
01206         row = [target]  # First column is platform name
01207 
01208         for version in RELEASE_VERSIONS:
01209             if target in version_release_target_names[version]:
01210                 text = "Supported"
01211             else:
01212                 text = "-"
01213             row.append(text)
01214 
01215         for unique_toolchain in unique_supported_toolchains:
01216             tgt_obj = TARGET_MAP[target]
01217             if (unique_toolchain in tgt_obj.supported_toolchains or
01218                 (unique_toolchain == "ARMC6" and
01219                  "ARM" in tgt_obj.supported_toolchains) or
01220                 (unique_toolchain == "ARM" and
01221                  "ARMC6" in tgt_obj.supported_toolchains and
01222                  CORE_ARCH[tgt_obj.core] == 8)):
01223                 text = "Supported"
01224                 perm_counter += 1
01225             else:
01226                 text = "-"
01227 
01228             row.append(text)
01229         table_printer.add_row(row)
01230 
01231     result = table_printer.get_html_string() if verbose_html \
01232              else table_printer.get_string()
01233     result += "\n"
01234     result += "Supported targets: %d\n" % (target_counter)
01235     if target_counter == 1:
01236         result += "Supported toolchains: %d" % (perm_counter)
01237     return result
01238 
01239 
01240 def get_target_supported_toolchains (target):
01241     """ Returns target supported toolchains list
01242 
01243     Positional arguments:
01244     target - the target to get the supported toolchains of
01245     """
01246     return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
01247         else None
01248 
01249 
01250 def print_build_results (result_list, build_name):
01251     """ Generate result string for build results
01252 
01253     Positional arguments:
01254     result_list - the list of results to print
01255     build_name - the name of the build we are printing result for
01256     """
01257     result = ""
01258     if len(result_list) > 0:
01259         result += build_name + "\n"
01260         result += "\n".join(["  * %s" % f for f in result_list])
01261         result += "\n"
01262     return result
01263 
01264 def print_build_memory_usage (report):
01265     """ Generate result table with memory usage values for build results
01266     Aggregates the 'memory_usage' records collected in the build report.
01267 
01268     Positional arguments:
01269     report - Report generated during build procedure.
01270     """
01271     from prettytable import PrettyTable, HEADER
01272     columns_text = ['name', 'target', 'toolchain']
01273     columns_int = ['static_ram', 'total_flash']
01274     table = PrettyTable(columns_text + columns_int, junction_char="|", hrules=HEADER)
01275 
01276     for col in columns_text:
01277         table.align[col] = 'l'
01278 
01279     for col in columns_int:
01280         table.align[col] = 'r'
01281 
01282     for target in report:
01283         for toolchain in report[target]:
01284             for name in report[target][toolchain]:
01285                 for dlist in report[target][toolchain][name]:
01286                     for dlistelem in dlist:
01287                         # Get 'memory_usage' record and build table with
01288                         # statistics
01289                         record = dlist[dlistelem]
01290                         if 'memory_usage' in record and record['memory_usage']:
01291                             # Note that summary should be in the last record of
01292                             # 'memory_usage' section. This is why we are
01293                             # grabbing last "[-1]" record.
01294                             row = [
01295                                 record['description'],
01296                                 record['target_name'],
01297                                 record['toolchain_name'],
01298                                 record['memory_usage'][-1]['summary'][
01299                                     'static_ram'],
01300                                 record['memory_usage'][-1]['summary'][
01301                                     'total_flash'],
01302                             ]
01303                             table.add_row(row)
01304 
01305     result = "Memory map breakdown for built projects (values in Bytes):\n"
01306     result += table.get_string(sortby='name')
01307     return result
01308 
01309 def write_build_report (build_report, template_filename, filename):
01310     """Write a build report to disk using a template file
01311 
01312     Positional arguments:
01313     build_report - a report generated by the build system
01314     template_filename - a file that contains the template for the style of build
01315                         report
01316     filename - the location on disk to write the file to
01317     """
01318     build_report_failing = []
01319     build_report_passing = []
01320 
01321     for report in build_report:
01322         if len(report["failing"]) > 0:
01323             build_report_failing.append(report)
01324         else:
01325             build_report_passing.append(report)
01326 
01327     env = Environment(extensions=['jinja2.ext.with_'])
01328     env.loader = FileSystemLoader('ci_templates')
01329     template = env.get_template(template_filename)
01330 
01331     with open(filename, 'w+') as placeholder:
01332         placeholder.write(template.render(
01333             failing_builds=build_report_failing,
01334             passing_builds=build_report_passing))
01335 
01336 
01337 def merge_build_data(filename, toolchain_report, app_type):
01338     path_to_file = dirname(abspath(filename))
01339     try:
01340         build_data = load(open(filename))
01341     except (IOError, ValueError):
01342         build_data = {'builds': []}
01343     for tgt in toolchain_report.values():
01344         for tc in tgt.values():
01345             for project in tc.values():
01346                 for build in project:
01347                     try:
01348                         build[0]['bin_fullpath'] = build[0]['bin']
01349                         build[0]['elf_fullpath'] = build[0]['elf']
01350                         build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
01351                         build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
01352                     except KeyError:
01353                         pass
01354                     if 'type' not in build[0]:
01355                         build[0]['type'] = app_type
01356                     build_data['builds'].insert(0, build[0])
01357     dump(build_data, open(filename, "w"), indent=4, separators=(',', ': '))