Gleb Klochkov / Mbed OS Climatcontroll_Main

Dependencies:   esp8266-driver


build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .targets import TARGET_NAMES, TARGET_MAP
00046 from .libraries import Library
00047 from .toolchains import TOOLCHAIN_CLASSES
00048 from .config import Config
00049 
# Compatibility shim (assumed fix): 'basestring' is referenced further down but
# does not exist on Python 3, so fall back to 'str' there.
try:
    basestring
except NameError:
    basestring = str

00050 RELEASE_VERSIONS = ['2', '5']
00051 
00052 def prep_report (report, target_name, toolchain_name, id_name):
00053     """Setup report keys
00054 
00055     Positional arguments:
00056     report - the report to fill
00057     target_name - the target being used
00058     toolchain_name - the toolchain being used
00059     id_name - the name of the executable or library being built
00060     """
00061     if not target_name in report:
00062         report[target_name] = {}
00063 
00064     if not toolchain_name in report[target_name]:
00065         report[target_name][toolchain_name] = {}
00066 
00067     if not id_name in report[target_name][toolchain_name]:
00068         report[target_name][toolchain_name][id_name] = []
00069 
00070 def prep_properties (properties, target_name, toolchain_name, vendor_label):
00071     """Setup test properties
00072 
00073     Positional arguments:
00074     properties - the dict to fill
00075     target_name - the target the test is targeting
00076     toolchain_name - the toolchain that will compile the test
00077     vendor_label - the vendor
00078     """
00079     if not target_name in properties:
00080         properties[target_name] = {}
00081 
00082     if not toolchain_name in properties[target_name]:
00083         properties[target_name][toolchain_name] = {}
00084 
00085     properties[target_name][toolchain_name]["target"] = target_name
00086     properties[target_name][toolchain_name]["vendor"] = vendor_label
00087     properties[target_name][toolchain_name]["toolchain"] = toolchain_name
00088 
00089 def create_result (target_name, toolchain_name, id_name, description):
00090     """Create a result dictionary
00091 
00092     Positional arguments:
00093     target_name - the target being built for
00094     toolchain_name - the toolchain doing the building
00095     id_name - the name of the executable or library being built
00096     description - a human readable description of what's going on
00097     """
00098     cur_result = {}
00099     cur_result["target_name"] = target_name
00100     cur_result["toolchain_name"] = toolchain_name
00101     cur_result["id"] = id_name
00102     cur_result["description"] = description
00103     cur_result["elapsed_time"] = 0
00104     cur_result["output"] = ""
00105 
00106     return cur_result
00107 
00108 def add_result_to_report (report, result):
00109     """Add a single result to a report dictionary
00110 
00111     Positional arguments:
00112     report - the report to append to
00113     result - the result to append
00114     """
00115     result["date"] = datetime.datetime.utcnow().isoformat()
00116     result["uuid"] = str(uuid.uuid1())
00117     target = result["target_name"]
00118     toolchain = result["toolchain_name"]
00119     id_name = result['id']
00120     result_wrap = {0: result}
00121     report[target][toolchain][id_name].append(result_wrap)
00122 
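# Illustrative sketch (not part of the original file): how the report helpers
# above are typically chained together. The target, toolchain and id names are
# assumed placeholders.
#
#   report = {}
#   prep_report(report, "K64F", "GCC_ARM", "MY_APP")
#   result = create_result("K64F", "GCC_ARM", "MY_APP", "example build")
#   result["result"] = "OK"
#   add_result_to_report(report, result)
#   # report["K64F"]["GCC_ARM"]["MY_APP"] now holds [{0: result}]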
00123 def get_config (src_paths, target, toolchain_name):
00124     """Get the configuration object for a target-toolchain combination
00125 
00126     Positional arguments:
00127     src_paths - paths to scan for the configuration files
00128     target - the device we are building for
00129     toolchain_name - the string that identifies the build tools
00130     """
00131     # Convert src_paths to a list if needed
00132     if not isinstance(src_paths, list):
00133         src_paths = [src_paths]
00134 
00135     # Pass all params to the unified prepare_resources()
00136     toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)
00137 
00138     # Scan src_path for config files
00139     resources = toolchain.scan_resources(src_paths[0])
00140     for path in src_paths[1:]:
00141         resources.add(toolchain.scan_resources(path))
00142 
00143     # Update the configuration files until adding features no longer produces changes
00144     prev_features = set()
00145     while True:
00146         # Update the configuration with any .json files found while scanning
00147         toolchain.config.add_config_files(resources.json_files)
00148 
00149         # Add features while we find new ones
00150         features = set(toolchain.config.get_features())
00151         if features == prev_features:
00152             break
00153 
00154         for feature in features:
00155             if feature in resources.features:
00156                 resources += resources.features[feature]
00157 
00158         prev_features = features
00159     toolchain.config.validate_config()
00160     if toolchain.config.has_regions:
00161         _ = list(toolchain.config.regions)
00162 
00163     cfg, macros = toolchain.config.get_config_data()
00164     features = toolchain.config.get_features()
00165     return cfg, macros, features
00166 
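# Illustrative sketch (not part of the original file): fetching the merged
# configuration for a source tree. The source path and names are assumptions.
#
#   cfg, macros, features = get_config(["."], "K64F", "GCC_ARM")
#   print("Macros:", macros)
#   print("Features:", features)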
00167 def is_official_target (target_name, version):
00168     """ Returns (True, None) if a target is part of the official release for
00169     the given version, and (False, reason) if it is not part of the official
00170     release for that version.
00171 
00172     Positional arguments:
00173     target_name - Name of the target (ex. 'K64F')
00174     version - The release version string. Should be a string contained within
00175               RELEASE_VERSIONS
00176     """
00177 
00178     result = True
00179     reason = None
00180     target = TARGET_MAP[target_name]
00181 
00182     if hasattr(target, 'release_versions') \
00183        and version in target.release_versions:
00184         if version == '2':
00185             # For version 2, either ARM or uARM toolchain support is required
00186             required_toolchains = set(['ARM', 'uARM'])
00187 
00188             if not len(required_toolchains.intersection(
00189                     set(target.supported_toolchains))) > 0:
00190                 result = False
00191                 reason = ("Target '%s' must support " % target.name) + \
00192                     ("one of the following toolchains to be included in the") + \
00193                     ((" mbed 2.0 official release: %s" + linesep) %
00194                      ", ".join(required_toolchains)) + \
00195                     ("Currently it is only configured to support the ") + \
00196                     ("following toolchains: %s" %
00197                      ", ".join(target.supported_toolchains))
00198 
00199         elif version == '5':
00200             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
00201             required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
00202             required_toolchains_sorted = list(required_toolchains)
00203             required_toolchains_sorted.sort()
00204             supported_toolchains = set(target.supported_toolchains)
00205             supported_toolchains_sorted = list(supported_toolchains)
00206             supported_toolchains_sorted.sort()
00207 
00208             if not required_toolchains.issubset(supported_toolchains):
00209                 result = False
00210                 reason = ("Target '%s' must support " % target.name) + \
00211                     ("ALL of the following toolchains to be included in the") + \
00212                     ((" mbed OS 5.0 official release: %s" + linesep) %
00213                      ", ".join(required_toolchains_sorted)) + \
00214                     ("Currently it is only configured to support the ") + \
00215                     ("following toolchains: %s" %
00216                      ", ".join(supported_toolchains_sorted))
00217 
00218             elif not target.default_lib == 'std':
00219                 result = False
00220                 reason = ("Target '%s' must set the " % target.name) + \
00221                     ("'default_lib' to 'std' to be included in the ") + \
00222                     ("mbed OS 5.0 official release." + linesep) + \
00223                     ("Currently it is set to '%s'" % target.default_lib)
00224 
00225         else:
00226             result = False
00227             reason = ("Target '%s' has set an invalid release version of '%s'" %
00228                       (target.name, version)) + \
00229                 ("Please choose from the following release versions: %s" %
00230                  ', '.join(RELEASE_VERSIONS))
00231 
00232     else:
00233         result = False
00234         if not hasattr(target, 'release_versions'):
00235             reason = "Target '%s' " % target.name
00236             reason += "does not have the 'release_versions' key set"
00237         elif not version in target.release_versions:
00238             reason = "Target '%s' does not contain the version '%s' " % \
00239                      (target.name, version)
00240             reason += "in its 'release_versions' key"
00241 
00242     return result, reason
00243 
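# Illustrative sketch (not part of the original file): checking whether a
# target belongs to an official release. The version must be one of
# RELEASE_VERSIONS; the target name is an assumed placeholder.
#
#   ok, reason = is_official_target("K64F", "5")
#   if not ok:
#       print(reason)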
00244 def transform_release_toolchains (toolchains, version):
00245     """ Given a list of toolchains and a release version, return a list of
00246     only the supported toolchains for that release
00247 
00248     Positional arguments:
00249     toolchains - The list of toolchains
00250     version - The release version string. Should be a string contained within
00251               RELEASE_VERSIONS
00252     """
00253     if version == '5':
00254         return ['ARM', 'GCC_ARM', 'IAR']
00255     else:
00256         return toolchains
00257 
00258 
00259 def get_mbed_official_release (version):
00260     """ Given a release version string, return a tuple of (target name,
00261     supported toolchains) tuples for that release.
00262     Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
00263                            ('K64F', ('ARM', 'GCC_ARM')), ...)
00264 
00265     Positional arguments:
00266     version - The version string. Should be a string contained within
00267               RELEASE_VERSIONS
00268     """
00269 
00270     mbed_official_release = (
00271         tuple(
00272             tuple(
00273                 [
00274                     TARGET_MAP[target].name,
00275                     tuple(transform_release_toolchains(
00276                         TARGET_MAP[target].supported_toolchains, version))
00277                 ]
00278             ) for target in TARGET_NAMES \
00279             if (hasattr(TARGET_MAP[target], 'release_versions')
00280                 and version in TARGET_MAP[target].release_versions)
00281         )
00282     )
00283 
00284     for target in mbed_official_release:
00285         is_official, reason = is_official_target(target[0], version)
00286 
00287         if not is_official:
00288             raise InvalidReleaseTargetException(reason)
00289 
00290     return mbed_official_release
00291 
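# Illustrative sketch (not part of the original file): listing the official
# release targets and their toolchains for mbed OS 5.
#
#   for name, toolchains in get_mbed_official_release("5"):
#       print("%s: %s" % (name, ", ".join(toolchains)))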
00292 
00293 def prepare_toolchain (src_paths, build_dir, target, toolchain_name,
00294                       macros=None, clean=False, jobs=1,
00295                       notify=None, silent=False, verbose=False,
00296                       extra_verbose=False, config=None,
00297                       app_config=None, build_profile=None):
00298     """ Prepares resource related objects - toolchain, target, config
00299 
00300     Positional arguments:
00301     src_paths - the paths to source directories
00302     target - ['LPC1768', 'LPC11U24', etc.]
00303     toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
00304 
00305     Keyword arguments:
00306     macros - additional macros
00307     clean - Rebuild everything if True
00308     jobs - how many compilers we can run at once
00309     notify - Notify function for logs
00310     silent - suppress printing of progress indicators
00311     verbose - Write the actual tools command lines used if True
00312     extra_verbose - even more output!
00313     config - a Config object to use instead of creating one
00314     app_config - location of a chosen mbed_app.json file
00315     build_profile - a list of mergeable build profiles
00316     """
00317 
00318     # We need to remove all paths which are repeated to avoid
00319     # multiple compilations and linking with the same objects
00320     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
00321 
00322     # If the configuration object was not yet created, create it now
00323     config = config or Config(target, src_paths, app_config=app_config)
00324     target = config.target
00325     try:
00326         cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
00327     except KeyError:
00328         raise KeyError("Toolchain %s not supported" % toolchain_name)
00329 
00330     profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
00331     for contents in build_profile or []:
00332         for key in profile:
00333             profile[key].extend(contents[toolchain_name].get(key, []))
00334 
00335     toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
00336                        extra_verbose=extra_verbose, build_profile=profile)
00337 
00338     toolchain.config = config
00339     toolchain.jobs = jobs
00340     toolchain.build_all = clean
00341     toolchain.VERBOSE = verbose
00342 
00343     return toolchain
00344 
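# Illustrative sketch (not part of the original file): creating a toolchain
# object directly. The paths and the build profile contents are assumptions;
# each profile entry is keyed by toolchain name, as consumed above.
#
#   profile = {"GCC_ARM": {"common": ["-Os"]}}
#   toolchain = prepare_toolchain(["."], "BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
#                                 jobs=4, build_profile=[profile])
#   resources = toolchain.scan_resources(".")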
00345 def _printihex(ihex):
00346     import pprint
00347     pprint.PrettyPrinter().pprint(ihex.todict())
00348 
00349 def _real_region_size(region):
00350     try:
00351         part = intelhex_offset(region.filename, offset=region.start)
00352         return (part.maxaddr() - part.minaddr()) + 1
00353     except AttributeError:
00354         return region.size
00355 
00356 
00357 def _fill_header(region_list, current_region):
00358     """Fill an application header region
00359 
00360     This is done in three steps:
00361      * Fill the whole region with zeros
00362      * Fill const, timestamp and size entries with their data
00363      * Fill the digests using this header as the header region
00364     """
00365     region_dict = {r.name: r for r in region_list}
00366     header = IntelHex()
00367     header.puts(current_region.start, b'\x00' * current_region.size)
00368     start = current_region.start
00369     for member in current_region.filename:
00370         _, type, subtype, data = member
00371         member_size = Config.header_member_size(member)
00372         if type == "const":
00373             fmt = {
00374                 "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
00375                 "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
00376             }[subtype]
00377             header.puts(start, struct.pack(fmt, integer(data, 0)))
00378         elif type == "timestamp":
00379             fmt = {"32le": "<L", "64le": "<Q",
00380                    "32be": ">L", "64be": ">Q"}[subtype]
00381             header.puts(start, struct.pack(fmt, int(time())))
00382         elif type == "size":
00383             fmt = {"32le": "<L", "64le": "<Q",
00384                    "32be": ">L", "64be": ">Q"}[subtype]
00385             size = sum(_real_region_size(region_dict[r]) for r in data)
00386             header.puts(start, struct.pack(fmt, size))
00387         elif type  == "digest":
00388             if data == "header":
00389                 ih = header[:start]
00390             else:
00391                 ih = intelhex_offset(region_dict[data].filename, offset=region_dict[data].start)
00392             if subtype.startswith("CRCITT32"):
00393                 fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
00394                 header.puts(start, struct.pack(fmt, zlib.crc32(ih.tobinarray())))
00395             elif subtype.startswith("SHA"):
00396                 if subtype == "SHA256":
00397                     hash = hashlib.sha256()
00398                 elif subtype == "SHA512":
00399                     hash = hashlib.sha512()
00400                 hash.update(ih.tobinarray())
00401                 header.puts(start, hash.digest())
00402         start += Config.header_member_size(member)
00403     return header
00404 
00405 def merge_region_list (region_list, destination, padding=b'\xFF'):
00406     """Merge the region_list into a single image
00407 
00408     Positional Arguments:
00409     region_list - list of regions, which should contain filenames
00410     destination - file name to write all regions to
00411     padding - bytes to fill gaps with
00412     """
00413     merged = IntelHex()
00414     _, format = splitext(destination)
00415 
00416     print("Merging Regions:")
00417 
00418     for region in region_list:
00419         if region.active and not region.filename:
00420             raise ToolException("Active region has no contents: No file found.")
00421         if isinstance(region.filename, list):
00422             header_basename, _ = splitext(destination)
00423             header_filename = header_basename + "_header.hex"
00424             _fill_header(region_list, region).tofile(header_filename, format='hex')
00425             region = region._replace(filename=header_filename)
00426         if region.filename:
00427             print("  Filling region %s with %s" % (region.name, region.filename))
00428             part = intelhex_offset(region.filename, offset=region.start)
00429             part_size = (part.maxaddr() - part.minaddr()) + 1
00430             if part_size > region.size:
00431                 raise ToolException("Contents of region %s do not fit"
00432                                     % region.name)
00433             merged.merge(part)
00434             pad_size = region.size - part_size
00435             if pad_size > 0 and region != region_list[-1]:
00436                 print("  Padding region %s with 0x%x bytes" % (region.name, pad_size))
00437                 if format == ".hex":
00438                     """The offset will be in the hex file generated when we're done,
00439                     so we can skip padding here"""
00440                 else:
00441                     merged.puts(merged.maxaddr() + 1, padding * pad_size)
00442 
00443     if not exists(dirname(destination)):
00444         makedirs(dirname(destination))
00445     print("Space used after regions merged: 0x%x" %
00446           (merged.maxaddr() - merged.minaddr() + 1))
00447     with open(destination, "wb+") as output:
00448         merged.tofile(output, format=format.strip("."))
00449 
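# Illustrative sketch (not part of the original file): merging bootloader and
# application regions into one image. The region list normally comes from
# toolchain.config.regions, as done in build_project() below; the file names
# here are assumptions.
#
#   region_list = list(toolchain.config.regions)
#   region_list = [r._replace(filename="app.bin") if r.active else r
#                  for r in region_list]
#   merge_region_list(region_list, "BUILD/combined.bin")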
00450 def scan_resources (src_paths, toolchain, dependencies_paths=None,
00451                    inc_dirs=None, base_path=None, collect_ignores=False):
00452     """ Scan resources using an initialized toolchain
00453 
00454     Positional arguments
00455     src_paths - the paths to source directories
00456     toolchain - valid toolchain object
00457     dependencies_paths - dependency paths that we should scan for include dirs
00458     inc_dirs - additional include directories which should be added to
00459                the scanner resources
00460     """
00461 
00462     # Scan src_path
00463     resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
00464                                          collect_ignores=collect_ignores)
00465     for path in src_paths[1:]:
00466         resources.add(toolchain.scan_resources(path, base_path=base_path,
00467                                                collect_ignores=collect_ignores))
00468 
00469     # Scan dependency paths for include dirs
00470     if dependencies_paths is not None:
00471         for path in dependencies_paths:
00472             lib_resources = toolchain.scan_resources(path)
00473             resources.inc_dirs.extend(lib_resources.inc_dirs)
00474 
00475     # Add additional include directories if passed
00476     if inc_dirs:
00477         if isinstance(inc_dirs, list):
00478             resources.inc_dirs.extend(inc_dirs)
00479         else:
00480             resources.inc_dirs.append(inc_dirs)
00481 
00482     # Load resources into the config system which might expand/modify resources
00483     # based on config data
00484     resources = toolchain.config.load_resources(resources)
00485 
00486     # Set the toolchain's configuration data
00487     toolchain.set_config_data(toolchain.config.get_config_data())
00488 
00489     return resources
00490 
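# Illustrative sketch (not part of the original file): scanning sources with an
# already-prepared toolchain and adding an extra include directory (assumed
# path), then compiling the result.
#
#   resources = scan_resources(["."], toolchain, inc_dirs=["extra/include"])
#   objects = toolchain.compile_sources(resources, resources.inc_dirs)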
00491 def build_project (src_paths, build_path, target, toolchain_name,
00492                   libraries_paths=None, linker_script=None,
00493                   clean=False, notify=None, verbose=False, name=None,
00494                   macros=None, inc_dirs=None, jobs=1, silent=False,
00495                   report=None, properties=None, project_id=None,
00496                   project_description=None, extra_verbose=False, config=None,
00497                   app_config=None, build_profile=None, stats_depth=None):
00498     """ Build a project. A project may be a test or a user program.
00499 
00500     Positional arguments:
00501     src_paths - a path or list of paths that contain all files needed to build
00502                 the project
00503     build_path - the directory where all of the object files will be placed
00504     target - the MCU or board that the project will compile for
00505     toolchain_name - the name of the build tools
00506 
00507     Keyword arguments:
00508     libraries_paths - The location of libraries to include when linking
00509     linker_script - the file that drives the linker to do its job
00510     clean - Rebuild everything if True
00511     notify - Notify function for logs
00512     verbose - Write the actual tools command lines used if True
00513     name - the name of the project
00514     macros - additional macros
00515     inc_dirs - additional directories where include files may be found
00516     jobs - how many compilers we can run at once
00517     silent - suppress printing of progress indicators
00518     report - a dict where a result may be appended
00519     properties - a dict to fill with the test properties (target, toolchain, vendor)
00520     project_id - the name put in the report
00521     project_description - the human-readable version of what this thing does
00522     extra_verbose - even more output!
00523     config - a Config object to use instead of creating one
00524     app_config - location of a chosen mbed_app.json file
00525     build_profile - a dict of flags that will be passed to the compiler
00526     stats_depth - depth level for memap to display file/dirs
00527     """
00528 
00529     # Convert src_path to a list if needed
00530     if not isinstance(src_paths, list):
00531         src_paths = [src_paths]
00532     # Extend src_paths with libraries_paths
00533     if libraries_paths is not None:
00534         src_paths.extend(libraries_paths)
00535         inc_dirs = (inc_dirs or []) + list(map(dirname, libraries_paths))
00536 
00537     if clean and exists(build_path):
00538         rmtree(build_path)
00539     mkdir(build_path)
00540 
00541     toolchain = prepare_toolchain(
00542         src_paths, build_path, target, toolchain_name, macros=macros,
00543         clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
00544         extra_verbose=extra_verbose, config=config, app_config=app_config,
00545         build_profile=build_profile)
00546 
00547     # The first path will give the name to the library
00548     name = (name or toolchain.config.name or
00549             basename(normpath(abspath(src_paths[0]))))
00550     toolchain.info("Building project %s (%s, %s)" %
00551                    (name, toolchain.target.name, toolchain_name))
00552 
00553     # Initialize reporting
00554     if report != None:
00555         start = time()
00556         # If project_id is specified, use that over the default name
00557         id_name = project_id.upper() if project_id else name.upper()
00558         description = project_description if project_description else name
00559         vendor_label = toolchain.target.extra_labels[0]
00560         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00561         cur_result = create_result(toolchain.target.name, toolchain_name,
00562                                    id_name, description)
00563         if properties != None:
00564             prep_properties(properties, toolchain.target.name, toolchain_name,
00565                             vendor_label)
00566 
00567     try:
00568         # Call unified scan_resources
00569         resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
00570 
00571         # Change linker script if specified
00572         if linker_script is not None:
00573             resources.linker_script = linker_script
00574 
00575         # Compile Sources
00576         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00577         resources.objects.extend(objects)
00578 
00579         # Link Program
00580         if toolchain.config.has_regions:
00581             res, _ = toolchain.link_program(resources, build_path, name + "_application")
00582             region_list = list(toolchain.config.regions)
00583             region_list = [r._replace(filename=res) if r.active else r
00584                            for r in region_list]
00585             res = "%s.%s" % (join(build_path, name),
00586                              getattr(toolchain.target, "OUTPUT_EXT", "bin"))
00587             merge_region_list(region_list, res)
00588         else:
00589             res, _ = toolchain.link_program(resources, build_path, name)
00590 
00591         memap_instance = getattr(toolchain, 'memap_instance', None)
00592         memap_table = ''
00593         if memap_instance:
00594             # Write output to stdout in text (pretty table) format
00595             memap_table = memap_instance.generate_output('table', stats_depth)
00596 
00597             if not silent:
00598                 print(memap_table)
00599 
00600             # Write output to file in JSON format
00601             map_out = join(build_path, name + "_map.json")
00602             memap_instance.generate_output('json', stats_depth, map_out)
00603 
00604             # Write output to file in CSV format for the CI
00605             map_csv = join(build_path, name + "_map.csv")
00606             memap_instance.generate_output('csv-ci', stats_depth, map_csv)
00607 
00608         resources.detect_duplicates(toolchain)
00609 
00610         if report != None:
00611             end = time()
00612             cur_result["elapsed_time"] = end - start
00613             cur_result["output"] = toolchain.get_output() + memap_table
00614             cur_result["result"] = "OK"
00615             cur_result["memory_usage"] = (memap_instance.mem_report
00616                                           if memap_instance is not None else None)
00617             cur_result["bin"] = res
00618             cur_result["elf"] = splitext(res)[0] + ".elf"
00619             cur_result.update(toolchain.report)
00620 
00621             add_result_to_report(report, cur_result)
00622 
00623         return res
00624 
00625     except Exception as exc:
00626         if report != None:
00627             end = time()
00628 
00629             if isinstance(exc, NotSupportedException):
00630                 cur_result["result"] = "NOT_SUPPORTED"
00631             else:
00632                 cur_result["result"] = "FAIL"
00633 
00634             cur_result["elapsed_time"] = end - start
00635 
00636             toolchain_output = toolchain.get_output()
00637             if toolchain_output:
00638                 cur_result["output"] += toolchain_output
00639 
00640             add_result_to_report(report, cur_result)
00641 
00642         # Let Exception propagate
00643         raise
00644 
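# Illustrative sketch (not part of the original file): building a user program
# with this API. The paths, target and project name are assumptions.
#
#   image = build_project(
#       ["."], "BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
#       name="example_app", clean=True, jobs=4)
#   print("Image written to", image)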
00645 def build_library (src_paths, build_path, target, toolchain_name,
00646                   dependencies_paths=None, name=None, clean=False,
00647                   archive=True, notify=None, verbose=False, macros=None,
00648                   inc_dirs=None, jobs=1, silent=False, report=None,
00649                   properties=None, extra_verbose=False, project_id=None,
00650                   remove_config_header_file=False, app_config=None,
00651                   build_profile=None):
00652     """ Build a library
00653 
00654     Positional arguments:
00655     src_paths - a path or list of paths that contain all files needed to build
00656                 the library
00657     build_path - the directory where all of the object files will be placed
00658     target - the MCU or board that the project will compile for
00659     toolchain_name - the name of the build tools
00660 
00661     Keyword arguments:
00662     dependencies_paths - The location of libraries to include when linking
00663     name - the name of the library
00664     clean - Rebuild everything if True
00665     archive - whether the library will create an archive file
00666     notify - Notify function for logs
00667     verbose - Write the actual tools command lines used if True
00668     macros - additional macros
00669     inc_dirs - additional directories where include files may be found
00670     jobs - how many compilers we can run at once
00671     silent - suppress printing of progress indicators
00672     report - a dict where a result may be appended
00673     properties - a dict to fill with the test properties (target, toolchain, vendor)
00674     extra_verbose - even more output!
00675     project_id - the name that goes in the report
00676     remove_config_header_file - delete config header file when done building
00677     app_config - location of a chosen mbed_app.json file
00678     build_profile - a dict of flags that will be passed to the compiler
00679     """
00680 
00681     # Convert src_path to a list if needed
00682     if not isinstance(src_paths, list):
00683         src_paths = [src_paths]
00684 
00685     # Build path
00686     if archive:
00687         # Use temp path when building archive
00688         tmp_path = join(build_path, '.temp')
00689         mkdir(tmp_path)
00690     else:
00691         tmp_path = build_path
00692 
00693     # Clean the build directory
00694     if clean and exists(tmp_path):
00695         rmtree(tmp_path)
00696     mkdir(tmp_path)
00697 
00698     # Pass all params to the unified prepare_toolchain()
00699     toolchain = prepare_toolchain(
00700         src_paths, build_path, target, toolchain_name, macros=macros,
00701         clean=clean, jobs=jobs, notify=notify, silent=silent,
00702         verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
00703         build_profile=build_profile)
00704 
00705     # The first path will give the name to the library
00706     if name is None:
00707         name = basename(normpath(abspath(src_paths[0])))
00708     toolchain.info("Building library %s (%s, %s)" %
00709                    (name, toolchain.target.name, toolchain_name))
00710 
00711     # Initialize reporting
00712     if report != None:
00713         start = time()
00714         # If project_id is specified, use that over the default name
00715         id_name = project_id.upper() if project_id else name.upper()
00716         description = name
00717         vendor_label = toolchain.target.extra_labels[0]
00718         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00719         cur_result = create_result(toolchain.target.name, toolchain_name,
00720                                    id_name, description)
00721         cur_result['type'] = 'library'
00722         if properties != None:
00723             prep_properties(properties, toolchain.target.name, toolchain_name,
00724                             vendor_label)
00725 
00726     for src_path in src_paths:
00727         if not exists(src_path):
00728             error_msg = "The library source folder does not exist: %s" % src_path
00729             if report != None:
00730                 cur_result["output"] = error_msg
00731                 cur_result["result"] = "FAIL"
00732                 add_result_to_report(report, cur_result)
00733             raise Exception(error_msg)
00734 
00735     try:
00736         # Call unified scan_resources
00737         resources = scan_resources(src_paths, toolchain,
00738                                    dependencies_paths=dependencies_paths,
00739                                    inc_dirs=inc_dirs)
00740 
00741 
00742         # Copy headers, objects and static libraries - all files needed for
00743         # static lib
00744         toolchain.copy_files(resources.headers, build_path, resources=resources)
00745         toolchain.copy_files(resources.objects, build_path, resources=resources)
00746         toolchain.copy_files(resources.libraries, build_path,
00747                              resources=resources)
00748         toolchain.copy_files(resources.json_files, build_path,
00749                              resources=resources)
00750         if resources.linker_script:
00751             toolchain.copy_files(resources.linker_script, build_path,
00752                                  resources=resources)
00753 
00754         if resources.hex_files:
00755             toolchain.copy_files(resources.hex_files, build_path,
00756                                  resources=resources)
00757 
00758         # Compile Sources
00759         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00760         resources.objects.extend(objects)
00761 
00762         if archive:
00763             toolchain.build_library(objects, build_path, name)
00764 
00765         if remove_config_header_file:
00766             config_header_path = toolchain.get_config_header()
00767             if config_header_path:
00768                 remove(config_header_path)
00769 
00770         if report != None:
00771             end = time()
00772             cur_result["elapsed_time"] = end - start
00773             cur_result["output"] = toolchain.get_output()
00774             cur_result["result"] = "OK"
00775 
00776 
00777             add_result_to_report(report, cur_result)
00778         return True
00779 
00780     except Exception as exc:
00781         if report != None:
00782             end = time()
00783 
00784             if isinstance(exc, ToolException):
00785                 cur_result["result"] = "FAIL"
00786             elif isinstance(exc, NotSupportedException):
00787                 cur_result["result"] = "NOT_SUPPORTED"
00788 
00789             cur_result["elapsed_time"] = end - start
00790 
00791             toolchain_output = toolchain.get_output()
00792             if toolchain_output:
00793                 cur_result["output"] += toolchain_output
00794 
00795             add_result_to_report(report, cur_result)
00796 
00797         # Let Exception propagate
00798         raise
00799 
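# Illustrative sketch (not part of the original file): building a static
# library archive from a source tree. The paths and names are assumptions.
#
#   ok = build_library(["mylib"], "BUILD/mylib/K64F/GCC_ARM", "K64F", "GCC_ARM",
#                      name="mylib", archive=True, jobs=4)
#   # On success this returns True and leaves an archive named after the
#   # library under the build path.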
00800 ######################
00801 ### Legacy methods ###
00802 ######################
00803 
00804 def mbed2_obj_path(target_name, toolchain_name):
00805     real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
00806     return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)
00807 
00808 def build_lib (lib_id, target, toolchain_name, verbose=False,
00809               clean=False, macros=None, notify=None, jobs=1, silent=False,
00810               report=None, properties=None, extra_verbose=False,
00811               build_profile=None):
00812     """ Legacy method for building mbed libraries
00813 
00814     Positional arguments:
00815     lib_id - the library's unique identifier
00816     target - the MCU or board that the project will compile for
00817     toolchain_name - the name of the build tools
00818 
00819     Keyword arguments:
00820     clean - Rebuild everything if True
00821     verbose - Write the actual tools command lines used if True
00822     macros - additional macros
00823     notify - Notify function for logs
00824     jobs - how many compilers we can run at once
00825     silent - suppress printing of progress indicators
00826     report - a dict where a result may be appended
00827     properties - a dict to fill with the test properties (target, toolchain, vendor)
00828     extra_verbose - even more output!
00829     build_profile - a dict of flags that will be passed to the compiler
00830     """
00831     lib = Library(lib_id)
00832     if not lib.is_supported(target, toolchain_name):
00833         print('Library "%s" is not yet supported on target %s with toolchain %s'
00834               % (lib_id, target.name, toolchain_name))
00835         return False
00836 
00837     # We need to combine macros from parameter list with macros from library
00838     # definition
00839     lib_macros = lib.macros if lib.macros else []
00840     if macros:
00841         macros.extend(lib_macros)
00842     else:
00843         macros = lib_macros
00844 
00845     src_paths = lib.source_dir
00846     build_path = lib.build_dir
00847     dependencies_paths = lib.dependencies
00848     inc_dirs = lib.inc_dirs
00849     inc_dirs_ext = lib.inc_dirs_ext
00850 
00851     if not isinstance(src_paths, list):
00852         src_paths = [src_paths]
00853 
00854     # The first path will give the name to the library
00855     name = basename(src_paths[0])
00856 
00857     if report != None:
00858         start = time()
00859         id_name = name.upper()
00860         description = name
00861         vendor_label = target.extra_labels[0]
00862         cur_result = None
00863         prep_report(report, target.name, toolchain_name, id_name)
00864         cur_result = create_result(target.name, toolchain_name, id_name,
00865                                    description)
00866 
00867         if properties != None:
00868             prep_properties(properties, target.name, toolchain_name,
00869                             vendor_label)
00870 
00871     for src_path in src_paths:
00872         if not exists(src_path):
00873             error_msg = "The library source folder does not exist: %s" % src_path
00874 
00875             if report != None:
00876                 cur_result["output"] = error_msg
00877                 cur_result["result"] = "FAIL"
00878                 add_result_to_report(report, cur_result)
00879 
00880             raise Exception(error_msg)
00881 
00882     try:
00883         # Toolchain instance
00884         # Create the desired build directory structure
00885         bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
00886         mkdir(bin_path)
00887         tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
00888                                                             toolchain_name))
00889         mkdir(tmp_path)
00890 
00891         toolchain = prepare_toolchain(
00892             src_paths, tmp_path, target, toolchain_name, macros=macros,
00893             notify=notify, silent=silent, extra_verbose=extra_verbose,
00894             build_profile=build_profile, jobs=jobs, clean=clean)
00895 
00896         toolchain.info("Building library %s (%s, %s)" %
00897                        (name.upper(), target.name, toolchain_name))
00898 
00899         # Take into account the library configuration (MBED_CONFIG_FILE)
00900         config = toolchain.config
00901         config.add_config_files([MBED_CONFIG_FILE])
00902 
00903         # Scan Resources
00904         resources = []
00905         for src_path in src_paths:
00906             resources.append(toolchain.scan_resources(src_path))
00907 
00908         # Add extra include directories / files which are required by the library.
00909         # These files are usually not in the same directory as the source files,
00910         # so the previous scan will not have included them.
00911         if inc_dirs_ext is not None:
00912             for inc_ext in inc_dirs_ext:
00913                 resources.append(toolchain.scan_resources(inc_ext))
00914 
00915         # Dependencies Include Paths
00916         dependencies_include_dir = []
00917         if dependencies_paths is not None:
00918             for path in dependencies_paths:
00919                 lib_resources = toolchain.scan_resources(path)
00920                 dependencies_include_dir.extend(lib_resources.inc_dirs)
00921                 dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))
00922 
00923         if inc_dirs:
00924             dependencies_include_dir.extend(inc_dirs)
00925 
00926         # Add other discovered configuration data to the configuration object
00927         for res in resources:
00928             config.load_resources(res)
00929         toolchain.set_config_data(toolchain.config.get_config_data())
00930 
00931 
00932         # Copy Headers
00933         for resource in resources:
00934             toolchain.copy_files(resource.headers, build_path,
00935                                  resources=resource)
00936 
00937         dependencies_include_dir.extend(
00938             toolchain.scan_resources(build_path).inc_dirs)
00939 
00940         # Compile Sources
00941         objects = []
00942         for resource in resources:
00943             objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))
00944 
00945         needed_update = toolchain.build_library(objects, bin_path, name)
00946 
00947         if report != None and needed_update:
00948             end = time()
00949             cur_result["elapsed_time"] = end - start
00950             cur_result["output"] = toolchain.get_output()
00951             cur_result["result"] = "OK"
00952 
00953             add_result_to_report(report, cur_result)
00954         return True
00955 
00956     except Exception:
00957         if report != None:
00958             end = time()
00959             cur_result["result"] = "FAIL"
00960             cur_result["elapsed_time"] = end - start
00961 
00962             toolchain_output = toolchain.get_output()
00963             if toolchain_output:
00964                 cur_result["output"] += toolchain_output
00965 
00966             add_result_to_report(report, cur_result)
00967 
00968         # Let Exception propagate
00969         raise
00970 
00971 # We do have unique legacy conventions about how we build and package the mbed
00972 # library
00973 def build_mbed_libs (target, toolchain_name, verbose=False,
00974                     clean=False, macros=None, notify=None, jobs=1, silent=False,
00975                     report=None, properties=None, extra_verbose=False,
00976                     build_profile=None):
00977     """ Returns True if the library was built and False if building was
00978     skipped
00979 
00980     Positional arguments:
00981     target - the MCU or board that the project will compile for
00982     toolchain_name - the name of the build tools
00983 
00984     Keyword arguments:
00985     verbose - Write the actual tools command lines used if True
00986     clean - Rebuild everything if True
00987     macros - additional macros
00988     notify - Notify function for logs
00989     jobs - how many compilers we can run at once
00990     silent - suppress printing of progress indicators
00991     report - a dict where a result may be appended
00992     properties - a dict to fill with the test properties (target, toolchain, vendor)
00993     extra_verbose - even more output!
00994     build_profile - a dict of flags that will be passed to the compiler
00995     """
00996 
00997     if report != None:
00998         start = time()
00999         id_name = "MBED"
01000         description = "mbed SDK"
01001         vendor_label = target.extra_labels[0]
01002         cur_result = None
01003         prep_report(report, target.name, toolchain_name, id_name)
01004         cur_result = create_result(target.name, toolchain_name, id_name,
01005                                    description)
01006 
01007         if properties != None:
01008             prep_properties(properties, target.name, toolchain_name,
01009                             vendor_label)
01010 
01011     # Check toolchain support
01012     if toolchain_name not in target.supported_toolchains:
01013         supported_toolchains_text = ", ".join(target.supported_toolchains)
01014         print('%s target is not yet supported by toolchain %s' %
01015               (target.name, toolchain_name))
01016         print('%s target supports %s toolchain%s' %
01017               (target.name, supported_toolchains_text, 's'
01018                if len(target.supported_toolchains) > 1 else ''))
01019 
01020         if report != None:
01021             cur_result["result"] = "SKIP"
01022             add_result_to_report(report, cur_result)
01023 
01024         return False
01025 
01026     try:
01027         # Source and Build Paths
01028         build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
01029         build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
01030         mkdir(build_toolchain)
01031 
01032         # Toolchain
01033         tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
01034         mkdir(tmp_path)
01035 
01036         toolchain = prepare_toolchain(
01037             [""], tmp_path, target, toolchain_name, macros=macros, verbose=verbose,
01038             notify=notify, silent=silent, extra_verbose=extra_verbose,
01039             build_profile=build_profile, jobs=jobs, clean=clean)
01040 
01041         # Take into account the library configuration (MBED_CONFIG_FILE)
01042         config = toolchain.config
01043         config.add_config_files([MBED_CONFIG_FILE])
01044         toolchain.set_config_data(toolchain.config.get_config_data())
01045 
01046         # mbed
01047         toolchain.info("Building library %s (%s, %s)" %
01048                        ('MBED', target.name, toolchain_name))
01049 
01050         # Common Headers
01051         toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
01052         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
01053 
01054         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
01055                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
01056                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
01057             resources = toolchain.scan_resources(dir)
01058             toolchain.copy_files(resources.headers, dest)
01059             library_incdirs.append(dest)
01060 
01061         cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH)
01062         toolchain.copy_files(cmsis_implementation.headers, build_target)
01063         toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain)
01064         toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain)
01065 
01066         hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH)
01067         toolchain.copy_files(hal_implementation.headers +
01068                              hal_implementation.hex_files +
01069                              hal_implementation.libraries +
01070                              [MBED_CONFIG_FILE],
01071                              build_target, resources=hal_implementation)
01072         toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
01073         toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
01074         incdirs = toolchain.scan_resources(build_target).inc_dirs
01075         objects = toolchain.compile_sources(cmsis_implementation + hal_implementation,
01076                                             library_incdirs + incdirs + [tmp_path])
01077         toolchain.copy_files(objects, build_toolchain)
01078 
01079         # Common Sources
01080         mbed_resources = None
01081         for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
01082             mbed_resources += toolchain.scan_resources(dir)
01083 
01084         objects = toolchain.compile_sources(mbed_resources,
01085                                             library_incdirs + incdirs)
01086 
01087         # A number of compiled files need to be copied as loose objects, because
01088         # of the way the linker searches for symbols in archives. These are:
01089         #   - mbed_retarget.o: to make sure that the C standard lib symbols get
01090         #                 overridden
01091         #   - mbed_board.o: mbed_die is weak
01092         #   - mbed_overrides.o: this contains platform overrides of various
01093         #                       weak SDK functions
01094         #   - mbed_main.o: this contains main redirection
01095         separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o',
01096                                             'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], []
01097 
01098         for obj in objects:
01099             for name in separate_names:
01100                 if obj.endswith(name):
01101                     separate_objects.append(obj)
01102 
01103         for obj in separate_objects:
01104             objects.remove(obj)
01105 
01106         toolchain.build_library(objects, build_toolchain, "mbed")
01107 
01108         for obj in separate_objects:
01109             toolchain.copy_files(obj, build_toolchain)
01110 
01111         if report != None:
01112             end = time()
01113             cur_result["elapsed_time"] = end - start
01114             cur_result["output"] = toolchain.get_output()
01115             cur_result["result"] = "OK"
01116 
01117             add_result_to_report(report, cur_result)
01118 
01119         return True
01120 
01121     except Exception as exc:
01122         if report != None:
01123             end = time()
01124             cur_result["result"] = "FAIL"
01125             cur_result["elapsed_time"] = end - start
01126 
01127             toolchain_output = toolchain.get_output()
01128             if toolchain_output:
01129                 cur_result["output"] += toolchain_output
01130 
01131             cur_result["output"] += str(exc)
01132 
01133             add_result_to_report(report, cur_result)
01134 
01135         # Let Exception propagate
01136         raise
01137 
01138 
01139 def get_unique_supported_toolchains (release_targets=None):
01140     """ Get list of all unique toolchains supported by targets
01141 
01142     Keyword arguments:
01143     release_targets - tuple structure returned from get_mbed_official_release().
01144                       If release_targets is not specified, then it queries all
01145                       known targets
01146     """
01147     unique_supported_toolchains = []
01148 
01149     if not release_targets:
01150         for target in TARGET_NAMES:
01151             for toolchain in TARGET_MAP[target].supported_toolchains:
01152                 if toolchain not in unique_supported_toolchains:
01153                     unique_supported_toolchains.append(toolchain)
01154     else:
01155         for target in release_targets:
01156             for toolchain in target[1]:
01157                 if toolchain not in unique_supported_toolchains:
01158                     unique_supported_toolchains.append(toolchain)
01159 
01160     if "ARM" in unique_supported_toolchains:
01161         unique_supported_toolchains.append("ARMC6")
01162 
01163     return unique_supported_toolchains
01164 
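# Illustrative sketch (not part of the original file): collecting the union of
# supported toolchains, either for all known targets or for one official
# release.
#
#   all_toolchains = get_unique_supported_toolchains()
#   release_5 = get_mbed_official_release("5")
#   release_toolchains = get_unique_supported_toolchains(release_5)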
01165 def mcu_toolchain_list (release_version='5'):
01166     """  Shows list of toolchains
01167 
01168     """
01169 
01170     if isinstance(release_version, basestring):
01171         # Force release_version to lowercase if it is a string
01172         release_version = release_version.lower()
01173     else:
01174         # Otherwise default to printing all known targets and toolchains
01175         release_version = 'all'
01176 
01177 
01178     version_release_targets = {}
01179     version_release_target_names = {}
01180 
01181     for version in RELEASE_VERSIONS:
01182         version_release_targets[version] = get_mbed_official_release(version)
01183         version_release_target_names[version] = [x[0] for x in
01184                                                  version_release_targets[
01185                                                      version]]
01186 
01187     if release_version in RELEASE_VERSIONS:
01188         release_targets = version_release_targets[release_version]
01189     else:
01190         release_targets = None
01191 
01192     unique_supported_toolchains = get_unique_supported_toolchains(
01193         release_targets)
01194     columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + unique_supported_toolchains
01195     return "\n".join(columns)
01196 
01197 
01198 def mcu_target_list (release_version='5'):
01199     """  Shows target list
01200 
01201     """
01202 
01203     if isinstance(release_version, basestring):
01204         # Force release_version to lowercase if it is a string
01205         release_version = release_version.lower()
01206     else:
01207         # Otherwise default to printing all known targets and toolchains
01208         release_version = 'all'
01209 
01210 
01211     version_release_targets = {}
01212     version_release_target_names = {}
01213 
01214     for version in RELEASE_VERSIONS:
01215         version_release_targets[version] = get_mbed_official_release(version)
01216         version_release_target_names[version] = [x[0] for x in
01217                                                  version_release_targets[
01218                                                      version]]
01219 
01220     if release_version in RELEASE_VERSIONS:
01221         release_targets = version_release_targets[release_version]
01222     else:
01223         release_targets = None
01224 
01225     target_names = []
01226 
01227     if release_targets:
01228         target_names = [x[0] for x in release_targets]
01229     else:
01230         target_names = TARGET_NAMES
01231 
01232     return "\n".join(target_names)
01233 
01234 
01235 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None,
01236                          release_version='5'):
01237     """  Shows target map using prettytable
01238 
01239     Keyword arguments:
01240     verbose_html - emit html instead of a simple table
01241     platform_filter - only include targets that match this regular expression
01242     release_version - get the matrix for this major version number
01243     """
01244     # Only use it in this function so building works without extra modules
01245     from prettytable import PrettyTable
01246 
01247     if isinstance(release_version, basestring):
01248         # Force release_version to lowercase if it is a string
01249         release_version = release_version.lower()
01250     else:
01251         # Otherwise default to printing all known targets and toolchains
01252         release_version = 'all'
01253 
01254 
01255     version_release_targets = {}
01256     version_release_target_names = {}
01257 
01258     for version in RELEASE_VERSIONS:
01259         version_release_targets[version] = get_mbed_official_release(version)
01260         version_release_target_names[version] = [
01261             x[0] for x in version_release_targets[version]
01262         ]
01263 
01264     if release_version in RELEASE_VERSIONS:
01265         release_targets = version_release_targets[release_version]
01266     else:
01267         release_targets = None
01268 
01269     unique_supported_toolchains = get_unique_supported_toolchains(
01270         release_targets)
01271     prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
01272 
01273     # All tests status table print
01274     columns = prepend_columns + unique_supported_toolchains
01275     table_printer = PrettyTable(columns)
01276     # Align table
01277     for col in columns:
01278         table_printer.align[col] = "c"
01279     table_printer.align["Target"] = "l"
01280 
01281     perm_counter = 0
01282     target_counter = 0
01283 
01284     target_names = []
01285 
01286     if release_targets:
01287         target_names = [x[0] for x in release_targets]
01288     else:
01289         target_names = TARGET_NAMES
01290 
01291     for target in sorted(target_names):
01292         if platform_filter is not None:
01293             # Filter targets by regex; skip those that do not match
01294             if re.search(platform_filter, target) is None:
01295                 continue
01296         target_counter += 1
01297 
01298         row = [target]  # First column is platform name
01299 
01300         for version in RELEASE_VERSIONS:
01301             if target in version_release_target_names[version]:
01302                 text = "Supported"
01303             else:
01304                 text = "-"
01305             row.append(text)
01306 
01307         for unique_toolchain in unique_supported_toolchains:
01308             if (unique_toolchain in TARGET_MAP[target].supported_toolchains or
01309                 (unique_toolchain == "ARMC6" and
01310                  "ARM" in TARGET_MAP[target].supported_toolchains)):
01311                 text = "Supported"
01312                 perm_counter += 1
01313             else:
01314                 text = "-"
01315 
01316             row.append(text)
01317         table_printer.add_row(row)
01318 
01319     result = table_printer.get_html_string() if verbose_html \
01320              else table_printer.get_string()
01321     result += "\n"
01322     result += "Supported targets: %d\n"% (target_counter)
01323     if target_counter == 1:
01324         result += "Supported toolchains: %d"% (perm_counter)
01325     return result
01326 
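# Usage sketch for mcu_toolchain_matrix (not part of the original module):
# print the mbed OS 5 support matrix for targets whose names match the
# regular expression "K64F"; the filter value is only an illustration.
def _example_mcu_toolchain_matrix():
    print(mcu_toolchain_matrix(release_version='5', platform_filter="K64F"))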
01327 
01328 def get_target_supported_toolchains (target):
01329     """ Returns target supported toolchains list
01330 
01331     Positional arguments:
01332     target - the target to get the supported toolchains of
01333     """
01334     return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
01335         else None
01336 
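# Usage sketch for get_target_supported_toolchains (not part of the original
# module); "K64F" is only an illustrative target name.
def _example_get_target_supported_toolchains():
    toolchains = get_target_supported_toolchains("K64F")
    if toolchains is None:
        print("unknown target")
    else:
        print(", ".join(toolchains))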
01337 
01338 def print_build_results (result_list, build_name):
01339     """ Generate result string for build results
01340 
01341     Positional arguments:
01342     result_list - the list of results to print
01343     build_name - the name of the build we are printing result for
01344     """
01345     result = ""
01346     if len(result_list) > 0:
01347         result += build_name + "\n"
01348         result += "\n".join(["  * %s" % f for f in result_list])
01349         result += "\n"
01350     return result
01351 
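# Usage sketch for print_build_results (not part of the original module): an
# empty result list contributes nothing to the summary string.
def _example_print_build_results():
    summary = print_build_results(["K64F::GCC_ARM::BLINKY"], "Build successes:")
    summary += print_build_results([], "Build failures:")
    print(summary)
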
01352 def print_build_memory_usage (report):
01353     """ Generate result table with memory usage values for build results
01354     Aggregates reports obtained from the toolchain's get_memory_summary()
01355 
01356     Positional arguments:
01357     report - Report generated during build procedure.
01358     """
01359     from prettytable import PrettyTable
01360     columns_text = ['name', 'target', 'toolchain']
01361     columns_int = ['static_ram', 'total_flash']
01362     table = PrettyTable(columns_text + columns_int)
01363 
01364     for col in columns_text:
01365         table.align[col] = 'l'
01366 
01367     for col in columns_int:
01368         table.align[col] = 'r'
01369 
01370     for target in report:
01371         for toolchain in report[target]:
01372             for name in report[target][toolchain]:
01373                 for dlist in report[target][toolchain][name]:
01374                     for dlistelem in dlist:
01375                         # Get 'memory_usage' record and build table with
01376                         # statistics
01377                         record = dlist[dlistelem]
01378                         if 'memory_usage' in record and record['memory_usage']:
01379                             # Note that summary should be in the last record of
01380                             # 'memory_usage' section. This is why we are
01381                             # grabbing last "[-1]" record.
01382                             row = [
01383                                 record['description'],
01384                                 record['target_name'],
01385                                 record['toolchain_name'],
01386                                 record['memory_usage'][-1]['summary'][
01387                                     'static_ram'],
01388                                 record['memory_usage'][-1]['summary'][
01389                                     'total_flash'],
01390                             ]
01391                             table.add_row(row)
01392 
01393     result = "Memory map breakdown for built projects (values in Bytes):\n"
01394     result += table.get_string(sortby='name')
01395     return result
01396 
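# Usage sketch for print_build_memory_usage (not part of the original module).
# The function walks report[target][toolchain][name], which is a list of dicts
# whose values are per-build records; all values below are illustrative only.
def _example_print_build_memory_usage():
    report = {
        "K64F": {"GCC_ARM": {"blinky": [{
            "run": {
                "description": "blinky",
                "target_name": "K64F",
                "toolchain_name": "GCC_ARM",
                "memory_usage": [{"summary": {"static_ram": 10000,
                                              "total_flash": 50000}}],
            },
        }]}},
    }
    print(print_build_memory_usage(report))
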
01397 def write_build_report (build_report, template_filename, filename):
01398     """Write a build report to disk using a template file
01399 
01400     Positional arguments:
01401     build_report - a report generated by the build system
01402     template_filename - a file that contains the template for the style of build
01403                         report
01404     filename - the location on disk to write the file to
01405     """
01406     build_report_failing = []
01407     build_report_passing = []
01408 
01409     for report in build_report:
01410         if len(report["failing"]) > 0:
01411             build_report_failing.append(report)
01412         else:
01413             build_report_passing.append(report)
01414 
01415     env = Environment(extensions=['jinja2.ext.with_'])
01416     env.loader = FileSystemLoader('ci_templates')
01417     template = env.get_template(template_filename)
01418 
01419     with open(filename, 'w+') as placeholder:
01420         placeholder.write(template.render(
01421             failing_builds=build_report_failing,
01422             passing_builds=build_report_passing))
01423 
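# Usage sketch for write_build_report (not part of the original module). The
# report entries, the "build_report.html" template under a local ci_templates/
# directory, and the output path are all assumptions for illustration.
def _example_write_build_report():
    reports = [
        {"name": "blinky", "failing": [], "passing": ["K64F::GCC_ARM"]},
        {"name": "rtos_basic", "failing": ["LPC1768::ARM"], "passing": []},
    ]
    write_build_report(reports, "build_report.html", "build_report_out.html")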
01424 
01425 def merge_build_data(filename, toolchain_report, app_type):
01426     path_to_file = dirname(abspath(filename))
01427     try:
01428         build_data = load(open(filename))
01429     except (IOError, ValueError):
01430         build_data = {'builds': []}
01431     for tgt in toolchain_report.values():
01432         for tc in tgt.values():
01433             for project in tc.values():
01434                 for build in project:
01435                     try:
01436                         build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
01437                         build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
01438                     except KeyError:
01439                         pass
01440                     if 'type' not in build[0]:
01441                         build[0]['type'] = app_type
01442                     build_data['builds'].append(build[0])
01443     dump(build_data, open(filename, "wb"), indent=4, separators=(',', ': '))
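

# Usage sketch for merge_build_data (not part of the original module):
# toolchain_report[target][toolchain][project] is a list of builds, each a
# list whose first element is the build record. The paths, report contents
# and the "application" type are illustrative; a BUILD/ directory is assumed
# to exist so the merged JSON can be written back.
def _example_merge_build_data():
    toolchain_report = {"K64F": {"GCC_ARM": {"blinky": [[{
        "elf": "BUILD/K64F/GCC_ARM/blinky.elf",
        "bin": "BUILD/K64F/GCC_ARM/blinky.bin",
    }]]}}}
    merge_build_data("BUILD/build_data.json", toolchain_report, "application")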