build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .targets import TARGET_NAMES, TARGET_MAP
00046 from .libraries import Library
00047 from .toolchains import TOOLCHAIN_CLASSES
00048 from .config import Config
00049 
00050 RELEASE_VERSIONS = ['2', '5']
00051 
00052 def prep_report (report, target_name, toolchain_name, id_name):
00053     """Setup report keys
00054 
00055     Positional arguments:
00056     report - the report to fill
00057     target_name - the target being used
00058     toolchain_name - the toolchain being used
00059     id_name - the name of the executable or library being built
00060     """
00061     if not target_name in report:
00062         report[target_name] = {}
00063 
00064     if not toolchain_name in report[target_name]:
00065         report[target_name][toolchain_name] = {}
00066 
00067     if not id_name in report[target_name][toolchain_name]:
00068         report[target_name][toolchain_name][id_name] = []
00069 
00070 def prep_properties (properties, target_name, toolchain_name, vendor_label):
00071     """Setup test properties
00072 
00073     Positional arguments:
00074     properties - the dict to fill
00075     target_name - the target the test is targeting
00076     toolchain_name - the toolchain that will compile the test
00077     vendor_label - the vendor
00078     """
00079     if not target_name in properties:
00080         properties[target_name] = {}
00081 
00082     if not toolchain_name in properties[target_name]:
00083         properties[target_name][toolchain_name] = {}
00084 
00085     properties[target_name][toolchain_name]["target"] = target_name
00086     properties[target_name][toolchain_name]["vendor"] = vendor_label
00087     properties[target_name][toolchain_name]["toolchain"] = toolchain_name
00088 
00089 def create_result (target_name, toolchain_name, id_name, description):
00090     """Create a result dictionary
00091 
00092     Positional arguments:
00093     target_name - the target being built for
00094     toolchain_name - the toolchain doing the building
00095     id_name - the name of the executable or library being built
00096     description - a human readable description of what's going on
00097     """
00098     cur_result = {}
00099     cur_result["target_name"] = target_name
00100     cur_result["toolchain_name"] = toolchain_name
00101     cur_result["id"] = id_name
00102     cur_result["description"] = description
00103     cur_result["elapsed_time"] = 0
00104     cur_result["output"] = ""
00105 
00106     return cur_result
00107 
00108 def add_result_to_report (report, result):
00109     """Add a single result to a report dictionary
00110 
00111     Positional arguments:
00112     report - the report to append to
00113     result - the result to append
00114     """
00115     result["date"] = datetime.datetime.utcnow().isoformat()
00116     result["uuid"] = str(uuid.uuid1())
00117     target = result["target_name"]
00118     toolchain = result["toolchain_name"]
00119     id_name = result['id']
00120     result_wrap = {0: result}
00121     report[target][toolchain][id_name].append(result_wrap)
00122 
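# Illustrative usage sketch of the report helpers above (the "MY_APP" id and
# the values are hypothetical):
#
#     report = {}
#     prep_report(report, "K64F", "GCC_ARM", "MY_APP")
#     result = create_result("K64F", "GCC_ARM", "MY_APP", "example build")
#     add_result_to_report(report, result)
#     # report == {"K64F": {"GCC_ARM": {"MY_APP": [{0: result}]}}}
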
00123 def get_config (src_paths, target, toolchain_name):
00124     """Get the configuration object for a target-toolchain combination
00125 
00126     Positional arguments:
00127     src_paths - paths to scan for the configuration files
00128     target - the device we are building for
00129     toolchain_name - the string that identifies the build tools
00130     """
00131     # Convert src_paths to a list if needed
00132     if not isinstance(src_paths, list):
00133         src_paths = [src_paths]
00134 
00135     # Pass all params to the unified prepare_resources()
00136     toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)
00137 
00138     # Scan src_path for config files
00139     resources = toolchain.scan_resources(src_paths[0])
00140     for path in src_paths[1:]:
00141         resources.add(toolchain.scan_resources(path))
00142 
00143     # Update configuration files until added features creates no changes
00144     prev_features = set()
00145     while True:
00146         # Update the configuration with any .json files found while scanning
00147         toolchain.config.add_config_files(resources.json_files)
00148 
00149         # Add features while we find new ones
00150         features = set(toolchain.config.get_features())
00151         if features == prev_features:
00152             break
00153 
00154         for feature in features:
00155             if feature in resources.features:
00156                 resources += resources.features[feature]
00157 
00158         prev_features = features
00159     toolchain.config.validate_config()
00160     if toolchain.config.has_regions:
00161         _ = list(toolchain.config.regions)
00162 
00163     cfg, macros = toolchain.config.get_config_data()
00164     features = toolchain.config.get_features()
00165     return cfg, macros, features
00166 
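# Illustrative call, assuming an application tree at "./source" (hypothetical
# path) and a target name known to TARGET_MAP:
#
#     cfg, macros, features = get_config(["./source"], "K64F", "GCC_ARM")
#     # cfg holds the configuration parameters, macros the macro definitions,
#     # and features the list of enabled feature names.
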
00167 def is_official_target (target_name, version):
00168     """ Returns True, None if a target is part of the official release for the
00169     given version. Return False, 'reason' if a target is not part of the
00170     official release for the given version.
00171 
00172     Positional arguments:
00173     target_name - Name if the target (ex. 'K64F')
00174     version - The release version string. Should be a string contained within
00175               RELEASE_VERSIONS
00176     """
00177 
00178     result = True
00179     reason = None
00180     target = TARGET_MAP[target_name]
00181 
00182     if hasattr(target, 'release_versions') \
00183        and version in target.release_versions:
00184         if version == '2':
00185             # For version 2, either ARM or uARM toolchain support is required
00186             required_toolchains = set(['ARM', 'uARM'])
00187 
00188             if not len(required_toolchains.intersection(
00189                     set(target.supported_toolchains))) > 0:
00190                 result = False
00191                 reason = ("Target '%s' must support " % target.name) + \
00192                     ("one of the following toolchains to be included in the") + \
00193                     ((" mbed 2.0 official release: %s" + linesep) %
00194                      ", ".join(required_toolchains)) + \
00195                     ("Currently it is only configured to support the ") + \
00196                     ("following toolchains: %s" %
00197                      ", ".join(target.supported_toolchains))
00198 
00199         elif version == '5':
00200             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
00201             required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
00202             required_toolchains_sorted = list(required_toolchains)
00203             required_toolchains_sorted.sort()
00204             supported_toolchains = set(target.supported_toolchains)
00205             supported_toolchains_sorted = list(supported_toolchains)
00206             supported_toolchains_sorted.sort()
00207 
00208             if not required_toolchains.issubset(supported_toolchains):
00209                 result = False
00210                 reason = ("Target '%s' must support " % target.name) + \
00211                     ("ALL of the following toolchains to be included in the") + \
00212                     ((" mbed OS 5.0 official release: %s" + linesep) %
00213                      ", ".join(required_toolchains_sorted)) + \
00214                     ("Currently it is only configured to support the ") + \
00215                     ("following toolchains: %s" %
00216                      ", ".join(supported_toolchains_sorted))
00217 
00218             elif not target.default_lib == 'std':
00219                 result = False
00220                 reason = ("Target '%s' must set the " % target.name) + \
00221                     ("'default_lib' to 'std' to be included in the ") + \
00222                     ("mbed OS 5.0 official release." + linesep) + \
00223                     ("Currently it is set to '%s'" % target.default_lib)
00224 
00225         else:
00226             result = False
00227             reason = ("Target '%s' has set an invalid release version of '%s'" %
00228                       version) + \
00229                 ("Please choose from the following release versions: %s" %
00230                  ', '.join(RELEASE_VERSIONS))
00231 
00232     else:
00233         result = False
00234         if not hasattr(target, 'release_versions'):
00235             reason = "Target '%s' " % target.name
00236             reason += "does not have the 'release_versions' key set"
00237         elif not version in target.release_versions:
00238             reason = "Target '%s' does not contain the version '%s' " % \
00239                      (target.name, version)
00240             reason += "in its 'release_versions' key"
00241 
00242     return result, reason
00243 
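# Illustrative check of a single target against a release:
#
#     ok, reason = is_official_target("K64F", "5")
#     if not ok:
#         print(reason)
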
00244 def transform_release_toolchains (toolchains, version):
00245     """ Given a list of toolchains and a release version, return a list of
00246     only the supported toolchains for that release
00247 
00248     Positional arguments:
00249     toolchains - The list of toolchains
00250     version - The release version string. Should be a string contained within
00251               RELEASE_VERSIONS
00252     """
00253     if version == '5':
00254         return ['ARM', 'GCC_ARM', 'IAR']
00255     else:
00256         return toolchains
00257 
00258 
00259 def get_mbed_official_release (version):
00260     """ Given a release version string, return a tuple that contains a target
00261     and the supported toolchains for that release.
00262     Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
00263                            ('K64F', ('ARM', 'GCC_ARM')), ...)
00264 
00265     Positional arguments:
00266     version - The version string. Should be a string contained within
00267               RELEASE_VERSIONS
00268     """
00269 
00270     mbed_official_release = (
00271         tuple(
00272             tuple(
00273                 [
00274                     TARGET_MAP[target].name,
00275                     tuple(transform_release_toolchains(
00276                         TARGET_MAP[target].supported_toolchains, version))
00277                 ]
00278             ) for target in TARGET_NAMES \
00279             if (hasattr(TARGET_MAP[target], 'release_versions')
00280                 and version in TARGET_MAP[target].release_versions)
00281         )
00282     )
00283 
00284     for target in mbed_official_release:
00285         is_official, reason = is_official_target(target[0], version)
00286 
00287         if not is_official:
00288             raise InvalidReleaseTargetException(reason)
00289 
00290     return mbed_official_release
00291 
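# Sketch of the returned structure for version '5' (actual contents depend on
# TARGET_MAP; toolchains are normalized by transform_release_toolchains):
#
#     release = get_mbed_official_release("5")
#     # (("K64F", ("ARM", "GCC_ARM", "IAR")),
#     #  ("NUCLEO_F401RE", ("ARM", "GCC_ARM", "IAR")), ...)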
00292 
00293 def prepare_toolchain (src_paths, build_dir, target, toolchain_name,
00294                       macros=None, clean=False, jobs=1,
00295                       notify=None, silent=False, verbose=False,
00296                       extra_verbose=False, config=None,
00297                       app_config=None, build_profile=None):
00298     """ Prepares resource related objects - toolchain, target, config
00299 
00300     Positional arguments:
00301     src_paths - the paths to source directories
00302     target - ['LPC1768', 'LPC11U24', etc.]
00303     toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
00304 
00305     Keyword arguments:
00306     macros - additional macros
00307     clean - Rebuild everything if True
00308     jobs - how many compilers we can run at once
00309     notify - Notify function for logs
00310     silent - suppress printing of progress indicators
00311     verbose - Write the actual tools command lines used if True
00312     extra_verbose - even more output!
00313     config - a Config object to use instead of creating one
00314     app_config - location of a chosen mbed_app.json file
00315     build_profile - a list of mergeable build profiles
00316     """
00317 
00318     # We need to remove all paths which are repeated to avoid
00319     # multiple compilations and linking with the same objects
00320     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
00321 
00322     # If the configuration object was not yet created, create it now
00323     config = config or Config(target, src_paths, app_config=app_config)
00324     target = config.target
00325     try:
00326         cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
00327     except KeyError:
00328         raise KeyError("Toolchain %s not supported" % toolchain_name)
00329 
00330     profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
00331     for contents in build_profile or []:
00332         for key in profile:
00333             profile[key].extend(contents[toolchain_name].get(key, []))
00334 
00335     toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
00336                        extra_verbose=extra_verbose, build_profile=profile)
00337 
00338     toolchain.config = config
00339     toolchain.jobs = jobs
00340     toolchain.build_all = clean
00341     toolchain.VERBOSE = verbose
00342 
00343     return toolchain
00344 
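# Illustrative construction of a toolchain object; the build profile is a list
# of dicts keyed by toolchain name (the flags shown are only an example):
#
#     profile = {"GCC_ARM": {"common": ["-Os"], "c": [], "cxx": [],
#                            "asm": [], "ld": []}}
#     tc = prepare_toolchain(["./source"], "./BUILD", "K64F", "GCC_ARM",
#                            jobs=4, build_profile=[profile])
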
00345 def _printihex(ihex):
00346     import pprint
00347     pprint.PrettyPrinter().pprint(ihex.todict())
00348 
00349 def _real_region_size(region):
00350     try:
00351         part = intelhex_offset(region.filename, offset=region.start)
00352         return (part.maxaddr() - part.minaddr()) + 1
00353     except AttributeError:
00354         return region.size
00355 
00356 
00357 def _fill_header(region_list, current_region):
00358     """Fill an application header region
00359 
00360     This is done in three steps:
00361      * Fill the whole region with zeros
00362      * Fill const, timestamp and size entries with their data
00363      * Fill the digests using this header as the header region
00364     """
00365     region_dict = {r.name: r for r in region_list}
00366     header = IntelHex()
00367     header.puts(current_region.start, b'\x00' * current_region.size)
00368     start = current_region.start
00369     for member in current_region.filename:
00370         _, type, subtype, data = member
00371         member_size = Config.header_member_size(member)
00372         if type == "const":
00373             fmt = {
00374                 "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
00375                 "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
00376             }[subtype]
00377             header.puts(start, struct.pack(fmt, integer(data, 0)))
00378         elif type == "timestamp":
00379             fmt = {"32le": "<L", "64le": "<Q",
00380                    "32be": ">L", "64be": ">Q"}[subtype]
00381             header.puts(start, struct.pack(fmt, int(time())))
00382         elif type == "size":
00383             fmt = {"32le": "<L", "64le": "<Q",
00384                    "32be": ">L", "64be": ">Q"}[subtype]
00385             size = sum(_real_region_size(region_dict[r]) for r in data)
00386             header.puts(start, struct.pack(fmt, size))
00387         elif type == "digest":
00388             if data == "header":
00389                 ih = header[:start]
00390             else:
00391                 ih = intelhex_offset(region_dict[data].filename, offset=region_dict[data].start)
00392             if subtype.startswith("CRCITT32"):
00393                 fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
00394                 header.puts(start, struct.pack(fmt, zlib.crc32(ih.tobinarray())))
00395             elif subtype.startswith("SHA"):
00396                 if subtype == "SHA256":
00397                     hash = hashlib.sha256()
00398                 elif subtype == "SHA512":
00399                     hash = hashlib.sha512()
00400                 hash.update(ih.tobinarray())
00401                 header.puts(start, hash.digest())
00402         start += Config.header_member_size(member)
00403     return header
00404 
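# Sketch of the header-member data consumed by _fill_header(). Each member is a
# 4-tuple whose first element is unused here (presumably a name), followed by
# the type, subtype and data handled above (values below are made up):
#
#     members = [
#         ("magic",   "const",     "32le",       "0x5a51b3d4"),
#         ("time",    "timestamp", "64le",       None),
#         ("size",    "size",      "32le",       ["application"]),
#         ("crc",     "digest",    "CRCITT32le", "header"),
#     ]
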
00405 def merge_region_list (region_list, destination, padding=b'\xFF'):
00406     """Merge the region_list into a single image
00407 
00408     Positional Arguments:
00409     region_list - list of regions, which should contain filenames
00410     destination - file name to write all regions to
00411     padding - bytes to fill gaps with
00412     """
00413     merged = IntelHex()
00414     _, format = splitext(destination)
00415 
00416     print("Merging Regions:")
00417 
00418     for region in region_list:
00419         if region.active and not region.filename:
00420             raise ToolException("Active region has no contents: No file found.")
00421         if isinstance(region.filename, list):
00422             header_basename, _ = splitext(destination)
00423             header_filename = header_basename + "_header.hex"
00424             _fill_header(region_list, region).tofile(header_filename, format='hex')
00425             region = region._replace(filename=header_filename)
00426         if region.filename:
00427             print("  Filling region %s with %s" % (region.name, region.filename))
00428             part = intelhex_offset(region.filename, offset=region.start)
00429             part_size = (part.maxaddr() - part.minaddr()) + 1
00430             if part_size > region.size:
00431                 raise ToolException("Contents of region %s do not fit"
00432                                     % region.name)
00433             merged.merge(part)
00434             pad_size = region.size - part_size
00435             if pad_size > 0 and region != region_list[-1]:
00436                 print("  Padding region %s with 0x%x bytes" % (region.name, pad_size))
00437                 if format == ".hex":
00438                     """The offset will be in the hex file generated when we're done,
00439                     so we can skip padding here"""
00440                 else:
00441                     merged.puts(merged.maxaddr() + 1, padding * pad_size)
00442 
00443     if not exists(dirname(destination)):
00444         makedirs(dirname(destination))
00445     print("Space used after regions merged: 0x%x" %
00446           (merged.maxaddr() - merged.minaddr() + 1))
00447     with open(destination, "wb+") as output:
00448         merged.tofile(output, format=format.strip("."))
00449 
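# Illustrative merge mirroring the region handling in build_project() below;
# the file names are hypothetical:
#
#     region_list = list(toolchain.config.regions)
#     region_list = [r._replace(filename="BUILD/app_application.bin")
#                    if r.active else r for r in region_list]
#     merge_region_list(region_list, "BUILD/app.bin")
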
00450 def scan_resources (src_paths, toolchain, dependencies_paths=None,
00451                    inc_dirs=None, base_path=None, collect_ignores=False):
00452     """ Scan resources using initialized toolchain
00453 
00454     Positional arguments
00455     src_paths - the paths to source directories
00456     toolchain - valid toolchain object
00457     dependencies_paths - dependency paths that we should scan for include dirs
00458     inc_dirs - additional include directories which should be added to
00459                the scanner resources
00460     """
00461 
00462     # Scan src_path
00463     resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
00464                                          collect_ignores=collect_ignores)
00465     for path in src_paths[1:]:
00466         resources.add(toolchain.scan_resources(path, base_path=base_path,
00467                                                collect_ignores=collect_ignores))
00468 
00469     # Scan dependency paths for include dirs
00470     if dependencies_paths is not None:
00471         for path in dependencies_paths:
00472             lib_resources = toolchain.scan_resources(path)
00473             resources.inc_dirs.extend(lib_resources.inc_dirs)
00474 
00475     # Add additional include directories if passed
00476     if inc_dirs:
00477         if isinstance(inc_dirs, list):
00478             resources.inc_dirs.extend(inc_dirs)
00479         else:
00480             resources.inc_dirs.append(inc_dirs)
00481 
00482     # Load resources into the config system which might expand/modify resources
00483     # based on config data
00484     resources = toolchain.config.load_resources(resources)
00485 
00486     # Set the toolchain's configuration data
00487     toolchain.set_config_data(toolchain.config.get_config_data())
00488 
00489     if (hasattr(toolchain.target, "release_versions") and
00490             "5" not in toolchain.target.release_versions and
00491             "rtos" in toolchain.config.lib_config_data):
00492         raise NotSupportedException("Target does not support mbed OS 5")
00493 
00494     return resources
00495 
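# Illustrative call, mirroring how build_project() uses it below (paths are
# hypothetical):
#
#     resources = scan_resources(["./source", "./mbed-os"], toolchain,
#                                inc_dirs=["./extra_headers"])
#     objects = toolchain.compile_sources(resources, resources.inc_dirs)
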
00496 def build_project (src_paths, build_path, target, toolchain_name,
00497                   libraries_paths=None, linker_script=None,
00498                   clean=False, notify=None, verbose=False, name=None,
00499                   macros=None, inc_dirs=None, jobs=1, silent=False,
00500                   report=None, properties=None, project_id=None,
00501                   project_description=None, extra_verbose=False, config=None,
00502                   app_config=None, build_profile=None, stats_depth=None):
00503     """ Build a project. A project may be a test or a user program.
00504 
00505     Positional arguments:
00506     src_paths - a path or list of paths that contain all files needed to build
00507                 the project
00508     build_path - the directory where all of the object files will be placed
00509     target - the MCU or board that the project will compile for
00510     toolchain_name - the name of the build tools
00511 
00512     Keyword arguments:
00513     libraries_paths - The location of libraries to include when linking
00514     linker_script - the file that drives the linker to do its job
00515     clean - Rebuild everything if True
00516     notify - Notify function for logs
00517     verbose - Write the actual tools command lines used if True
00518     name - the name of the project
00519     macros - additional macros
00520     inc_dirs - additional directories where include files may be found
00521     jobs - how many compilers we can run at once
00522     silent - suppress printing of progress indicators
00523     report - a dict where a result may be appended
00524     properties - a dict where target, toolchain and vendor properties are recorded
00525     project_id - the name put in the report
00526     project_description - the human-readable version of what this thing does
00527     extra_verbose - even more output!
00528     config - a Config object to use instead of creating one
00529     app_config - location of a chosen mbed_app.json file
00530     build_profile - a dict of flags that will be passed to the compiler
00531     stats_depth - depth level for memap to display file/dirs
00532     """
00533 
00534     # Convert src_path to a list if needed
00535     if not isinstance(src_paths, list):
00536         src_paths = [src_paths]
00537     # Extend src_paths with libraries_paths
00538     if libraries_paths is not None:
00539         src_paths.extend(libraries_paths)
00540         inc_dirs = (inc_dirs or []) + [dirname(path) for path in libraries_paths]
00541 
00542     if clean and exists(build_path):
00543         rmtree(build_path)
00544     mkdir(build_path)
00545 
00546     toolchain = prepare_toolchain(
00547         src_paths, build_path, target, toolchain_name, macros=macros,
00548         clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
00549         extra_verbose=extra_verbose, config=config, app_config=app_config,
00550         build_profile=build_profile)
00551 
00552     # The first path will give the name to the library
00553     name = (name or toolchain.config.name or
00554             basename(normpath(abspath(src_paths[0]))))
00555     toolchain.info("Building project %s (%s, %s)" %
00556                    (name, toolchain.target.name, toolchain_name))
00557 
00558     # Initialize reporting
00559     if report != None:
00560         start = time()
00561         # If project_id is specified, use that over the default name
00562         id_name = project_id.upper() if project_id else name.upper()
00563         description = project_description if project_description else name
00564         vendor_label = toolchain.target.extra_labels[0]
00565         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00566         cur_result = create_result(toolchain.target.name, toolchain_name,
00567                                    id_name, description)
00568         if properties != None:
00569             prep_properties(properties, toolchain.target.name, toolchain_name,
00570                             vendor_label)
00571 
00572     try:
00573         # Call unified scan_resources
00574         resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
00575 
00576         # Change linker script if specified
00577         if linker_script is not None:
00578             resources.linker_script = linker_script
00579 
00580         # Compile Sources
00581         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00582         resources.objects.extend(objects)
00583 
00584         # Link Program
00585         if toolchain.config.has_regions:
00586             res, _ = toolchain.link_program(resources, build_path, name + "_application")
00587             region_list = list(toolchain.config.regions)
00588             region_list = [r._replace(filename=res) if r.active else r
00589                            for r in region_list]
00590             res = "%s.%s" % (join(build_path, name),
00591                              getattr(toolchain.target, "OUTPUT_EXT", "bin"))
00592             merge_region_list(region_list, res)
00593         else:
00594             res, _ = toolchain.link_program(resources, build_path, name)
00595 
00596         memap_instance = getattr(toolchain, 'memap_instance', None)
00597         memap_table = ''
00598         if memap_instance:
00599             # Write output to stdout in text (pretty table) format
00600             memap_table = memap_instance.generate_output('table', stats_depth)
00601 
00602             if not silent:
00603                 print(memap_table)
00604 
00605             # Write output to file in JSON format
00606             map_out = join(build_path, name + "_map.json")
00607             memap_instance.generate_output('json', stats_depth, map_out)
00608 
00609             # Write output to file in CSV format for the CI
00610             map_csv = join(build_path, name + "_map.csv")
00611             memap_instance.generate_output('csv-ci', stats_depth, map_csv)
00612 
00613         resources.detect_duplicates(toolchain)
00614 
00615         if report != None:
00616             end = time()
00617             cur_result["elapsed_time"] = end - start
00618             cur_result["output"] = toolchain.get_output() + memap_table
00619             cur_result["result"] = "OK"
00620             cur_result["memory_usage"] = (memap_instance.mem_report
00621                                           if memap_instance is not None else None)
00622             cur_result["bin"] = res
00623             cur_result["elf"] = splitext(res)[0] + ".elf"
00624             cur_result.update(toolchain.report)
00625 
00626             add_result_to_report(report, cur_result)
00627 
00628         return res
00629 
00630     except Exception as exc:
00631         if report != None:
00632             end = time()
00633 
00634             if isinstance(exc, NotSupportedException):
00635                 cur_result["result"] = "NOT_SUPPORTED"
00636             else:
00637                 cur_result["result"] = "FAIL"
00638 
00639             cur_result["elapsed_time"] = end - start
00640 
00641             toolchain_output = toolchain.get_output()
00642             if toolchain_output:
00643                 cur_result["output"] += toolchain_output
00644 
00645             add_result_to_report(report, cur_result)
00646 
00647         # Let Exception propagate
00648         raise
00649 
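# Minimal illustrative invocation of build_project() (paths, target and
# toolchain are examples; in practice the mbed CLI drives this function):
#
#     image = build_project(
#         ["./source", "./mbed-os"], "./BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
#         name="my_app", jobs=4, app_config="./mbed_app.json")
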
00650 def build_library (src_paths, build_path, target, toolchain_name,
00651                   dependencies_paths=None, name=None, clean=False,
00652                   archive=True, notify=None, verbose=False, macros=None,
00653                   inc_dirs=None, jobs=1, silent=False, report=None,
00654                   properties=None, extra_verbose=False, project_id=None,
00655                   remove_config_header_file=False, app_config=None,
00656                   build_profile=None):
00657     """ Build a library
00658 
00659     Positional arguments:
00660     src_paths - a path or list of paths that contain all files needed to build
00661                 the library
00662     build_path - the directory where all of the object files will be placed
00663     target - the MCU or board that the project will compile for
00664     toolchain_name - the name of the build tools
00665 
00666     Keyword arguments:
00667     dependencies_paths - The location of libraries to include when linking
00668     name - the name of the library
00669     clean - Rebuild everything if True
00670     archive - whether the library will create an archive file
00671     notify - Notify function for logs
00672     verbose - Write the actual tools command lines used if True
00673     macros - additional macros
00674     inc_dirs - additional directories where include files may be found
00675     jobs - how many compilers we can run at once
00676     silent - suppress printing of progress indicators
00677     report - a dict where a result may be appended
00678     properties - a dict where target, toolchain and vendor properties are recorded
00679     extra_verbose - even more output!
00680     project_id - the name that goes in the report
00681     remove_config_header_file - delete config header file when done building
00682     app_config - location of a chosen mbed_app.json file
00683     build_profile - a dict of flags that will be passed to the compiler
00684     """
00685 
00686     # Convert src_path to a list if needed
00687     if not isinstance(src_paths, list):
00688         src_paths = [src_paths]
00689 
00690     # Build path
00691     if archive:
00692         # Use temp path when building archive
00693         tmp_path = join(build_path, '.temp')
00694         mkdir(tmp_path)
00695     else:
00696         tmp_path = build_path
00697 
00698     # Clean the build directory
00699     if clean and exists(tmp_path):
00700         rmtree(tmp_path)
00701     mkdir(tmp_path)
00702 
00703     # Pass all params to the unified prepare_toolchain()
00704     toolchain = prepare_toolchain(
00705         src_paths, build_path, target, toolchain_name, macros=macros,
00706         clean=clean, jobs=jobs, notify=notify, silent=silent,
00707         verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
00708         build_profile=build_profile)
00709 
00710     # The first path will give the name to the library
00711     if name is None:
00712         name = basename(normpath(abspath(src_paths[0])))
00713     toolchain.info("Building library %s (%s, %s)" %
00714                    (name, toolchain.target.name, toolchain_name))
00715 
00716     # Initialize reporting
00717     if report != None:
00718         start = time()
00719         # If project_id is specified, use that over the default name
00720         id_name = project_id.upper() if project_id else name.upper()
00721         description = name
00722         vendor_label = toolchain.target.extra_labels[0]
00723         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00724         cur_result = create_result(toolchain.target.name, toolchain_name,
00725                                    id_name, description)
00726         cur_result['type'] = 'library'
00727         if properties != None:
00728             prep_properties(properties, toolchain.target.name, toolchain_name,
00729                             vendor_label)
00730 
00731     for src_path in src_paths:
00732         if not exists(src_path):
00733             error_msg = "The library source folder does not exist: %s" % src_path
00734             if report != None:
00735                 cur_result["output"] = error_msg
00736                 cur_result["result"] = "FAIL"
00737                 add_result_to_report(report, cur_result)
00738             raise Exception(error_msg)
00739 
00740     try:
00741         # Call unified scan_resources
00742         resources = scan_resources(src_paths, toolchain,
00743                                    dependencies_paths=dependencies_paths,
00744                                    inc_dirs=inc_dirs)
00745 
00746 
00747         # Copy headers, objects and static libraries - all files needed for
00748         # static lib
00749         toolchain.copy_files(resources.headers, build_path, resources=resources)
00750         toolchain.copy_files(resources.objects, build_path, resources=resources)
00751         toolchain.copy_files(resources.libraries, build_path,
00752                              resources=resources)
00753         toolchain.copy_files(resources.json_files, build_path,
00754                              resources=resources)
00755         if resources.linker_script:
00756             toolchain.copy_files(resources.linker_script, build_path,
00757                                  resources=resources)
00758 
00759         if resources.hex_files:
00760             toolchain.copy_files(resources.hex_files, build_path,
00761                                  resources=resources)
00762 
00763         # Compile Sources
00764         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00765         resources.objects.extend(objects)
00766 
00767         if archive:
00768             toolchain.build_library(objects, build_path, name)
00769 
00770         if remove_config_header_file:
00771             config_header_path = toolchain.get_config_header()
00772             if config_header_path:
00773                 remove(config_header_path)
00774 
00775         if report != None:
00776             end = time()
00777             cur_result["elapsed_time"] = end - start
00778             cur_result["output"] = toolchain.get_output()
00779             cur_result["result"] = "OK"
00780 
00781 
00782             add_result_to_report(report, cur_result)
00783         return True
00784 
00785     except Exception as exc:
00786         if report != None:
00787             end = time()
00788 
00789             if isinstance(exc, ToolException):
00790                 cur_result["result"] = "FAIL"
00791             elif isinstance(exc, NotSupportedException):
00792                 cur_result["result"] = "NOT_SUPPORTED"
00793 
00794             cur_result["elapsed_time"] = end - start
00795 
00796             toolchain_output = toolchain.get_output()
00797             if toolchain_output:
00798                 cur_result["output"] += toolchain_output
00799 
00800             add_result_to_report(report, cur_result)
00801 
00802         # Let Exception propagate
00803         raise
00804 
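# Minimal illustrative invocation of build_library() (paths are examples):
#
#     build_library(["./my_lib"], "./BUILD/libs/K64F/GCC_ARM", "K64F",
#                   "GCC_ARM", name="my_lib", archive=True, jobs=4)
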
00805 ######################
00806 ### Legacy methods ###
00807 ######################
00808 
00809 def mbed2_obj_path(target_name, toolchain_name):
00810     real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
00811     return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)
00812 
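# For example, mbed2_obj_path("K64F", "GCC_ARM") yields
# join("TARGET_K64F", "TOOLCHAIN_GCC_ARM"); note the toolchain part uses the
# class name from TOOLCHAIN_CLASSES rather than the key passed in.
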
00813 def build_lib (lib_id, target, toolchain_name, verbose=False,
00814               clean=False, macros=None, notify=None, jobs=1, silent=False,
00815               report=None, properties=None, extra_verbose=False,
00816               build_profile=None):
00817     """ Legacy method for building mbed libraries
00818 
00819     Positional arguments:
00820     lib_id - the library's unique identifier
00821     target - the MCU or board that the project will compile for
00822     toolchain_name - the name of the build tools
00823 
00824     Keyword arguments:
00825     clean - Rebuild everything if True
00826     verbose - Write the actual tools command lines used if True
00827     macros - additional macros
00828     notify - Notify function for logs
00829     jobs - how many compilers we can run at once
00830     silent - suppress printing of progress indicators
00831     report - a dict where a result may be appended
00832     properties - a dict where target, toolchain and vendor properties are recorded
00833     extra_verbose - even more output!
00834     build_profile - a dict of flags that will be passed to the compiler
00835     """
00836     lib = Library(lib_id)
00837     if not lib.is_supported(target, toolchain_name):
00838         print('Library "%s" is not yet supported on target %s with toolchain %s'
00839               % (lib_id, target.name, toolchain_name))
00840         return False
00841 
00842     # We need to combine macros from parameter list with macros from library
00843     # definition
00844     lib_macros = lib.macros if lib.macros else []
00845     if macros:
00846         macros.extend(lib_macros)
00847     else:
00848         macros = lib_macros
00849 
00850     src_paths = lib.source_dir
00851     build_path = lib.build_dir
00852     dependencies_paths = lib.dependencies
00853     inc_dirs = lib.inc_dirs
00854     inc_dirs_ext = lib.inc_dirs_ext
00855 
00856     if not isinstance(src_paths, list):
00857         src_paths = [src_paths]
00858 
00859     # The first path will give the name to the library
00860     name = basename(src_paths[0])
00861 
00862     if report != None:
00863         start = time()
00864         id_name = name.upper()
00865         description = name
00866         vendor_label = target.extra_labels[0]
00867         cur_result = None
00868         prep_report(report, target.name, toolchain_name, id_name)
00869         cur_result = create_result(target.name, toolchain_name, id_name,
00870                                    description)
00871 
00872         if properties != None:
00873             prep_properties(properties, target.name, toolchain_name,
00874                             vendor_label)
00875 
00876     for src_path in src_paths:
00877         if not exists(src_path):
00878             error_msg = "The library source folder does not exist: %s" % src_path
00879 
00880             if report != None:
00881                 cur_result["output"] = error_msg
00882                 cur_result["result"] = "FAIL"
00883                 add_result_to_report(report, cur_result)
00884 
00885             raise Exception(error_msg)
00886 
00887     try:
00888         # Toolchain instance
00889         # Create the desired build directory structure
00890         bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
00891         mkdir(bin_path)
00892         tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
00893                                                             toolchain_name))
00894         mkdir(tmp_path)
00895 
00896         toolchain = prepare_toolchain(
00897             src_paths, tmp_path, target, toolchain_name, macros=macros,
00898             notify=notify, silent=silent, extra_verbose=extra_verbose,
00899             build_profile=build_profile, jobs=jobs, clean=clean)
00900 
00901         toolchain.info("Building library %s (%s, %s)" %
00902                        (name.upper(), target.name, toolchain_name))
00903 
00904         # Take into account the library configuration (MBED_CONFIG_FILE)
00905         config = toolchain.config
00906         config.add_config_files([MBED_CONFIG_FILE])
00907 
00908         # Scan Resources
00909         resources = []
00910         for src_path in src_paths:
00911             resources.append(toolchain.scan_resources(src_path))
00912 
00913         # Add extra include directories / files which are required by the library.
00914         # These files usually are not in the same directory as the source files,
00915         # so the previous scan will not include them.
00916         if inc_dirs_ext is not None:
00917             for inc_ext in inc_dirs_ext:
00918                 resources.append(toolchain.scan_resources(inc_ext))
00919 
00920         # Dependencies Include Paths
00921         dependencies_include_dir = []
00922         if dependencies_paths is not None:
00923             for path in dependencies_paths:
00924                 lib_resources = toolchain.scan_resources(path)
00925                 dependencies_include_dir.extend(lib_resources.inc_dirs)
00926                 dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))
00927 
00928         if inc_dirs:
00929             dependencies_include_dir.extend(inc_dirs)
00930 
00931         # Add other discovered configuration data to the configuration object
00932         for res in resources:
00933             config.load_resources(res)
00934         toolchain.set_config_data(toolchain.config.get_config_data())
00935 
00936 
00937         # Copy Headers
00938         for resource in resources:
00939             toolchain.copy_files(resource.headers, build_path,
00940                                  resources=resource)
00941 
00942         dependencies_include_dir.extend(
00943             toolchain.scan_resources(build_path).inc_dirs)
00944 
00945         # Compile Sources
00946         objects = []
00947         for resource in resources:
00948             objects.extend(toolchain.compile_sources(resource, dependencies_include_dir))
00949 
00950         needed_update = toolchain.build_library(objects, bin_path, name)
00951 
00952         if report != None and needed_update:
00953             end = time()
00954             cur_result["elapsed_time"] = end - start
00955             cur_result["output"] = toolchain.get_output()
00956             cur_result["result"] = "OK"
00957 
00958             add_result_to_report(report, cur_result)
00959         return True
00960 
00961     except Exception:
00962         if report != None:
00963             end = time()
00964             cur_result["result"] = "FAIL"
00965             cur_result["elapsed_time"] = end - start
00966 
00967             toolchain_output = toolchain.get_output()
00968             if toolchain_output:
00969                 cur_result["output"] += toolchain_output
00970 
00971             add_result_to_report(report, cur_result)
00972 
00973         # Let Exception propagate
00974         raise
00975 
00976 # We have unique legacy conventions about how we build and package the mbed
00977 # library
00978 def build_mbed_libs (target, toolchain_name, verbose=False,
00979                     clean=False, macros=None, notify=None, jobs=1, silent=False,
00980                     report=None, properties=None, extra_verbose=False,
00981                     build_profile=None):
00982     """ Returns True if the library was built and False if building was
00983     skipped
00984 
00985     Positional arguments:
00986     target - the MCU or board that the project will compile for
00987     toolchain_name - the name of the build tools
00988 
00989     Keyword arguments:
00990     verbose - Write the actual tools command lines used if True
00991     clean - Rebuild everything if True
00992     macros - additional macros
00993     notify - Notify function for logs
00994     jobs - how many compilers we can run at once
00995     silent - suppress printing of progress indicators
00996     report - a dict where a result may be appended
00997     properties - a dict where target, toolchain and vendor properties are recorded
00998     extra_verbose - even more output!
00999     build_profile - a dict of flags that will be passed to the compiler
01000     """
01001 
01002     if report != None:
01003         start = time()
01004         id_name = "MBED"
01005         description = "mbed SDK"
01006         vendor_label = target.extra_labels[0]
01007         cur_result = None
01008         prep_report(report, target.name, toolchain_name, id_name)
01009         cur_result = create_result(target.name, toolchain_name, id_name,
01010                                    description)
01011 
01012         if properties != None:
01013             prep_properties(properties, target.name, toolchain_name,
01014                             vendor_label)
01015 
01016     # Check toolchain support
01017     if toolchain_name not in target.supported_toolchains:
01018         supported_toolchains_text = ", ".join(target.supported_toolchains)
01019         print('%s target is not yet supported by toolchain %s' %
01020               (target.name, toolchain_name))
01021         print('%s target supports %s toolchain%s' %
01022               (target.name, supported_toolchains_text, 's'
01023                if len(target.supported_toolchains) > 1 else ''))
01024 
01025         if report != None:
01026             cur_result["result"] = "SKIP"
01027             add_result_to_report(report, cur_result)
01028 
01029         return False
01030 
01031     try:
01032         # Source and Build Paths
01033         build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
01034         build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
01035         mkdir(build_toolchain)
01036 
01037         # Toolchain
01038         tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
01039         mkdir(tmp_path)
01040 
01041         toolchain = prepare_toolchain(
01042             [""], tmp_path, target, toolchain_name, macros=macros, verbose=verbose,
01043             notify=notify, silent=silent, extra_verbose=extra_verbose,
01044             build_profile=build_profile, jobs=jobs, clean=clean)
01045 
01046         # Take into account the library configuration (MBED_CONFIG_FILE)
01047         config = toolchain.config
01048         config.add_config_files([MBED_CONFIG_FILE])
01049         toolchain.set_config_data(toolchain.config.get_config_data())
01050 
01051         # mbed
01052         toolchain.info("Building library %s (%s, %s)" %
01053                        ('MBED', target.name, toolchain_name))
01054 
01055         # Common Headers
01056         toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
01057         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
01058 
01059         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
01060                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
01061                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
01062             resources = toolchain.scan_resources(dir)
01063             toolchain.copy_files(resources.headers, dest)
01064             library_incdirs.append(dest)
01065 
01066         cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH)
01067         toolchain.copy_files(cmsis_implementation.headers, build_target)
01068         toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain)
01069         toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain)
01070 
01071         hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH)
01072         toolchain.copy_files(hal_implementation.headers +
01073                              hal_implementation.hex_files +
01074                              hal_implementation.libraries +
01075                              [MBED_CONFIG_FILE],
01076                              build_target, resources=hal_implementation)
01077         toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
01078         toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
01079         incdirs = toolchain.scan_resources(build_target).inc_dirs
01080         objects = toolchain.compile_sources(cmsis_implementation + hal_implementation,
01081                                             library_incdirs + incdirs + [tmp_path])
01082         toolchain.copy_files(objects, build_toolchain)
01083 
01084         # Common Sources
01085         mbed_resources = toolchain.scan_resources(MBED_DRIVERS)
01086         for dir in [MBED_PLATFORM, MBED_HAL]:
01087             mbed_resources += toolchain.scan_resources(dir)
01088 
01089         objects = toolchain.compile_sources(mbed_resources,
01090                                             library_incdirs + incdirs)
01091 
01092         # A number of compiled files need to be copied as objects, as opposed to
01093         # relying on the way the linker searches for symbols in archives. These are:
01094         #   - mbed_retarget.o: to make sure that the C standard lib symbols get
01095         #                 overridden
01096         #   - mbed_board.o: mbed_die is weak
01097         #   - mbed_overrides.o: this contains platform overrides of various
01098         #                       weak SDK functions
01099         #   - mbed_main.o: this contains main redirection
01100         separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o',
01101                                             'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], []
01102 
01103         for obj in objects:
01104             for name in separate_names:
01105                 if obj.endswith(name):
01106                     separate_objects.append(obj)
01107 
01108         for obj in separate_objects:
01109             objects.remove(obj)
01110 
01111         toolchain.build_library(objects, build_toolchain, "mbed")
01112 
01113         for obj in separate_objects:
01114             toolchain.copy_files(obj, build_toolchain)
01115 
01116         if report != None:
01117             end = time()
01118             cur_result["elapsed_time"] = end - start
01119             cur_result["output"] = toolchain.get_output()
01120             cur_result["result"] = "OK"
01121 
01122             add_result_to_report(report, cur_result)
01123 
01124         return True
01125 
01126     except Exception as exc:
01127         if report != None:
01128             end = time()
01129             cur_result["result"] = "FAIL"
01130             cur_result["elapsed_time"] = end - start
01131 
01132             toolchain_output = toolchain.get_output()
01133             if toolchain_output:
01134                 cur_result["output"] += toolchain_output
01135 
01136             cur_result["output"] += str(exc)
01137 
01138             add_result_to_report(report, cur_result)
01139 
01140         # Let Exception propagate
01141         raise
01142 
01143 
01144 def get_unique_supported_toolchains (release_targets=None):
01145     """ Get list of all unique toolchains supported by targets
01146 
01147     Keyword arguments:
01148     release_targets - tuple structure returned from get_mbed_official_release().
01149                       If release_targets is not specified, then it queries all
01150                       known targets
01151     """
01152     unique_supported_toolchains = []
01153 
01154     if not release_targets:
01155         for target in TARGET_NAMES:
01156             for toolchain in TARGET_MAP[target].supported_toolchains:
01157                 if toolchain not in unique_supported_toolchains:
01158                     unique_supported_toolchains.append(toolchain)
01159     else:
01160         for target in release_targets:
01161             for toolchain in target[1]:
01162                 if toolchain not in unique_supported_toolchains:
01163                     unique_supported_toolchains.append(toolchain)
01164 
01165     if "ARM" in unique_supported_toolchains:
01166         unique_supported_toolchains.append("ARMC6")
01167 
01168     return unique_supported_toolchains
01169 
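# Illustrative use together with the release helpers above:
#
#     targets = get_mbed_official_release("5")
#     toolchains = get_unique_supported_toolchains(targets)
#     # e.g. ["ARM", "GCC_ARM", "IAR", "ARMC6"]; "ARMC6" is appended whenever
#     # "ARM" is present, as implemented above.
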
01170 def mcu_toolchain_list (release_version='5'):
01171     """  Shows list of toolchains
01172 
01173     """
01174 
01175     if isinstance(release_version, basestring):
01176         # Force release_version to lowercase if it is a string
01177         release_version = release_version.lower()
01178     else:
01179         # Otherwise default to printing all known targets and toolchains
01180         release_version = 'all'
01181 
01182 
01183     version_release_targets = {}
01184     version_release_target_names = {}
01185 
01186     for version in RELEASE_VERSIONS:
01187         version_release_targets[version] = get_mbed_official_release(version)
01188         version_release_target_names[version] = [x[0] for x in
01189                                                  version_release_targets[
01190                                                      version]]
01191 
01192     if release_version in RELEASE_VERSIONS:
01193         release_targets = version_release_targets[release_version]
01194     else:
01195         release_targets = None
01196 
01197     unique_supported_toolchains = get_unique_supported_toolchains(
01198         release_targets)
01199     columns = ["mbed OS %s" % x for x in RELEASE_VERSIONS] + unique_supported_toolchains
01200     return "\n".join(columns)
01201 
01202 
01203 def mcu_target_list (release_version='5'):
01204     """  Shows target list
01205 
01206     """
01207 
01208     if isinstance(release_version, basestring):
01209         # Force release_version to lowercase if it is a string
01210         release_version = release_version.lower()
01211     else:
01212         # Otherwise default to printing all known targets and toolchains
01213         release_version = 'all'
01214 
01215 
01216     version_release_targets = {}
01217     version_release_target_names = {}
01218 
01219     for version in RELEASE_VERSIONS:
01220         version_release_targets[version] = get_mbed_official_release(version)
01221         version_release_target_names[version] = [x[0] for x in
01222                                                  version_release_targets[
01223                                                      version]]
01224 
01225     if release_version in RELEASE_VERSIONS:
01226         release_targets = version_release_targets[release_version]
01227     else:
01228         release_targets = None
01229 
01230     target_names = []
01231 
01232     if release_targets:
01233         target_names = [x[0] for x in release_targets]
01234     else:
01235         target_names = TARGET_NAMES
01236 
01237     return "\n".join(target_names)
01238 
01239 
01240 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None,
01241                          release_version='5'):
01242     """  Shows target map using prettytable
01243 
01244     Keyword arguments:
01245     verbose_html - emit html instead of a simple table
01246     platform_filter - remove results that match the string
01247     release_version - get the matrix for this major version number
01248     """
01249     # Only use it in this function so building works without extra modules
01250     from prettytable import PrettyTable
01251 
01252     if isinstance(release_version, basestring):
01253         # Force release_version to lowercase if it is a string
01254         release_version = release_version.lower()
01255     else:
01256         # Otherwise default to printing all known targets and toolchains
01257         release_version = 'all'
01258 
01259 
01260     version_release_targets = {}
01261     version_release_target_names = {}
01262 
01263     for version in RELEASE_VERSIONS:
01264         version_release_targets[version] = get_mbed_official_release(version)
01265         version_release_target_names[version] = [x[0] for x in
01266                                                  version_release_targets[
01267                                                      version]]
01268 
01269     if release_version in RELEASE_VERSIONS:
01270         release_targets = version_release_targets[release_version]
01271     else:
01272         release_targets = None
01273 
01274     unique_supported_toolchains = get_unique_supported_toolchains(
01275         release_targets)
01276     prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
01277 
01278     # All tests status table print
01279     columns = prepend_columns + unique_supported_toolchains
01280     table_printer = PrettyTable(columns)
01281     # Align table
01282     for col in columns:
01283         table_printer.align[col] = "c"
01284     table_printer.align["Target"] = "l"
01285 
01286     perm_counter = 0
01287     target_counter = 0
01288 
01289     target_names = []
01290 
01291     if release_targets:
01292         target_names = [x[0] for x in release_targets]
01293     else:
01294         target_names = TARGET_NAMES
01295 
01296     for target in sorted(target_names):
01297         if platform_filter is not None:
01298             # Filter platforms with the regex; skip targets that do not match
01299             if re.search(platform_filter, target) is None:
01300                 continue
01301         target_counter += 1
01302 
01303         row = [target]  # First column is platform name
01304 
01305         for version in RELEASE_VERSIONS:
01306             if target in version_release_target_names[version]:
01307                 text = "Supported"
01308             else:
01309                 text = "-"
01310             row.append(text)
01311 
01312         for unique_toolchain in unique_supported_toolchains:
01313             if (unique_toolchain in TARGET_MAP[target].supported_toolchains or
01314                 (unique_toolchain == "ARMC6" and
01315                  "ARM" in TARGET_MAP[target].supported_toolchains)):
01316                 text = "Supported"
01317                 perm_counter += 1
01318             else:
01319                 text = "-"
01320 
01321             row.append(text)
01322         table_printer.add_row(row)
01323 
01324     result = table_printer.get_html_string() if verbose_html \
01325              else table_printer.get_string()
01326     result += "\n"
01327     result += "Supported targets: %d\n"% (target_counter)
01328     if target_counter == 1:
01329         result += "Supported toolchains: %d"% (perm_counter)
01330     return result
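
# Illustrative usage sketch (not part of the original module): the matrix is
# returned as a string, and rendering it needs the optional prettytable
# package. "K64" is only an example filter pattern.
def _example_print_support_matrix():
    # Plain-text matrix restricted to targets whose names match "K64"
    print(mcu_toolchain_matrix(platform_filter="K64", release_version='5'))
    # HTML matrix covering every known target and toolchain
    return mcu_toolchain_matrix(verbose_html=True, release_version='all')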
01331 
01332 
01333 def get_target_supported_toolchains (target):
01334     """ Return the list of toolchains supported by a target, or None if unknown
01335 
01336     Positional arguments:
01337     target - the target to get the supported toolchains of
01338     """
01339     return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
01340         else None
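
# Illustrative usage sketch (not part of the original module): "K64F" is an
# example target name; the helper returns None for targets it does not know.
def _example_query_target_toolchains(target_name="K64F"):
    toolchains = get_target_supported_toolchains(target_name)
    if toolchains is None:
        return "Unknown target: %s" % target_name
    return ", ".join(toolchains)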
01341 
01342 
01343 def print_build_results (result_list, build_name):
01344     """ Generate result string for build results
01345 
01346     Positional arguments:
01347     result_list - the list of results to print
01348     build_name - the name of the build we are printing result for
01349     """
01350     result = ""
01351     if len(result_list) > 0:
01352         result += build_name + "\n"
01353         result += "\n".join(["  * %s" % f for f in result_list])
01354         result += "\n"
01355     return result
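
# Illustrative usage sketch (not part of the original module): the helper only
# formats whatever lists it is handed, so the project names here are invented.
def _example_summarise_build():
    passed = ["mbed-os-example-blinky"]
    failed = []
    summary = print_build_results(passed, "Build successes:")
    summary += print_build_results(failed, "Build failures:")
    return summary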
01356 
01357 def print_build_memory_usage (report):
01358     """ Generate a result table with memory usage values for build results
01359     Aggregates the reports obtained from the toolchain's get_memory_summary()
01360 
01361     Positional arguments:
01362     report - Report generated during build procedure.
01363     """
01364     from prettytable import PrettyTable
01365     columns_text = ['name', 'target', 'toolchain']
01366     columns_int = ['static_ram', 'total_flash']
01367     table = PrettyTable(columns_text + columns_int)
01368 
01369     for col in columns_text:
01370         table.align[col] = 'l'
01371 
01372     for col in columns_int:
01373         table.align[col] = 'r'
01374 
01375     for target in report:
01376         for toolchain in report[target]:
01377             for name in report[target][toolchain]:
01378                 for dlist in report[target][toolchain][name]:
01379                     for dlistelem in dlist:
01380                         # Get 'memory_usage' record and build table with
01381                         # statistics
01382                         record = dlist[dlistelem]
01383                         if 'memory_usage' in record and record['memory_usage']:
01384                             # Note that summary should be in the last record of
01385                             # 'memory_usage' section. This is why we are
01386                             # grabbing last "[-1]" record.
01387                             row = [
01388                                 record['description'],
01389                                 record['target_name'],
01390                                 record['toolchain_name'],
01391                                 record['memory_usage'][-1]['summary'][
01392                                     'static_ram'],
01393                                 record['memory_usage'][-1]['summary'][
01394                                     'total_flash'],
01395                             ]
01396                             table.add_row(row)
01397 
01398     result = "Memory map breakdown for built projects (values in Bytes):\n"
01399     result += table.get_string(sortby='name')
01400     return result
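
# Illustrative usage sketch (not part of the original module): the nested
# layout below mirrors what print_build_memory_usage walks (target ->
# toolchain -> name -> list of result dicts); the concrete numbers are
# invented, and prettytable must be installed for the table to render.
def _example_memory_usage_table():
    report = {
        "K64F": {
            "GCC_ARM": {
                "blinky": [{
                    "blinky": {
                        "description": "blinky",
                        "target_name": "K64F",
                        "toolchain_name": "GCC_ARM",
                        "memory_usage": [
                            {"summary": {"static_ram": 10240,
                                         "total_flash": 65536}},
                        ],
                    },
                }],
            },
        },
    }
    return print_build_memory_usage(report)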
01401 
01402 def write_build_report (build_report, template_filename, filename):
01403     """Write a build report to disk using a template file
01404 
01405     Positional arguments:
01406     build_report - a report generated by the build system
01407     template_filename - a file that contains the template for the style of build
01408                         report
01409     filename - the location on disk to write the file to
01410     """
01411     build_report_failing = []
01412     build_report_passing = []
01413 
01414     for report in build_report:
01415         if len(report["failing"]) > 0:
01416             build_report_failing.append(report)
01417         else:
01418             build_report_passing.append(report)
01419 
01420     env = Environment(extensions=['jinja2.ext.with_'])
01421     env.loader = FileSystemLoader('ci_templates')
01422     template = env.get_template(template_filename)
01423 
01424     with open(filename, 'w+') as placeholder:
01425         placeholder.write(template.render(
01426             failing_builds=build_report_failing,
01427             passing_builds=build_report_passing))
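
# Illustrative usage sketch (not part of the original module): the template
# and output names are placeholders; a matching Jinja2 template must exist in
# the local 'ci_templates' directory, and each report needs a "failing" list.
def _example_write_report(build_report):
    write_build_report(build_report, "build_report.html", "report_out.html")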
01428 
01429 
01430 def merge_build_data(filename, toolchain_report, app_type):
    """Merge this build's records into the build data JSON file on disk

    Positional arguments:
    filename - path of the build data file to update (recreated if unreadable)
    toolchain_report - the report collected during the build
    app_type - value recorded in a build's 'type' field when it has none
    """
01431     path_to_file = dirname(abspath(filename))
01432     try:
01433         build_data = load(open(filename))
01434     except (IOError, ValueError):
01435         build_data = {'builds': []}
01436     for tgt in toolchain_report.values():
01437         for tc in tgt.values():
01438             for project in tc.values():
01439                 for build in project:
01440                     try:
01441                         build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
01442                         build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
01443                     except KeyError:
01444                         pass
01445                     if 'type' not in build[0]:
01446                         build[0]['type'] = app_type
01447                     build_data['builds'].append(build[0])
01448     dump(build_data, open(filename, "wb"), indent=4, separators=(',', ': '))