Knight KE / Mbed OS Game_Master
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers build_api.py Source File

build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .targets import TARGET_NAMES, TARGET_MAP
00046 from .libraries import Library
00047 from .toolchains import TOOLCHAIN_CLASSES
00048 from .config import Config
00049 
# Release trains a target may declare support for: '2' (mbed 2) and '5' (mbed OS 5)
RELEASE_VERSIONS = ['2', '5']
00051 
def prep_report(report, target_name, toolchain_name, id_name):
    """Ensure the nested report structure exists for a build.

    Creates report[target_name][toolchain_name][id_name] = [] for any
    level that is missing, leaving existing entries untouched.

    Positional arguments:
    report - the report to fill
    target_name - the target being used
    toolchain_name - the toolchain being used
    id_name - the name of the executable or library being built
    """
    # setdefault creates each nesting level only when it is missing,
    # replacing the original chain of `if not key in dict` checks
    report.setdefault(target_name, {}) \
          .setdefault(toolchain_name, {}) \
          .setdefault(id_name, [])
00069 
def prep_properties(properties, target_name, toolchain_name, vendor_label):
    """Setup test properties.

    Ensures properties[target_name][toolchain_name] exists and records the
    target, vendor and toolchain names in it (overwriting prior values).

    Positional arguments:
    properties - the dict to fill
    target_name - the target the test is targeting
    toolchain_name - the toolchain that will compile the test
    vendor_label - the vendor
    """
    # setdefault creates each nesting level only when it is missing,
    # replacing the original chain of `if not key in dict` checks
    entry = properties.setdefault(target_name, {}) \
                      .setdefault(toolchain_name, {})
    entry["target"] = target_name
    entry["vendor"] = vendor_label
    entry["toolchain"] = toolchain_name
00088 
def create_result(target_name, toolchain_name, id_name, description):
    """Create a result dictionary

    Positional arguments:
    target_name - the target being built for
    toolchain_name - the toolchain doing the building
    id_name - the name of the executable or library being built
    description - a human readable description of what's going on
    """
    # A single literal makes the full shape of a result record obvious
    return {
        "target_name": target_name,
        "toolchain_name": toolchain_name,
        "id": id_name,
        "description": description,
        "elapsed_time": 0,
        "output": "",
    }
00107 
def add_result_to_report(report, result):
    """Add a single result to a report dictionary

    Stamps the result with a UTC timestamp and a uuid, then appends it
    (wrapped in a {0: result} dict) under its target/toolchain/id keys.

    Positional arguments:
    report - the report to append to
    result - the result to append
    """
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())
    bucket = report[result["target_name"]][result["toolchain_name"]]
    bucket[result["id"]].append({0: result})
00122 
def get_config(src_paths, target, toolchain_name, app_config=None):
    """Get the configuration object for a target-toolchain combination

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools

    Keyword arguments:
    app_config - location of a chosen mbed_app.json file
    """
    # Accept a single path as well as a list of paths
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Build a toolchain object purely to reuse its scanning/config machinery
    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
                                  app_config=app_config)

    # Collect resources from every source root
    resources = toolchain.scan_resources(src_paths[0])
    for extra_path in src_paths[1:]:
        resources.add(toolchain.scan_resources(extra_path))

    # Re-apply configuration until the feature set reaches a fixed point:
    # enabling a feature can pull in new json config files, which can in
    # turn enable more features
    seen_features = set()
    while True:
        toolchain.config.add_config_files(resources.json_files)

        current_features = set(toolchain.config.get_features())
        if current_features == seen_features:
            break

        for feature in current_features:
            if feature in resources.features:
                resources += resources.features[feature]

        seen_features = current_features
    toolchain.config.validate_config()
    if toolchain.config.has_regions:
        # Force evaluation of the lazy regions property so that any
        # region-definition errors surface here
        _ = list(toolchain.config.regions)

    cfg, macros = toolchain.config.get_config_data()
    return cfg, macros, toolchain.config.get_features()
00167 
def is_official_target(target_name, version):
    """ Returns True, None if a target is part of the official release for the
    given version. Return False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name of the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the folowing toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = sorted(required_toolchains)
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = sorted(supported_toolchains)

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the folowing toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains_sorted)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(supported_toolchains_sorted))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            # BUG FIX: the format string has two '%s' placeholders but was
            # previously given only `version` (not a tuple), which raised
            # TypeError instead of building the reason string
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif version not in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason
00244 
def transform_release_toolchains(toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    # mbed OS 5 releases are pinned to exactly these three toolchains;
    # every other version keeps the caller-supplied list unchanged
    return ['ARM', 'GCC_ARM', 'IAR'] if version == '5' else toolchains
00258 
00259 
def get_mbed_official_release(version):
    """ Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    entries = []
    for target_key in TARGET_NAMES:
        target = TARGET_MAP[target_key]
        # Only targets that explicitly declare this release version qualify
        if not hasattr(target, 'release_versions'):
            continue
        if version not in target.release_versions:
            continue
        release_toolchains = transform_release_toolchains(
            target.supported_toolchains, version)
        entries.append(tuple([target.name, tuple(release_toolchains)]))

    mbed_official_release = tuple(entries)

    # Validate every entry against the official-release rules; a bad
    # target aborts the whole listing
    for entry in mbed_official_release:
        is_official, reason = is_official_target(entry[0], version)
        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release
00292 
# Toolchain names that all resolve to the ARM compiler family
ARM_COMPILERS = ("ARM", "ARMC6", "uARM")
def target_supports_toolchain(target, toolchain_name):
    """Return whether *target* can be built with *toolchain_name*.

    Any member of the ARM compiler family is accepted when the target
    supports at least one family member; other toolchains must appear in
    the target's supported_toolchains list verbatim.
    """
    supported = target.supported_toolchains
    if toolchain_name not in ARM_COMPILERS:
        return toolchain_name in supported
    return any(candidate in supported for candidate in ARM_COMPILERS)
00299 
00300 
def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, config=None, app_config=None,
                      build_profile=None, ignore=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    build_dir - directory where build artifacts will be placed
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a list of mergeable build profiles
    ignore - list of paths to add to mbedignore
    """

    # Drop duplicate paths (first entry kept in place) so the same objects
    # are never compiled or linked twice
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # Create the configuration object unless the caller supplied one
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target
    if not target_supports_toolchain(target, toolchain_name):
        raise NotSupportedException(
            "Target {} is not supported by toolchain {}".format(
                target.name, toolchain_name))

    if toolchain_name not in TOOLCHAIN_CLASSES:
        raise KeyError("Toolchain %s not supported" % toolchain_name)
    toolchain_class = TOOLCHAIN_CLASSES[toolchain_name]

    # Merge every requested build profile into a single flag set per tool
    profile = {key: [] for key in ('c', 'cxx', 'common', 'asm', 'ld')}
    for contents in build_profile or []:
        for key, flags in profile.items():
            flags.extend(contents[toolchain_name].get(key, []))

    toolchain = toolchain_class(
        target, notify, macros, build_dir=build_dir, build_profile=profile)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean

    if ignore:
        toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)

    return toolchain
00356 
00357 def _printihex(ihex):
00358     import pprint
00359     pprint.PrettyPrinter().pprint(ihex.todict())
00360 
def _real_region_size(region):
    """Return the number of bytes a region's contents actually occupy,
    falling back to the declared region size when the contents cannot be
    loaded as an Intel hex image."""
    try:
        contents = intelhex_offset(region.filename, offset=region.start)
        return 1 + contents.maxaddr() - contents.minaddr()
    except AttributeError:
        # No loadable file for this region; report its nominal size
        return region.size
00367 
00368 
def _fill_header(region_list, current_region):
    """Fill an application header region

    This is done in three steps:
     * Fill the whole region with zeros
     * Fill const, timestamp and size entries with their data
     * Fill the digests using this header as the header region

    Positional arguments:
    region_list - every region of the image (digest/size members may
                  reference other regions by name)
    current_region - the header region; its `filename` holds the list of
                     (name, type, subtype, data) member descriptions
    """
    region_dict = {r.name: r for r in region_list}
    header = IntelHex()
    header.puts(current_region.start, b'\x00' * current_region.size)
    start = current_region.start
    for member in current_region.filename:
        _, member_type, subtype, data = member
        if member_type == "const":
            fmt = {
                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
            }[subtype]
            header.puts(start, struct.pack(fmt, integer(data, 0)))
        elif member_type == "timestamp":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            # struct integer formats reject floats on Python 3; truncate
            # the epoch time to an int before packing
            header.puts(start, struct.pack(fmt, int(time())))
        elif member_type == "size":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            size = sum(_real_region_size(region_dict[r]) for r in data)
            header.puts(start, struct.pack(fmt, size))
        elif member_type == "digest":
            if data == "header":
                # Digest everything written to the header so far
                ih = header[:start]
            else:
                ih = intelhex_offset(region_dict[data].filename,
                                     offset=region_dict[data].start)
            if subtype.startswith("CRCITT32"):
                # zlib.crc32 is unsigned on Python 3 (and may be negative on
                # Python 2); mask to 32 bits and pack unsigned — the emitted
                # bytes are identical, but values >= 2**31 no longer raise
                # struct.error with a signed format
                fmt = {"CRCITT32be": ">L", "CRCITT32le": "<L"}[subtype]
                crc = zlib.crc32(ih.tobinarray()) & 0xffffffff
                header.puts(start, struct.pack(fmt, crc))
            elif subtype.startswith("SHA"):
                if subtype == "SHA256":
                    digest = hashlib.sha256()
                elif subtype == "SHA512":
                    digest = hashlib.sha512()
                digest.update(ih.tobinarray())
                header.puts(start, digest.digest())
        start += Config.header_member_size(member)
    return header
00416 
def merge_region_list(region_list, destination, notify, padding=b'\xFF'):
    """Merge the region_list into a single image

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    notify - object used to report progress (info messages)

    Keyword Arguments:
    padding - bytes to fill gaps with
    """
    merged = IntelHex()
    _, suffix = splitext(destination)

    notify.info("Merging Regions")

    for region in region_list:
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if isinstance(region.filename, list):
            # A list of members means this region is a generated header;
            # build it and substitute the generated file for the list
            header_basename, _ = splitext(destination)
            header_filename = header_basename + "_header.hex"
            _fill_header(region_list, region).tofile(header_filename,
                                                     format='hex')
            region = region._replace(filename=header_filename)
        if region.filename:
            notify.info("  Filling region %s with %s" %
                        (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part.start_addr = None
            part_size = (part.maxaddr() - part.minaddr()) + 1
            if part_size > region.size:
                raise ToolException("Contents of region %s does not fit"
                                    % region.name)
            merged.merge(part)
            pad_size = region.size - part_size
            if pad_size > 0 and region != region_list[-1]:
                notify.info("  Padding region %s with 0x%x bytes" %
                            (region.name, pad_size))
                # Hex output carries explicit addresses, so gaps need no
                # physical padding; only pad raw (e.g. .bin) output.
                # BUG FIX: this was `suffix is ".hex"`, an identity test
                # against a computed string which is effectively always
                # False, so hex images were padded anyway.
                if suffix != ".hex":
                    merged.puts(merged.maxaddr() + 1, padding * pad_size)

    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    notify.info("Space used after regions merged: 0x%x" %
                (merged.maxaddr() - merged.minaddr() + 1))
    merged.tofile(destination, format=suffix.strip("."))
00462 
def scan_resources(src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None, collect_ignores=False):
    """ Scan resources using an initialized toolchain

    Positional arguments
    src_paths - the paths to source directories
    toolchain - valid toolchain object
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources
    """

    # Merge the resources of every source root into a single collection
    resources = None
    for path in src_paths:
        scanned = toolchain.scan_resources(path, base_path=base_path,
                                           collect_ignores=collect_ignores)
        if resources is None:
            resources = scanned
        else:
            resources.add(scanned)

    # Dependencies only contribute their include directories
    if dependencies_paths is not None:
        for dep_path in dependencies_paths:
            dep_resources = toolchain.scan_resources(dep_path)
            resources.inc_dirs.extend(dep_resources.inc_dirs)

    # Fold in any caller-supplied include directories (single path or list)
    if inc_dirs:
        if isinstance(inc_dirs, list):
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # The config system may expand/modify resources based on config data
    resources = toolchain.config.load_resources(resources)

    # Push the resolved configuration data back into the toolchain
    toolchain.set_config_data(toolchain.config.get_config_data())

    return resources
00503 
def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None, clean=False,
                  notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
                  report=None, properties=None, project_id=None,
                  project_description=None, config=None,
                  app_config=None, build_profile=None, stats_depth=None,
                  ignore=None):
    """ Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do it's job
    clean - Rebuild everything if True
    notify - Notify function for logs
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    stats_depth - depth level for memap to display file/dirs
    ignore - list of paths to add to mbedignore
    """

    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        # BUG FIX: inc_dirs may legitimately be None here; previously this
        # raised AttributeError whenever libraries_paths was supplied
        # without inc_dirs
        if inc_dirs is None:
            inc_dirs = []
        inc_dirs.extend(map(dirname, libraries_paths))

    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, config=config,
        app_config=app_config, build_profile=build_profile, ignore=ignore)

    # The first path will give the name to the library
    name = (name or toolchain.config.name or
            basename(normpath(abspath(src_paths[0]))))
    notify.info("Building project %s (%s, %s)" %
                (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        if toolchain.config.has_regions:
            # Link the bare application, then splice it into the active
            # region and merge all regions into the final image
            res, _ = toolchain.link_program(resources, build_path,
                                            name + "_application")
            region_list = [r._replace(filename=res) if r.active else r
                           for r in toolchain.config.regions]
            res = "%s.%s" % (join(build_path, name),
                             getattr(toolchain.target, "OUTPUT_EXT", "bin"))
            merge_region_list(region_list, res, notify)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table', stats_depth)
            notify.info(memap_table)

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', stats_depth, map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', stats_depth, map_csv)

            map_html = join(build_path, name + "_map.html")
            memap_instance.generate_output('html', stats_depth, map_html)

        resources.detect_duplicates(toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = (memap_instance.mem_report
                                          if memap_instance is not None
                                          else None)
            cur_result["bin"] = res
            cur_result["elf"] = splitext(res)[0] + ".elf"
            cur_result.update(toolchain.report)

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report is not None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
00648 
00649 def build_library (src_paths, build_path, target, toolchain_name,
00650                   dependencies_paths=None, name=None, clean=False,
00651                   archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
00652                   report=None, properties=None, project_id=None,
00653                   remove_config_header_file=False, app_config=None,
00654                   build_profile=None, ignore=None):
00655     """ Build a library
00656 
00657     Positional arguments:
00658     src_paths - a path or list of paths that contain all files needed to build
00659                 the library
00660     build_path - the directory where all of the object files will be placed
00661     target - the MCU or board that the project will compile for
00662     toolchain_name - the name of the build tools
00663 
00664     Keyword arguments:
00665     dependencies_paths - The location of libraries to include when linking
00666     name - the name of the library
00667     clean - Rebuild everything if True
00668     archive - whether the library will create an archive file
00669     notify - Notify function for logs
00670     macros - additional macros
00671     inc_dirs - additional directories where include files may be found
00672     jobs - how many compilers we can run at once
00673     report - a dict where a result may be appended
00674     properties - UUUUHHHHH beats me
00675     project_id - the name that goes in the report
00676     remove_config_header_file - delete config header file when done building
00677     app_config - location of a chosen mbed_app.json file
00678     build_profile - a dict of flags that will be passed to the compiler
00679     ignore - list of paths to add to mbedignore
00680     """
00681 
00682     # Convert src_path to a list if needed
00683     if not isinstance(src_paths, list):
00684         src_paths = [src_paths]
00685 
00686     # Build path
00687     if archive:
00688         # Use temp path when building archive
00689         tmp_path = join(build_path, '.temp')
00690         mkdir(tmp_path)
00691     else:
00692         tmp_path = build_path
00693 
00694     # Clean the build directory
00695     if clean and exists(tmp_path):
00696         rmtree(tmp_path)
00697     mkdir(tmp_path)
00698 
00699     # Pass all params to the unified prepare_toolchain()
00700     toolchain = prepare_toolchain(
00701         src_paths, build_path, target, toolchain_name, macros=macros,
00702         clean=clean, jobs=jobs, notify=notify, app_config=app_config,
00703         build_profile=build_profile, ignore=ignore)
00704 
00705     # The first path will give the name to the library
00706     if name is None:
00707         name = basename(normpath(abspath(src_paths[0])))
00708     notify.info("Building library %s (%s, %s)" %
00709                    (name, toolchain.target.name, toolchain_name))
00710 
00711     # Initialize reporting
00712     if report != None:
00713         start = time()
00714         # If project_id is specified, use that over the default name
00715         id_name = project_id.upper() if project_id else name.upper()
00716         description = name
00717         vendor_label = toolchain.target.extra_labels[0]
00718         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00719         cur_result = create_result(toolchain.target.name, toolchain_name,
00720                                    id_name, description)
00721         cur_result['type'] = 'library'
00722         if properties != None:
00723             prep_properties(properties, toolchain.target.name, toolchain_name,
00724                             vendor_label)
00725 
00726     for src_path in src_paths:
00727         if not exists(src_path):
00728             error_msg = "The library source folder does not exist: %s", src_path
00729             if report != None:
00730                 cur_result["output"] = error_msg
00731                 cur_result["result"] = "FAIL"
00732                 add_result_to_report(report, cur_result)
00733             raise Exception(error_msg)
00734 
00735     try:
00736         # Call unified scan_resources
00737         resources = scan_resources(src_paths, toolchain,
00738                                    dependencies_paths=dependencies_paths,
00739                                    inc_dirs=inc_dirs)
00740 
00741 
00742         # Copy headers, objects and static libraries - all files needed for
00743         # static lib
00744         toolchain.copy_files(resources.headers, build_path, resources=resources)
00745         toolchain.copy_files(resources.objects, build_path, resources=resources)
00746         toolchain.copy_files(resources.libraries, build_path,
00747                              resources=resources)
00748         toolchain.copy_files(resources.json_files, build_path,
00749                              resources=resources)
00750         if resources.linker_script:
00751             toolchain.copy_files(resources.linker_script, build_path,
00752                                  resources=resources)
00753 
00754         if resources.hex_files:
00755             toolchain.copy_files(resources.hex_files, build_path,
00756                                  resources=resources)
00757 
00758         # Compile Sources
00759         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00760         resources.objects.extend(objects)
00761 
00762         if archive:
00763             toolchain.build_library(objects, build_path, name)
00764 
00765         if remove_config_header_file:
00766             config_header_path = toolchain.get_config_header()
00767             if config_header_path:
00768                 remove(config_header_path)
00769 
00770         if report != None:
00771             end = time()
00772             cur_result["elapsed_time"] = end - start
00773             cur_result["result"] = "OK"
00774 
00775 
00776             add_result_to_report(report, cur_result)
00777         return True
00778 
00779     except Exception as exc:
00780         if report != None:
00781             end = time()
00782 
00783             if isinstance(exc, ToolException):
00784                 cur_result["result"] = "FAIL"
00785             elif isinstance(exc, NotSupportedException):
00786                 cur_result["result"] = "NOT_SUPPORTED"
00787 
00788             cur_result["elapsed_time"] = end - start
00789 
00790             add_result_to_report(report, cur_result)
00791 
00792         # Let Exception propagate
00793         raise
00794 
00795 ######################
00796 ### Legacy methods ###
00797 ######################
00798 
def mbed2_obj_path(target_name, toolchain_name):
    """Return the legacy (mbed 2) object sub-directory for a
    target/toolchain pair, e.g. ``TARGET_X/TOOLCHAIN_GCC_ARM``.
    """
    toolchain_class_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
    return join("TARGET_%s" % target_name,
                "TOOLCHAIN_%s" % toolchain_class_name)
00802 
def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
              notify=None, jobs=1, report=None, properties=None,
              build_profile=None, ignore=None):
    """ Legacy method for building mbed libraries

    Returns True on success, False when the library is not supported on
    the target/toolchain pair; re-raises any build exception.

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict to fill with build environment properties
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # Combine macros from the parameter list with macros from the library
    # definition. Build a NEW list rather than extending `macros` in place:
    # mutating the caller's list would leak this library's macros into any
    # later build_lib() call that reuses the same macros list.
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros = macros + lib_macros
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # The first path gives the library its name
    name = basename(src_paths[0])

    # Initialize reporting
    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # %-format the message; the original assigned a tuple here by
            # accident ("...: %s", src_path), producing an unreadable error.
            error_msg = ("The library source folder does not exist: %s"
                         % src_path)
            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)

        # Toolchain instance
        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
            ignore=ignore)

        notify.info("Building library %s (%s, %s)" %
                    (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan resources for every source path
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files required by the library.
        # These usually live outside the source directories, so the scan
        # above does not pick them up.
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependency include paths (both the include dirs themselves and
        # their parents, so "#include <lib/header.h>" style paths resolve)
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(
                    map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy headers so dependents can include them from build_path
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource,
                                                     dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)

        # Let the exception propagate
        raise
00957 
00958 # We do have unique legacy conventions about how we build and package the mbed
00959 # library
def build_mbed_libs (target, toolchain_name, clean=False, macros=None,
                    notify=None, jobs=1, report=None, properties=None,
                    build_profile=None, ignore=None):
    """ Build the legacy (mbed 2) SDK library for a target/toolchain pair.

    Returns True if the library was built and False if building was
    skipped (toolchain not supported by the target).

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict to fill with build environment properties
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore
    """

    # Initialize reporting first so the SKIP/FAIL paths below can record
    # a result before any work happens
    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' %
              (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' %
              (target.name, supported_toolchains_text, 's'
               if len(target.supported_toolchains) > 1 else ''))

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        # Toolchain (note the single empty source path: resources are
        # scanned explicitly below rather than via src_paths)
        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
            build_profile=build_profile, jobs=jobs, clean=clean, ignore=ignore)

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # mbed
        notify.info("Building library %s (%s, %s)" %
                       ('MBED', target.name, toolchain_name))

        # Common Headers: copy drivers/platform/hal headers into their
        # legacy locations and record those locations as include dirs.
        # NOTE: `dir` shadows the builtin here; kept as-is.
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        # CMSIS: headers go to the per-target dir, linker scripts and
        # binaries to the per-toolchain dir
        cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH)
        toolchain.copy_files(cmsis_implementation.headers, build_target)
        toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain)
        toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain)

        # Target HAL implementation, split the same way
        hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(cmsis_implementation + hal_implementation,
                                            library_incdirs + incdirs + [tmp_path])
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources
        # NOTE(review): starting the accumulation from None presumably
        # relies on the Resources class accepting None in its reflected
        # add (__radd__) — confirm in tools/toolchains before touching.
        mbed_resources = None
        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(dir)

        objects = toolchain.compile_sources(mbed_resources,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as objects as opposed to
        # way the linker search for symbols in archives. These are:
        #   - mbed_retarget.o: to make sure that the C standard lib symbols get
        #                 overridden
        #   - mbed_board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various
        #                       weak SDK functions
        #   - mbed_main.o: this contains main redirection
        separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o',
                                            'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        # Keep the separate objects out of the archive; they are copied
        # alongside it instead
        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            # NOTE(review): assumes create_result() initialized "output";
            # += would raise KeyError otherwise — confirm
            cur_result["output"] += str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
01115 
01116 
def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    # Build the sequence of per-target toolchain lists to walk over
    if release_targets:
        toolchain_lists = (entry[1] for entry in release_targets)
    else:
        toolchain_lists = (TARGET_MAP[name].supported_toolchains
                           for name in TARGET_NAMES)

    # Deduplicate while preserving first-seen order
    unique_supported_toolchains = []
    for toolchains in toolchain_lists:
        for toolchain in toolchains:
            if toolchain not in unique_supported_toolchains:
                unique_supported_toolchains.append(toolchain)

    # ARM support implies ARMC6 support
    if "ARM" in unique_supported_toolchains:
        unique_supported_toolchains.append("ARMC6")

    return unique_supported_toolchains
01142 
def mcu_toolchain_list(release_version='5'):
    """Return a newline-separated listing of the release columns followed
    by every toolchain supported in the given release.

    Keyword arguments:
    release_version - major release version as a string (e.g. '5'); any
                      non-string value selects all known targets/toolchains
    """
    # `basestring` only exists on Python 2 and raised NameError on Python 3
    # (this module already targets py3 via __future__ imports); `str` keeps
    # the check working on both — note py2 `unicode` input now falls through
    # to the 'all' branch.
    if isinstance(release_version, str):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    version_release_targets = {}
    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    columns = (["mbed OS %s" % x for x in RELEASE_VERSIONS] +
               unique_supported_toolchains)
    return "\n".join(columns)
01174 
01175 
def mcu_target_list(release_version='5'):
    """Return a newline-separated list of target names in the given release.

    Keyword arguments:
    release_version - major release version as a string (e.g. '5'); any
                      non-string value selects all known targets
    """
    # `basestring` only exists on Python 2 and raised NameError on Python 3
    # (this module already targets py3 via __future__ imports); `str` keeps
    # the check working on both — note py2 `unicode` input now falls through
    # to the 'all' branch.
    if isinstance(release_version, str):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    version_release_targets = {}
    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    # Release tuples are (name, toolchains, ...); fall back to all targets
    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    return "\n".join(target_names)
01211 
01212 
def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """  Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - regex; only targets matching it are included
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable

    # `basestring` only exists on Python 2 and raised NameError on Python 3
    # (this module already targets py3 via __future__ imports); `str` keeps
    # the check working on both.
    if isinstance(release_version, str):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [
            x[0] for x in version_release_targets[version]]

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns)
    # Align table: everything centered except the target name
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        # One column per release version
        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        # One column per toolchain; ARM support implies ARMC6 support
        for unique_toolchain in unique_supported_toolchains:
            if (unique_toolchain in TARGET_MAP[target].supported_toolchains or
                    (unique_toolchain == "ARMC6" and
                     "ARM" in TARGET_MAP[target].supported_toolchains)):
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
             else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    # NOTE(review): the toolchain count is only shown when exactly one
    # target matched — presumably intentional for single-platform queries
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result
01304 
01305 
def get_target_supported_toolchains(target):
    """Return the list of toolchains supported by *target*, or None when
    the target name is unknown.

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    if target not in TARGET_MAP:
        return None
    return TARGET_MAP[target].supported_toolchains
01314 
01315 
def print_build_results(result_list, build_name):
    """Format *result_list* as a bullet list under a *build_name* heading.

    Returns an empty string when there are no results; otherwise the
    heading line followed by one '  * item' line per result.
    """
    if not result_list:
        return ""
    bullets = "\n".join("  * %s" % entry for entry in result_list)
    return "%s\n%s\n" % (build_name, bullets)
01329 
def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results
    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable
    text_columns = ['name', 'target', 'toolchain']
    numeric_columns = ['static_ram', 'total_flash']
    table = PrettyTable(text_columns + numeric_columns)

    # Left-align text, right-align numbers
    for col in text_columns:
        table.align[col] = 'l'
    for col in numeric_columns:
        table.align[col] = 'r'

    # report is nested target -> toolchain -> name -> list of dicts
    for target_report in report.values():
        for toolchain_report in target_report.values():
            for result_dicts in toolchain_report.values():
                for result_dict in result_dicts:
                    for record in result_dict.values():
                        if 'memory_usage' in record and record['memory_usage']:
                            # The summary is in the last record of the
                            # 'memory_usage' section, hence [-1]
                            summary = record['memory_usage'][-1]['summary']
                            table.add_row([
                                record['description'],
                                record['target_name'],
                                record['toolchain_name'],
                                summary['static_ram'],
                                summary['total_flash'],
                            ])

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result
01374 
def write_build_report(build_report, template_filename, filename):
    """Render *build_report* through a jinja2 template and write the
    result to disk.

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of
                        build report
    filename - the location on disk to write the file to
    """
    # Partition reports into failing (at least one failure) and passing
    failing = [rep for rep in build_report if len(rep["failing"]) > 0]
    passing = [rep for rep in build_report if len(rep["failing"]) == 0]

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(
            failing_builds=failing,
            passing_builds=passing))
01401 
01402 
def merge_build_data(filename, toolchain_report, app_type):
    """Merge the builds in *toolchain_report* into the build-data JSON file
    at *filename*, creating the file when absent or unreadable.

    Positional arguments:
    filename - path of the build data JSON file (read, then rewritten)
    toolchain_report - nested dict: target -> toolchain -> project ->
                       list of builds (each build a list whose first
                       element is the build record dict)
    app_type - stored as 'type' on records that do not already carry one
    """
    path_to_file = dirname(abspath(filename))
    try:
        # Use a context manager: the original leaked the open file handle
        with open(filename) as in_file:
            build_data = load(in_file)
    except (IOError, ValueError):
        # Missing or corrupt file: start a fresh build list
        build_data = {'builds': []}
    for tgt in toolchain_report.values():
        for tc in tgt.values():
            for project in tc.values():
                for build in project:
                    try:
                        # Store artifact paths relative to the data file
                        build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
                        build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
                    except KeyError:
                        pass
                    if 'type' not in build[0]:
                        build[0]['type'] = app_type
                    build_data['builds'].append(build[0])
    # Text mode "w", not "wb": json.dump writes str objects, so binary
    # mode raised TypeError on Python 3 (and the handle was leaked too)
    with open(filename, "w") as out_file:
        dump(build_data, out_file, indent=4, separators=(',', ': '))