Daniel Vizcaya / Mbed OS 04_RTOS_Embebidos
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers build_api.py Source File

build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import tempfile
00021 import datetime
00022 import uuid
00023 import struct
00024 import zlib
00025 import hashlib
00026 from shutil import rmtree
00027 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
00028 from os.path import relpath
00029 from os import linesep, remove, makedirs
00030 from time import time
00031 from intelhex import IntelHex
00032 from json import load, dump
00033 from jinja2 import FileSystemLoader
00034 from jinja2.environment import Environment
00035 
00036 from .arm_pack_manager import Cache
00037 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
00038                     ToolException, InvalidReleaseTargetException,
00039                     intelhex_offset, integer)
00040 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
00041                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
00042                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
00043                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
00044                     BUILD_DIR)
00045 from .targets import TARGET_NAMES, TARGET_MAP
00046 from .libraries import Library
00047 from .toolchains import TOOLCHAIN_CLASSES
00048 from .config import Config
00049 
# Valid mbed release version strings; used by is_official_target() and
# transform_release_toolchains() to decide official-release membership.
RELEASE_VERSIONS = ['2', '5']

def prep_report(report, target_name, toolchain_name, id_name):
    """Ensure the nested report keys exist for a build.

    Positional arguments:
    report - the report to fill
    target_name - the target being used
    toolchain_name - the toolchain being used
    id_name - the name of the executable or library being built
    """
    # setdefault creates each nesting level only when it is missing, so a
    # pre-existing result list is never clobbered.
    report.setdefault(target_name, {}) \
          .setdefault(toolchain_name, {}) \
          .setdefault(id_name, [])
00069 
def prep_properties(properties, target_name, toolchain_name, vendor_label):
    """Setup test properties

    Positional arguments:
    properties - the dict to fill
    target_name - the target the test is targeting
    toolchain_name - the toolchain that will compile the test
    vendor_label - the vendor
    """
    # Create the nesting levels only when missing, then (re)write the
    # per-combination identification fields.
    entry = properties.setdefault(target_name, {}) \
                      .setdefault(toolchain_name, {})
    entry["target"] = target_name
    entry["vendor"] = vendor_label
    entry["toolchain"] = toolchain_name
00088 
def create_result(target_name, toolchain_name, id_name, description):
    """Create a result dictionary

    Positional arguments:
    target_name - the target being built for
    toolchain_name - the toolchain doing the building
    id_name - the name of the executable or library being built
    description - a human readable description of what's going on
    """
    # elapsed_time and output start empty and are filled in as the build runs
    return {
        "target_name": target_name,
        "toolchain_name": toolchain_name,
        "id": id_name,
        "description": description,
        "elapsed_time": 0,
        "output": "",
    }
00107 
def add_result_to_report(report, result):
    """Add a single result to a report dictionary

    Positional arguments:
    report - the report to append to
    result - the result to append
    """
    # Stamp the result so individual runs can be told apart later
    result["date"] = datetime.datetime.utcnow().isoformat()
    result["uuid"] = str(uuid.uuid1())
    bucket = report[result["target_name"]][result["toolchain_name"]]
    # Results are wrapped in a single-key dict for the report consumers
    bucket[result['id']].append({0: result})
00122 
def get_config(src_paths, target, toolchain_name, app_config=None):
    """Get the configuration object for a target-toolchain combination

    Positional arguments:
    src_paths - paths to scan for the configuration files
    target - the device we are building for
    toolchain_name - the string that identifies the build tools

    Keyword arguments:
    app_config - location of a chosen mbed_app.json file
    """
    # Accept a single path as well as a list of paths
    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # Everything is routed through the unified prepare_toolchain()
    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
                                  app_config=app_config)

    # Gather resources (and hence config files) from every source root
    resources = toolchain.scan_resources(src_paths[0])
    for extra_path in src_paths[1:]:
        resources.add(toolchain.scan_resources(extra_path))

    # Keep folding feature directories into the resources until the
    # feature set reported by the config stops changing
    previous_features = set()
    while True:
        toolchain.config.add_config_files(resources.json_files)
        current_features = set(toolchain.config.get_features())
        if current_features == previous_features:
            break
        for feature in current_features:
            if feature in resources.features:
                resources += resources.features[feature]
        previous_features = current_features

    toolchain.config.validate_config()
    # Force evaluation of the region list so region config errors surface now
    if toolchain.config.has_regions:
        _ = list(toolchain.config.regions)

    cfg, macros = toolchain.config.get_config_data()
    return cfg, macros, toolchain.config.get_features()
00167 
def is_official_target(target_name, version):
    """ Returns True, None if a target is part of the official release for the
    given version. Return False, 'reason' if a target is not part of the
    official release for the given version.

    Positional arguments:
    target_name - Name of the target (ex. 'K64F')
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """

    result = True
    reason = None
    target = TARGET_MAP[target_name]

    if hasattr(target, 'release_versions') \
       and version in target.release_versions:
        if version == '2':
            # For version 2, either ARM or uARM toolchain support is required
            required_toolchains = set(['ARM', 'uARM'])

            if not len(required_toolchains.intersection(
                    set(target.supported_toolchains))) > 0:
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("one of the following toolchains to be included in the") + \
                    ((" mbed 2.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(target.supported_toolchains))

        elif version == '5':
            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
            required_toolchains_sorted = sorted(required_toolchains)
            supported_toolchains = set(target.supported_toolchains)
            supported_toolchains_sorted = sorted(supported_toolchains)

            if not required_toolchains.issubset(supported_toolchains):
                result = False
                reason = ("Target '%s' must support " % target.name) + \
                    ("ALL of the following toolchains to be included in the") + \
                    ((" mbed OS 5.0 official release: %s" + linesep) %
                     ", ".join(required_toolchains_sorted)) + \
                    ("Currently it is only configured to support the ") + \
                    ("following toolchains: %s" %
                     ", ".join(supported_toolchains_sorted))

            elif not target.default_lib == 'std':
                result = False
                reason = ("Target '%s' must set the " % target.name) + \
                    ("'default_lib' to 'std' to be included in the ") + \
                    ("mbed OS 5.0 official release." + linesep) + \
                    ("Currently it is set to '%s'" % target.default_lib)

        else:
            result = False
            # BUG FIX: the format string has two placeholders, so it must be
            # applied to (target.name, version); the original passed only
            # `version`, which raised a TypeError instead of reporting.
            reason = ("Target '%s' has set an invalid release version of '%s'" %
                      (target.name, version)) + \
                ("Please choose from the following release versions: %s" %
                 ', '.join(RELEASE_VERSIONS))

    else:
        result = False
        if not hasattr(target, 'release_versions'):
            reason = "Target '%s' " % target.name
            reason += "does not have the 'release_versions' key set"
        elif not version in target.release_versions:
            reason = "Target '%s' does not contain the version '%s' " % \
                     (target.name, version)
            reason += "in its 'release_versions' key"

    return result, reason
00244 
def transform_release_toolchains(toolchains, version):
    """ Given a list of toolchains and a release version, return a list of
    only the supported toolchains for that release

    Positional arguments:
    toolchains - The list of toolchains
    version - The release version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    # mbed OS 5 releases are restricted to the three official toolchains;
    # every other release version keeps the caller's list untouched.
    return ['ARM', 'GCC_ARM', 'IAR'] if version == '5' else toolchains
00258 
00259 
def get_mbed_official_release(version):
    """ Given a release version string, return a tuple that contains a target
    and the supported toolchains for that release.
    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
                           ('K64F', ('ARM', 'GCC_ARM')), ...)

    Positional arguments:
    version - The version string. Should be a string contained within
              RELEASE_VERSIONS
    """
    release_entries = []
    for target_key in TARGET_NAMES:
        target = TARGET_MAP[target_key]
        # Only targets that explicitly opt in to this release are listed
        if (hasattr(target, 'release_versions')
                and version in target.release_versions):
            toolchain_list = transform_release_toolchains(
                target.supported_toolchains, version)
            release_entries.append(
                tuple([target.name, tuple(toolchain_list)]))

    mbed_official_release = tuple(release_entries)

    # Sanity check: every listed target must genuinely qualify
    for entry in mbed_official_release:
        is_official, reason = is_official_target(entry[0], version)
        if not is_official:
            raise InvalidReleaseTargetException(reason)

    return mbed_official_release
00292 
ARM_COMPILERS = ("ARM", "ARMC6", "uARM")
def target_supports_toolchain(target, toolchain_name):
    """Return whether *target* can be built with *toolchain_name*.

    Any member of the ARM compiler family is considered supported as soon
    as the target supports at least one family member.
    """
    supported = target.supported_toolchains
    if toolchain_name not in ARM_COMPILERS:
        return toolchain_name in supported
    return any(compiler in supported for compiler in ARM_COMPILERS)
00299 
00300 
def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                      macros=None, clean=False, jobs=1,
                      notify=None, config=None, app_config=None,
                      build_profile=None, ignore=None):
    """ Prepares resource related objects - toolchain, target, config

    Positional arguments:
    src_paths - the paths to source directories
    build_dir - directory that build output is written to
    target - ['LPC1768', 'LPC11U24', etc.]
    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']

    Keyword arguments:
    macros - additional macros
    clean - Rebuild everything if True
    jobs - how many compilers we can run at once
    notify - Notify function for logs
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a list of mergeable build profiles
    ignore - list of paths to add to mbedignore
    """

    # Deduplicate source paths (keeping the first entry in place) so the
    # same objects are never compiled or linked twice
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))

    # Build a configuration object on demand
    config = config or Config(target, src_paths, app_config=app_config)
    target = config.target

    if not target_supports_toolchain(target, toolchain_name):
        raise NotSupportedException(
            "Target {} is not supported by toolchain {}".format(
                target.name, toolchain_name))

    try:
        toolchain_class = TOOLCHAIN_CLASSES[toolchain_name]
    except KeyError:
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    # Merge every supplied build profile into a single flag dictionary
    profile = {key: [] for key in ('c', 'cxx', 'common', 'asm', 'ld')}
    for contents in build_profile or []:
        for key, flags in profile.items():
            flags.extend(contents[toolchain_name].get(key, []))

    toolchain = toolchain_class(
        target, notify, macros, build_dir=build_dir, build_profile=profile)

    toolchain.config = config
    toolchain.jobs = jobs
    toolchain.build_all = clean

    if ignore:
        toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)

    return toolchain
00356 
def _printihex(ihex):
    """Pretty-print the contents of an IntelHex object (debug helper)."""
    import pprint
    printer = pprint.PrettyPrinter()
    printer.pprint(ihex.todict())
00360 
def _real_region_size(region):
    """Return the number of bytes actually occupied by *region*.

    Prefers the span of the region's hex contents; falls back to the
    declared region size when the contents cannot be measured.
    """
    try:
        contents = intelhex_offset(region.filename, offset=region.start)
        return contents.maxaddr() - contents.minaddr() + 1
    except AttributeError:
        # No measurable contents - trust the declared size instead
        return region.size
00367 
00368 
def _fill_header(region_list, current_region):
    """Fill an application header region

    This is done in three steps:
     * Fill the whole region with zeros
     * Fill const, timestamp and size entries with their data
     * Fill the digests using this header as the header region

    Positional arguments:
    region_list - all regions; used to resolve size/digest references by name
    current_region - the header region; its `filename` holds the list of
                     header members to encode
    """
    region_dict = {r.name: r for r in region_list}
    header = IntelHex()
    # Zero the whole region first so gaps between members are well defined
    header.puts(current_region.start, b'\x00' * current_region.size)
    start = current_region.start
    for member in current_region.filename:
        # Avoid shadowing the builtins `type` and `hash`
        _, member_type, subtype, data = member
        member_size = Config.header_member_size(member)
        if member_type == "const":
            fmt = {
                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
            }[subtype]
            header.puts(start, struct.pack(fmt, integer(data, 0)))
        elif member_type == "timestamp":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            # BUG FIX: time() returns a float; struct integer formats
            # require an int on Python 3
            header.puts(start, struct.pack(fmt, int(time())))
        elif member_type == "size":
            fmt = {"32le": "<L", "64le": "<Q",
                   "32be": ">L", "64be": ">Q"}[subtype]
            size = sum(_real_region_size(region_dict[r]) for r in data)
            header.puts(start, struct.pack(fmt, size))
        elif member_type == "digest":
            if data == "header":
                # Digest everything written to the header so far
                ih = header[:start]
            else:
                ih = intelhex_offset(region_dict[data].filename,
                                     offset=region_dict[data].start)
            if subtype.startswith("CRCITT32"):
                # BUG FIX: zlib.crc32 returns an unsigned value on Python 3;
                # pack it with unsigned formats (same bytes as the old
                # signed Python 2 value)
                fmt = {"CRCITT32be": ">L", "CRCITT32le": "<L"}[subtype]
                crc = zlib.crc32(ih.tobinarray()) & 0xffffffff
                header.puts(start, struct.pack(fmt, crc))
            elif subtype.startswith("SHA"):
                if subtype == "SHA256":
                    digest = hashlib.sha256()
                elif subtype == "SHA512":
                    digest = hashlib.sha512()
                else:
                    # Previously an unknown SHA subtype crashed with a
                    # NameError; fail with an explicit error instead
                    raise ToolException(
                        "Unknown digest subtype %s" % subtype)
                digest.update(ih.tobinarray())
                header.puts(start, digest.digest())
        start += member_size
    return header
00416 
def merge_region_list(region_list, destination, notify, padding=b'\xFF'):
    """Merge the region_list into a single image

    Positional Arguments:
    region_list - list of regions, which should contain filenames
    destination - file name to write all regions to
    notify - notifier used for progress and info messages
    padding - bytes to fill gaps with
    """
    merged = IntelHex()
    _, format = splitext(destination)

    notify.info("Merging Regions")

    for region in region_list:
        if region.active and not region.filename:
            raise ToolException("Active region has no contents: No file found.")
        if isinstance(region.filename, list):
            # A list of header members: generate the header image first
            header_basename, _ = splitext(destination)
            header_filename = header_basename + "_header.hex"
            _fill_header(region_list, region).tofile(header_filename,
                                                     format='hex')
            region = region._replace(filename=header_filename)
        if region.filename:
            notify.info("  Filling region %s with %s"
                        % (region.name, region.filename))
            part = intelhex_offset(region.filename, offset=region.start)
            part_size = (part.maxaddr() - part.minaddr()) + 1
            if part_size > region.size:
                raise ToolException("Contents of region %s does not fit"
                                    % region.name)
            merged.merge(part)
            pad_size = region.size - part_size
            if pad_size > 0 and region != region_list[-1]:
                notify.info("  Padding region %s with 0x%x bytes" %
                            (region.name, pad_size))
                # BUG FIX: compare with '==', not 'is' - string identity is
                # not guaranteed, so the hex branch was effectively dead and
                # hex output was padded anyway.
                if format == ".hex":
                    # Hex files carry explicit addresses, so the gap needs
                    # no padding bytes
                    pass
                else:
                    merged.puts(merged.maxaddr() + 1, padding * pad_size)

    if not exists(dirname(destination)):
        makedirs(dirname(destination))
    notify.info("Space used after regions merged: 0x%x" %
                (merged.maxaddr() - merged.minaddr() + 1))
    with open(destination, "wb+") as output:
        merged.tofile(output, format=format.strip("."))
00462 
def scan_resources(src_paths, toolchain, dependencies_paths=None,
                   inc_dirs=None, base_path=None, collect_ignores=False):
    """ Scan resources using an initialized toolchain

    Positional arguments
    src_paths - the paths to source directories
    toolchain - valid toolchain object
    dependencies_paths - dependency paths that we should scan for include dirs
    inc_dirs - additional include directories which should be added to
               the scanner resources

    Keyword arguments
    base_path - base path used when computing relative resource paths
    collect_ignores - also record ignored directories while scanning
    """

    # Merge the resources found under every source root
    resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
                                         collect_ignores=collect_ignores)
    for src_path in src_paths[1:]:
        resources.add(
            toolchain.scan_resources(src_path, base_path=base_path,
                                     collect_ignores=collect_ignores))

    # Dependency paths contribute only their include directories
    for dep_path in (dependencies_paths or []):
        resources.inc_dirs.extend(
            toolchain.scan_resources(dep_path).inc_dirs)

    # Fold in any extra include directories supplied by the caller
    if inc_dirs:
        if isinstance(inc_dirs, list):
            resources.inc_dirs.extend(inc_dirs)
        else:
            resources.inc_dirs.append(inc_dirs)

    # The config system may expand/modify resources based on config data
    resources = toolchain.config.load_resources(resources)

    # Set the toolchain's configuration data
    toolchain.set_config_data(toolchain.config.get_config_data())

    return resources
00503 
def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, linker_script=None, clean=False,
                  notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
                  report=None, properties=None, project_id=None,
                  project_description=None, config=None,
                  app_config=None, build_profile=None, stats_depth=None,
                  ignore=None):
    """ Build a project. A project may be a test or a user program.

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the project
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    libraries_paths - The location of libraries to include when linking
    linker_script - the file that drives the linker to do it's job
    clean - Rebuild everything if True
    notify - Notify function for logs
    name - the name of the project
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict of test properties to fill in
    project_id - the name put in the report
    project_description - the human-readable version of what this thing does
    config - a Config object to use instead of creating one
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler
    stats_depth - depth level for memap to display file/dirs
    ignore - list of paths to add to mbedignore
    """

    # Convert src_path to a list if needed
    if not isinstance(src_paths, list):
        src_paths = [src_paths]
    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)
        # BUG FIX: inc_dirs defaults to None; extending it unconditionally
        # crashed with AttributeError whenever libraries were supplied
        # without explicit include dirs.
        if inc_dirs is None:
            inc_dirs = []
        inc_dirs.extend(map(dirname, libraries_paths))

    if clean and exists(build_path):
        rmtree(build_path)
    mkdir(build_path)

    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, config=config,
        app_config=app_config, build_profile=build_profile, ignore=ignore)

    # The first path will give the name to the library
    name = (name or toolchain.config.name or
            basename(normpath(abspath(src_paths[0]))))
    notify.info("Building project %s (%s, %s)" %
                (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report is not None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties is not None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        if toolchain.config.has_regions:
            # With regions, link the application separately, then merge it
            # with the other regions into the final image
            res, _ = toolchain.link_program(resources, build_path,
                                            name + "_application")
            region_list = list(toolchain.config.regions)
            region_list = [r._replace(filename=res) if r.active else r
                           for r in region_list]
            res = "%s.%s" % (join(build_path, name),
                             getattr(toolchain.target, "OUTPUT_EXT", "bin"))
            merge_region_list(region_list, res, notify)
        else:
            res, _ = toolchain.link_program(resources, build_path, name)

        memap_instance = getattr(toolchain, 'memap_instance', None)
        memap_table = ''
        if memap_instance:
            # Write output to stdout in text (pretty table) format
            memap_table = memap_instance.generate_output('table', stats_depth)
            notify.info(memap_table)

            # Write output to file in JSON format
            map_out = join(build_path, name + "_map.json")
            memap_instance.generate_output('json', stats_depth, map_out)

            # Write output to file in CSV format for the CI
            map_csv = join(build_path, name + "_map.csv")
            memap_instance.generate_output('csv-ci', stats_depth, map_csv)

        resources.detect_duplicates(toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = (memap_instance.mem_report
                                          if memap_instance is not None
                                          else None)
            cur_result["bin"] = res
            cur_result["elf"] = splitext(res)[0] + ".elf"
            cur_result.update(toolchain.report)

            add_result_to_report(report, cur_result)

        return res

    except Exception as exc:
        if report is not None:
            end = time()

            if isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"

            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
00645 
00646 def build_library (src_paths, build_path, target, toolchain_name,
00647                   dependencies_paths=None, name=None, clean=False,
00648                   archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
00649                   report=None, properties=None, project_id=None,
00650                   remove_config_header_file=False, app_config=None,
00651                   build_profile=None, ignore=None):
00652     """ Build a library
00653 
00654     Positional arguments:
00655     src_paths - a path or list of paths that contain all files needed to build
00656                 the library
00657     build_path - the directory where all of the object files will be placed
00658     target - the MCU or board that the project will compile for
00659     toolchain_name - the name of the build tools
00660 
00661     Keyword arguments:
00662     dependencies_paths - The location of libraries to include when linking
00663     name - the name of the library
00664     clean - Rebuild everything if True
00665     archive - whether the library will create an archive file
00666     notify - Notify function for logs
00667     macros - additional macros
00668     inc_dirs - additional directories where include files may be found
00669     jobs - how many compilers we can run at once
00670     report - a dict where a result may be appended
00671     properties - UUUUHHHHH beats me
00672     project_id - the name that goes in the report
00673     remove_config_header_file - delete config header file when done building
00674     app_config - location of a chosen mbed_app.json file
00675     build_profile - a dict of flags that will be passed to the compiler
00676     ignore - list of paths to add to mbedignore
00677     """
00678 
00679     # Convert src_path to a list if needed
00680     if not isinstance(src_paths, list):
00681         src_paths = [src_paths]
00682 
00683     # Build path
00684     if archive:
00685         # Use temp path when building archive
00686         tmp_path = join(build_path, '.temp')
00687         mkdir(tmp_path)
00688     else:
00689         tmp_path = build_path
00690 
00691     # Clean the build directory
00692     if clean and exists(tmp_path):
00693         rmtree(tmp_path)
00694     mkdir(tmp_path)
00695 
00696     # Pass all params to the unified prepare_toolchain()
00697     toolchain = prepare_toolchain(
00698         src_paths, build_path, target, toolchain_name, macros=macros,
00699         clean=clean, jobs=jobs, notify=notify, app_config=app_config,
00700         build_profile=build_profile, ignore=ignore)
00701 
00702     # The first path will give the name to the library
00703     if name is None:
00704         name = basename(normpath(abspath(src_paths[0])))
00705     notify.info("Building library %s (%s, %s)" %
00706                    (name, toolchain.target.name, toolchain_name))
00707 
00708     # Initialize reporting
00709     if report != None:
00710         start = time()
00711         # If project_id is specified, use that over the default name
00712         id_name = project_id.upper() if project_id else name.upper()
00713         description = name
00714         vendor_label = toolchain.target.extra_labels[0]
00715         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00716         cur_result = create_result(toolchain.target.name, toolchain_name,
00717                                    id_name, description)
00718         cur_result['type'] = 'library'
00719         if properties != None:
00720             prep_properties(properties, toolchain.target.name, toolchain_name,
00721                             vendor_label)
00722 
00723     for src_path in src_paths:
00724         if not exists(src_path):
00725             error_msg = "The library source folder does not exist: %s", src_path
00726             if report != None:
00727                 cur_result["output"] = error_msg
00728                 cur_result["result"] = "FAIL"
00729                 add_result_to_report(report, cur_result)
00730             raise Exception(error_msg)
00731 
00732     try:
00733         # Call unified scan_resources
00734         resources = scan_resources(src_paths, toolchain,
00735                                    dependencies_paths=dependencies_paths,
00736                                    inc_dirs=inc_dirs)
00737 
00738 
00739         # Copy headers, objects and static libraries - all files needed for
00740         # static lib
00741         toolchain.copy_files(resources.headers, build_path, resources=resources)
00742         toolchain.copy_files(resources.objects, build_path, resources=resources)
00743         toolchain.copy_files(resources.libraries, build_path,
00744                              resources=resources)
00745         toolchain.copy_files(resources.json_files, build_path,
00746                              resources=resources)
00747         if resources.linker_script:
00748             toolchain.copy_files(resources.linker_script, build_path,
00749                                  resources=resources)
00750 
00751         if resources.hex_files:
00752             toolchain.copy_files(resources.hex_files, build_path,
00753                                  resources=resources)
00754 
00755         # Compile Sources
00756         objects = toolchain.compile_sources(resources, resources.inc_dirs)
00757         resources.objects.extend(objects)
00758 
00759         if archive:
00760             toolchain.build_library(objects, build_path, name)
00761 
00762         if remove_config_header_file:
00763             config_header_path = toolchain.get_config_header()
00764             if config_header_path:
00765                 remove(config_header_path)
00766 
00767         if report != None:
00768             end = time()
00769             cur_result["elapsed_time"] = end - start
00770             cur_result["result"] = "OK"
00771 
00772 
00773             add_result_to_report(report, cur_result)
00774         return True
00775 
00776     except Exception as exc:
00777         if report != None:
00778             end = time()
00779 
00780             if isinstance(exc, ToolException):
00781                 cur_result["result"] = "FAIL"
00782             elif isinstance(exc, NotSupportedException):
00783                 cur_result["result"] = "NOT_SUPPORTED"
00784 
00785             cur_result["elapsed_time"] = end - start
00786 
00787             add_result_to_report(report, cur_result)
00788 
00789         # Let Exception propagate
00790         raise
00791 
00792 ######################
00793 ### Legacy methods ###
00794 ######################
00795 
def mbed2_obj_path(target_name, toolchain_name):
    """Return the legacy mbed 2 object subdirectory for a target/toolchain.

    The toolchain *class* name is used rather than the name the caller
    passed, so aliases of one toolchain share a single object directory.
    """
    toolchain_class_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
    return join("TARGET_%s" % target_name,
                "TOOLCHAIN_%s" % toolchain_class_name)
00799 
def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
              notify=None, jobs=1, report=None, properties=None,
              build_profile=None, ignore=None):
    """ Legacy method for building mbed libraries

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict of extra reporting properties, keyed like `report`
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition.  Build a new list so the caller's `macros` is not mutated.
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros = list(macros) + lib_macros
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if not isinstance(src_paths, list):
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report is not None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # Interpolate the path; the old code accidentally built a
            # (format, arg) tuple here instead of a string.
            error_msg = ("The library source folder does not exist: %s"
                         % src_path)

            if report is not None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)

            raise Exception(error_msg)

    try:
        # Toolchain instance
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp', mbed2_obj_path(target.name,
                                                            toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
            ignore=ignore)

        notify.info("Building library %s (%s, %s)" %
                    (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library
        # These files usually are not in the same directory as source files so
        # the previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(
                    map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(
                resource, dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        if report is not None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
00954 
00955 # We do have unique legacy conventions about how we build and package the mbed
00956 # library
def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
                    notify=None, jobs=1, report=None, properties=None,
                    build_profile=None, ignore=None):
    """ Build the legacy mbed 2 SDK library for a target/toolchain pair.

    Returns True if the library was built and False if building was
    skipped (unsupported toolchain).

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    report - a dict where a result may be appended
    properties - a dict of extra reporting properties, keyed like `report`
    build_profile - a dict of flags that will be passed to the compiler
    ignore - list of paths to add to mbedignore
    """

    if report is not None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties is not None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' %
              (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' %
              (target.name, supported_toolchains_text, 's'
               if len(target.supported_toolchains) > 1 else ''))

        if report is not None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(MBED_LIBRARIES,
                               mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        # Toolchain
        tmp_path = join(MBED_LIBRARIES, '.temp',
                        mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros,
            notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
            ignore=ignore)

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # mbed
        notify.info("Building library %s (%s, %s)" %
                    ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        # `src_dir` instead of `dir` so the builtin is not shadowed
        for src_dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                              (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                              (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(src_dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH)
        toolchain.copy_files(cmsis_implementation.headers, build_target)
        toolchain.copy_files(cmsis_implementation.linker_script,
                             build_toolchain)
        toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain)

        hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(
            cmsis_implementation + hal_implementation,
            library_incdirs + incdirs + [tmp_path])
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources.  NOTE(review): starting from None relies on the
        # Resources type accepting addition with None (presumably __radd__);
        # preserved from the original code.
        mbed_resources = None
        for src_dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(src_dir)

        objects = toolchain.compile_sources(mbed_resources,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as objects as opposed to
        # way the linker search for symbols in archives. These are:
        #   - mbed_retarget.o: to make sure that the C standard lib symbols get
        #                 overridden
        #   - mbed_board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various
        #                       weak SDK functions
        #   - mbed_main.o: this contains main redirection
        separate_names = ['mbed_retarget.o', 'mbed_board.o',
                          'mbed_overrides.o', 'mbed_main.o',
                          'mbed_sdk_boot.o']
        separate_objects = []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report is not None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report is not None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            # Append defensively: a missing "output" key must not raise and
            # mask the original exception.
            cur_result["output"] = cur_result.get("output", "") + str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
01112 
01113 
def get_unique_supported_toolchains(release_targets=None):
    """ Get list of all unique toolchains supported by targets

    Keyword arguments:
    release_targets - tuple structure returned from get_mbed_official_release().
                      If release_targets is not specified, then it queries all
                      known targets
    """
    # Pick the per-target toolchain lists from either the release data or
    # the full target database.
    if release_targets:
        toolchain_lists = [entry[1] for entry in release_targets]
    else:
        toolchain_lists = [TARGET_MAP[name].supported_toolchains
                           for name in TARGET_NAMES]

    # Deduplicate while preserving first-seen order.
    unique_supported_toolchains = []
    for toolchains in toolchain_lists:
        for toolchain in toolchains:
            if toolchain not in unique_supported_toolchains:
                unique_supported_toolchains.append(toolchain)

    # ARMC6 is implied by ARM support.
    if "ARM" in unique_supported_toolchains:
        unique_supported_toolchains.append("ARMC6")

    return unique_supported_toolchains
01139 
def mcu_toolchain_list(release_version='5'):
    """Return a newline-separated list of the mbed OS release columns
    followed by every toolchain supported in that release.

    Keyword arguments:
    release_version - major release version to query (e.g. '2' or '5'); any
                      non-string value lists toolchains for all known targets
    """

    # `basestring` does not exist on Python 3 (it raised NameError here);
    # `str` is the correct check on both interpreters for these values.
    if isinstance(release_version, str):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    # Only the requested release needs to be resolved; the old code resolved
    # every release and built an unused name table.
    if release_version in RELEASE_VERSIONS:
        release_targets = get_mbed_official_release(release_version)
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    columns = (["mbed OS %s" % x for x in RELEASE_VERSIONS] +
               unique_supported_toolchains)
    return "\n".join(columns)
01171 
01172 
def mcu_target_list(release_version='5'):
    """Return a newline-separated list of target names in a release.

    Keyword arguments:
    release_version - major release version to query (e.g. '2' or '5'); any
                      non-string value lists all known targets
    """

    # `basestring` does not exist on Python 3 (it raised NameError here);
    # `str` is the correct check on both interpreters for these values.
    if isinstance(release_version, str):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    # Only the requested release needs to be resolved; the old code resolved
    # every release and built an unused name table.
    release_targets = None
    if release_version in RELEASE_VERSIONS:
        release_targets = get_mbed_official_release(release_version)

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    return "\n".join(target_names)
01208 
01209 
def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
                         release_version='5'):
    """  Shows target map using prettytable

    Keyword arguments:
    verbose_html - emit html instead of a simple table
    platform_filter - regex; only targets matching it are shown
    release_version - get the matrix for this major version number
    """
    # Only use it in this function so building works without extra modules
    from prettytable import PrettyTable

    # `basestring` does not exist on Python 3 (it raised NameError here);
    # `str` is the correct check on both interpreters for these values.
    if isinstance(release_version, str):
        # Force release_version to lowercase if it is a string
        release_version = release_version.lower()
    else:
        # Otherwise default to printing all known targets and toolchains
        release_version = 'all'

    version_release_targets = {}
    version_release_target_names = {}

    for version in RELEASE_VERSIONS:
        version_release_targets[version] = get_mbed_official_release(version)
        version_release_target_names[version] = [
            x[0] for x in version_release_targets[version]]

    if release_version in RELEASE_VERSIONS:
        release_targets = version_release_targets[release_version]
    else:
        release_targets = None

    unique_supported_toolchains = get_unique_supported_toolchains(
        release_targets)
    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]

    # All tests status table print
    columns = prepend_columns + unique_supported_toolchains
    table_printer = PrettyTable(columns)
    # Align table
    for col in columns:
        table_printer.align[col] = "c"
    table_printer.align["Target"] = "l"

    perm_counter = 0
    target_counter = 0

    if release_targets:
        target_names = [x[0] for x in release_targets]
    else:
        target_names = TARGET_NAMES

    for target in sorted(target_names):
        if platform_filter is not None:
            # Filter out platforms using regex
            if re.search(platform_filter, target) is None:
                continue
        target_counter += 1

        row = [target]  # First column is platform name

        # One "Supported"/"-" cell per mbed OS release
        for version in RELEASE_VERSIONS:
            if target in version_release_target_names[version]:
                text = "Supported"
            else:
                text = "-"
            row.append(text)

        # One cell per toolchain; ARMC6 piggybacks on "ARM" support
        for unique_toolchain in unique_supported_toolchains:
            if (unique_toolchain in TARGET_MAP[target].supported_toolchains or
                    (unique_toolchain == "ARMC6" and
                     "ARM" in TARGET_MAP[target].supported_toolchains)):
                text = "Supported"
                perm_counter += 1
            else:
                text = "-"

            row.append(text)
        table_printer.add_row(row)

    result = table_printer.get_html_string() if verbose_html \
             else table_printer.get_string()
    result += "\n"
    result += "Supported targets: %d\n" % (target_counter)
    # Toolchain count is only meaningful when exactly one target matched
    if target_counter == 1:
        result += "Supported toolchains: %d" % (perm_counter)
    return result
01301 
01302 
def get_target_supported_toolchains(target):
    """ Returns target supported toolchains list

    Positional arguments:
    target - the target to get the supported toolchains of
    """
    # Unknown targets yield None rather than raising
    if target in TARGET_MAP:
        return TARGET_MAP[target].supported_toolchains
    return None
01311 
01312 
def print_build_results(result_list, build_name):
    """ Generate result string for build results

    Positional arguments:
    result_list - the list of results to print
    build_name - the name of the build we are printing result for
    """
    # Empty result lists produce an empty string
    if not result_list:
        return ""
    lines = [build_name]
    lines.extend("  * %s" % entry for entry in result_list)
    return "\n".join(lines) + "\n"
01326 
def print_build_memory_usage(report):
    """ Generate result table with memory usage values for build results
    Aggregates (puts together) reports obtained from self.get_memory_summary()

    Positional arguments:
    report - Report generated during build procedure.
    """
    from prettytable import PrettyTable
    text_columns = ['name', 'target', 'toolchain']
    numeric_columns = ['static_ram', 'total_flash']
    table = PrettyTable(text_columns + numeric_columns)

    for col in text_columns:
        table.align[col] = 'l'
    for col in numeric_columns:
        table.align[col] = 'r'

    # report is nested target -> toolchain -> name -> list of record dicts
    for target_report in report.values():
        for toolchain_report in target_report.values():
            for result_lists in toolchain_report.values():
                for dlist in result_lists:
                    for record in dlist.values():
                        # Only records with a non-empty 'memory_usage'
                        # section contribute a row
                        if not record.get('memory_usage'):
                            continue
                        # The summary lives in the LAST 'memory_usage'
                        # entry, hence the [-1]
                        summary = record['memory_usage'][-1]['summary']
                        table.add_row([
                            record['description'],
                            record['target_name'],
                            record['toolchain_name'],
                            summary['static_ram'],
                            summary['total_flash'],
                        ])

    result = "Memory map breakdown for built projects (values in Bytes):\n"
    result += table.get_string(sortby='name')
    return result
01371 
def write_build_report(build_report, template_filename, filename):
    """Write a build report to disk using a template file

    Positional arguments:
    build_report - a report generated by the build system
    template_filename - a file that contains the template for the style of build
                        report
    filename - the location on disk to write the file to
    """
    # Partition reports by whether any build in them failed
    failing = [rep for rep in build_report if len(rep["failing"]) > 0]
    passing = [rep for rep in build_report if len(rep["failing"]) == 0]

    env = Environment(extensions=['jinja2.ext.with_'])
    env.loader = FileSystemLoader('ci_templates')
    template = env.get_template(template_filename)

    with open(filename, 'w+') as placeholder:
        placeholder.write(template.render(failing_builds=failing,
                                          passing_builds=passing))
01399 
def merge_build_data(filename, toolchain_report, app_type):
    """Merge a build report into the build-data JSON file on disk.

    Rewrites the 'elf'/'bin' paths of each build record relative to the
    JSON file's directory, tags untyped records with `app_type`, appends
    them to the file's 'builds' list, and writes the file back.

    Positional arguments:
    filename - path of the build-data JSON file (created if missing/invalid)
    toolchain_report - nested dict target -> toolchain -> project -> builds;
                       mutated in place (paths are relativized)
    app_type - value stored as 'type' on records that lack one
    """
    path_to_file = dirname(abspath(filename))
    try:
        # Close the handle deterministically; the old code leaked it
        with open(filename, "r") as in_file:
            build_data = load(in_file)
    except (IOError, ValueError):
        # Missing or corrupt file: start a fresh build list
        build_data = {'builds': []}
    for tgt in toolchain_report.values():
        for tc in tgt.values():
            for project in tc.values():
                for build in project:
                    try:
                        build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
                        build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
                    except KeyError:
                        # Record has no artifact paths; keep it as-is
                        pass
                    if 'type' not in build[0]:
                        build[0]['type'] = app_type
                    build_data['builds'].append(build[0])
    # Text mode, not "wb": json.dump emits str, which a binary-mode file
    # rejects on Python 3; also close the handle deterministically.
    with open(filename, "w") as out_file:
        dump(build_data, out_file, indent=4, separators=(',', ': '))