the other jimmy / mbed-sdk-tools

Fork of mbed-sdk-tools by mbed official

build_api.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2016 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 
00018 import re
00019 import tempfile
00020 from types import ListType
00021 from shutil import rmtree
00022 from os.path import join, exists, dirname, basename, abspath, normpath
00023 from os import linesep, remove
00024 from time import time
00025 
00026 from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\
00027     ToolException, InvalidReleaseTargetException
00028 from tools.paths import MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,\
00029     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE,\
00030     MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,\
00031     BUILD_DIR
00032 from tools.targets import TARGET_NAMES, TARGET_MAP
00033 from tools.libraries import Library
00034 from tools.toolchains import TOOLCHAIN_CLASSES, mbedToolchain
00035 from jinja2 import FileSystemLoader
00036 from jinja2.environment import Environment
00037 from tools.config import Config
00038 from tools.build_profiles import find_build_profile, get_toolchain_profile, find_targets_json
00039 from tools.targets import set_targets_json_location
00040 
00041 RELEASE_VERSIONS = ['2', '5']
00042 
00043 def prep_report (report, target_name, toolchain_name, id_name):
00044     """Setup report keys
00045 
00046     Positional arguments:
00047     report - the report to fill
00048     target_name - the target being used
00049     toolchain_name - the toolchain being used
00050     id_name - the name of the executable or library being built
00051     """
00052     if not target_name in report:
00053         report[target_name] = {}
00054 
00055     if not toolchain_name in report[target_name]:
00056         report[target_name][toolchain_name] = {}
00057 
00058     if not id_name in report[target_name][toolchain_name]:
00059         report[target_name][toolchain_name][id_name] = []
00060 
00061 def prep_properties (properties, target_name, toolchain_name, vendor_label):
00062     """Setup test properties
00063 
00064     Positional arguments:
00065     properties - the dict to fill
00066     target_name - the target the test is targeting
00067     toolchain_name - the toolchain that will compile the test
00068     vendor_label - the vendor
00069     """
00070     if not target_name in properties:
00071         properties[target_name] = {}
00072 
00073     if not toolchain_name in properties[target_name]:
00074         properties[target_name][toolchain_name] = {}
00075 
00076     properties[target_name][toolchain_name]["target"] = target_name
00077     properties[target_name][toolchain_name]["vendor"] = vendor_label
00078     properties[target_name][toolchain_name]["toolchain"] = toolchain_name
00079 
00080 def create_result (target_name, toolchain_name, id_name, description):
00081     """Create a result dictionary
00082 
00083     Positional arguments:
00084     target_name - the target being built for
00085     toolchain_name - the toolchain doing the building
00086     id_name - the name of the executable or library being built
00087     description - a human readable description of what's going on
00088     """
00089     cur_result = {}
00090     cur_result["target_name"] = target_name
00091     cur_result["toolchain_name"] = toolchain_name
00092     cur_result["id"] = id_name
00093     cur_result["description"] = description
00094     cur_result["elapsed_time"] = 0
00095     cur_result["output"] = ""
00096 
00097     return cur_result
00098 
00099 def add_result_to_report (report, result):
00100     """Add a single result to a report dictionary
00101 
00102     Positional arguments:
00103     report - the report to append to
00104     result - the result to append
00105     """
00106     target = result["target_name"]
00107     toolchain = result["toolchain_name"]
00108     id_name = result['id']
00109     result_wrap = {0: result}
00110     report[target][toolchain][id_name].append(result_wrap)
00111 
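# ---- Usage sketch (editorial addition, not part of the original module) ----
# The three report helpers above are meant to be used together: prep_report()
# creates the nested keys, create_result() builds an entry and
# add_result_to_report() appends it. Target/toolchain names below are examples:
#
#     report = {}
#     prep_report(report, "K64F", "GCC_ARM", "MY_APP")
#     result = create_result("K64F", "GCC_ARM", "MY_APP", "example build")
#     result["result"] = "OK"
#     add_result_to_report(report, result)
#     # report["K64F"]["GCC_ARM"]["MY_APP"] now holds [{0: result}]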
00112 def get_config (src_paths, target, toolchain_name):
00113     """Get the configuration object for a target-toolchain combination
00114 
00115     Positional arguments:
00116     src_paths - paths to scan for the configuration files
00117     target - the device we are building for
00118     toolchain_name - the string that identifies the build tools
00119     """
00120     # Convert src_paths to a list if needed
00121     if type(src_paths) != ListType:
00122         src_paths = [src_paths]
00123 
00124     # Pass all params to the unified prepare_resources()
00125     toolchain = prepare_toolchain(src_paths, target, toolchain_name)
00126 
00127     # Scan src_path for config files
00128     resources = toolchain.scan_resources(src_paths[0])
00129     for path in src_paths[1:]:
00130         resources.add(toolchain.scan_resources(path))
00131 
00132     # Update configuration files until newly added features introduce no changes
00133     prev_features = set()
00134     while True:
00135         # Update the configuration with any .json files found while scanning
00136         toolchain.config.add_config_files(resources.json_files)
00137 
00138         # Add features while we find new ones
00139         features = set(toolchain.config.get_features())
00140         if features == prev_features:
00141             break
00142 
00143         for feature in features:
00144             if feature in resources.features:
00145                 resources += resources.features[feature]
00146 
00147         prev_features = features
00148     toolchain.config.validate_config()
00149 
00150     cfg, macros = toolchain.config.get_config_data()
00151     features = toolchain.config.get_features()
00152     return cfg, macros, features
00153 
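# ---- Usage sketch (editorial addition, not part of the original module) ----
# get_config() resolves the configuration without building anything; the
# target and toolchain names below are only examples:
#
#     cfg, macros, features = get_config(["."], "K64F", "GCC_ARM")
#     for name, param in cfg.items():
#         print("%s = %s" % (name, param))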
00154 def is_official_target (target_name, version):
00155     """ Returns True, None if a target is part of the official release for the
00156     given version. Return False, 'reason' if a target is not part of the
00157     official release for the given version.
00158 
00159     Positional arguments:
00160     target_name - Name of the target (ex. 'K64F')
00161     version - The release version string. Should be a string contained within
00162               RELEASE_VERSIONS
00163     """
00164 
00165     result = True
00166     reason = None
00167     target = TARGET_MAP[target_name]
00168 
00169     if hasattr(target, 'release_versions') \
00170        and version in target.release_versions:
00171         if version == '2':
00172             # For version 2, either ARM or uARM toolchain support is required
00173             required_toolchains = set(['ARM', 'uARM'])
00174 
00175             if not len(required_toolchains.intersection(
00176                     set(target.supported_toolchains))) > 0:
00177                 result = False
00178                 reason = ("Target '%s' must support " % target.name) + \
00179                     ("one of the following toolchains to be included in the") + \
00180                     ((" mbed 2.0 official release: %s" + linesep) %
00181                      ", ".join(required_toolchains)) + \
00182                     ("Currently it is only configured to support the ") + \
00183                     ("following toolchains: %s" %
00184                      ", ".join(target.supported_toolchains))
00185 
00186         elif version == '5':
00187             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
00188             required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
00189             required_toolchains_sorted = list(required_toolchains)
00190             required_toolchains_sorted.sort()
00191             supported_toolchains = set(target.supported_toolchains)
00192             supported_toolchains_sorted = list(supported_toolchains)
00193             supported_toolchains_sorted.sort()
00194 
00195             if not required_toolchains.issubset(supported_toolchains):
00196                 result = False
00197                 reason = ("Target '%s' must support " % target.name) + \
00198                     ("ALL of the following toolchains to be included in the") + \
00199                     ((" mbed OS 5.0 official release: %s" + linesep) %
00200                      ", ".join(required_toolchains_sorted)) + \
00201                     ("Currently it is only configured to support the ") + \
00202                     ("following toolchains: %s" %
00203                      ", ".join(supported_toolchains_sorted))
00204 
00205             elif not target.default_lib == 'std':
00206                 result = False
00207                 reason = ("Target '%s' must set the " % target.name) + \
00208                     ("'default_lib' to 'std' to be included in the ") + \
00209                     ("mbed OS 5.0 official release." + linesep) + \
00210                     ("Currently it is set to '%s'" % target.default_lib)
00211 
00212         else:
00213             result = False
00214             reason = ("Target '%s' has set an invalid release version of '%s'" %
00215                       (target.name, version)) + linesep + \
00216                 ("Please choose from the following release versions: %s" %
00217                  ', '.join(RELEASE_VERSIONS))
00218 
00219     else:
00220         result = False
00221         if not hasattr(target, 'release_versions'):
00222             reason = "Target '%s' " % target.name
00223             reason += "does not have the 'release_versions' key set"
00224         elif not version in target.release_versions:
00225             reason = "Target '%s' does not contain the version '%s' " % \
00226                      (target.name, version)
00227             reason += "in its 'release_versions' key"
00228 
00229     return result, reason
00230 
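# ---- Usage sketch (editorial addition, not part of the original module) ----
# is_official_target() returns a (result, reason) pair, so callers typically
# unpack it and print the reason on failure (the target name is an example):
#
#     official, reason = is_official_target("K64F", "5")
#     if not official:
#         print(reason)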
00231 def transform_release_toolchains (toolchains, version):
00232     """ Given a list of toolchains and a release version, return a list of
00233     only the supported toolchains for that release
00234 
00235     Positional arguments:
00236     toolchains - The list of toolchains
00237     version - The release version string. Should be a string contained within
00238               RELEASE_VERSIONS
00239     """
00240     if version == '5':
00241         return ['ARM', 'GCC_ARM', 'IAR']
00242     else:
00243         return toolchains
00244 
00245 
00246 def get_mbed_official_release (version):
00247     """ Given a release version string, return a tuple of (target name,
00248     supported toolchains) pairs for that release.
00249     Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
00250                            ('K64F', ('ARM', 'GCC_ARM')), ...)
00251 
00252     Positional arguments:
00253     version - The version string. Should be a string contained within
00254               RELEASE_VERSIONS
00255     """
00256 
00257     mbed_official_release = (
00258         tuple(
00259             tuple(
00260                 [
00261                     TARGET_MAP[target].name,
00262                     tuple(transform_release_toolchains(
00263                         TARGET_MAP[target].supported_toolchains, version))
00264                 ]
00265             ) for target in TARGET_NAMES \
00266             if (hasattr(TARGET_MAP[target], 'release_versions')
00267                 and version in TARGET_MAP[target].release_versions)
00268         )
00269     )
00270 
00271     for target in mbed_official_release:
00272         is_official, reason = is_official_target(target[0], version)
00273 
00274         if not is_official:
00275             raise InvalidReleaseTargetException(reason)
00276 
00277     return mbed_official_release
00278 
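# ---- Usage sketch (editorial addition, not part of the original module) ----
# The release tuple pairs each target name with its release toolchains:
#
#     for target_name, toolchains in get_mbed_official_release("5"):
#         print("%s: %s" % (target_name, ", ".join(toolchains)))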
00279 
00280 def prepare_toolchain (src_paths, target, toolchain_name,
00281                       macros=None, clean=False, jobs=1,
00282                       notify=None, silent=False, verbose=False,
00283                       extra_verbose=False, config=None,
00284                       app_config=None, build_profile=None):
00285     """ Prepares resource related objects - toolchain, target, config
00286 
00287     Positional arguments:
00288     src_paths - the paths to source directories
00289     target - ['LPC1768', 'LPC11U24', 'LPC2368', etc.]
00290     toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
00291 
00292     Keyword arguments:
00293     macros - additional macros
00294     clean - Rebuild everything if True
00295     jobs - how many compilers we can run at once
00296     notify - Notify function for logs
00297     silent - suppress printing of progress indicators
00298     verbose - Write the actual tools command lines used if True
00299     extra_verbose - even more output!
00300     config - a Config object to use instead of creating one
00301     app_config - location of a chosen mbed_app.json file
00302     build_profile - a dict of flags that will be passed to the compiler
00303     """
00304 
00305     # We need to remove all paths which are repeated to avoid
00306     # multiple compilations and linking with the same objects
00307     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
00308 
00309     # If the configuration object was not yet created, create it now
00310     config = config or Config(target, src_paths, app_config=app_config)
00311     target = config.target
00312 
00313     # Toolchain instance
00314     try:
00315         toolchain = TOOLCHAIN_CLASSES[toolchain_name](
00316             target, notify, macros, silent,
00317             extra_verbose=extra_verbose, build_profile=build_profile)
00318     except KeyError:
00319         raise KeyError("Toolchain %s not supported" % toolchain_name)
00320 
00321     toolchain.config = config
00322     toolchain.jobs = jobs
00323     toolchain.build_all = clean
00324     toolchain.VERBOSE = verbose
00325 
00326     return toolchain
00327 
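# ---- Usage sketch (editorial addition, not part of the original module) ----
# prepare_toolchain() is the shared entry point used by get_config(),
# build_project() and build_library() below. A hypothetical direct call:
#
#     toolchain = prepare_toolchain(["."], "K64F", "GCC_ARM",
#                                   macros=["MY_MACRO=1"], jobs=4, clean=True)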
00328 def scan_resources (src_paths, toolchain, dependencies_paths=None,
00329                    inc_dirs=None, base_path=None):
00330     """ Scan resources using the initialized toolchain
00331 
00332     Positional arguments
00333     src_paths - the paths to source directories
00334     toolchain - valid toolchain object
00335     dependencies_paths - dependency paths that we should scan for include dirs
00336     inc_dirs - additional include directories to add to the scanner resources
00337     base_path - base directory of the scan (passed through to the toolchain's
00337                 scan_resources)
00338     """
00339 
00340     # Scan src_path
00341     resources = toolchain.scan_resources(src_paths[0], base_path=base_path)
00342     for path in src_paths[1:]:
00343         resources.add(toolchain.scan_resources(path, base_path=base_path))
00344 
00345     # Scan dependency paths for include dirs
00346     if dependencies_paths is not None:
00347         for path in dependencies_paths:
00348             lib_resources = toolchain.scan_resources(path)
00349             resources.inc_dirs.extend(lib_resources.inc_dirs)
00350 
00351     # Add additional include directories if passed
00352     if inc_dirs:
00353         if type(inc_dirs) == ListType:
00354             resources.inc_dirs.extend(inc_dirs)
00355         else:
00356             resources.inc_dirs.append(inc_dirs)
00357 
00358     # Load resources into the config system which might expand/modify resources
00359     # based on config data
00360     resources = toolchain.config.load_resources(resources)
00361 
00362     # Set the toolchain's configuration data
00363     toolchain.set_config_data(toolchain.config.get_config_data())
00364 
00365     return resources
00366 
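# ---- Usage sketch (editorial addition, not part of the original module) ----
# scan_resources() expects a toolchain prepared as above; the paths are
# examples only:
#
#     toolchain = prepare_toolchain(["."], "K64F", "GCC_ARM")
#     resources = scan_resources(["."], toolchain, inc_dirs=["extra/include"])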
00367 def build_project (src_paths, build_path, target, toolchain_name,
00368                   libraries_paths=None, linker_script=None,
00369                   clean=False, notify=None, verbose=False, name=None,
00370                   macros=None, inc_dirs=None, jobs=1, silent=False,
00371                   report=None, properties=None, project_id=None,
00372                   project_description=None, extra_verbose=False, config=None,
00373                   app_config=None, build_profile=None):
00374     """ Build a project. A project may be a test or a user program.
00375 
00376     Positional arguments:
00377     src_paths - a path or list of paths that contain all files needed to build
00378                 the project
00379     build_path - the directory where all of the object files will be placed
00380     target - the MCU or board that the project will compile for
00381     toolchain_name - the name of the build tools
00382 
00383     Keyword arguments:
00384     libraries_paths - The location of libraries to include when linking
00385     linker_script - the file that drives the linker to do its job
00386     clean - Rebuild everything if True
00387     notify - Notify function for logs
00388     verbose - Write the actual tools command lines used if True
00389     name - the name of the project
00390     macros - additional macros
00391     inc_dirs - additional directories where include files may be found
00392     jobs - how many compilers we can run at once
00393     silent - suppress printing of progress indicators
00394     report - a dict where a result may be appended
00395     properties - a dict to fill with build properties (see prep_properties)
00396     project_id - the name put in the report
00397     project_description - the human-readable version of what this thing does
00398     extra_verbose - even more output!
00399     config - a Config object to use instead of creating one
00400     app_config - location of a chosen mbed_app.json file
00401     build_profile - a dict of flags that will be passed to the compiler
00402     """
00403 
00404     # Convert src_path to a list if needed
00405     if type(src_paths) != ListType:
00406         src_paths = [src_paths]
00407     # Extend src_paths with libraries_paths
00408     if libraries_paths is not None:
00409         src_paths.extend(libraries_paths)
00410         inc_dirs = (inc_dirs or []) + map(dirname, libraries_paths)
00411 
00412     # Build Directory
00413     if clean and exists(build_path):
00414         rmtree(build_path)
00415     mkdir(build_path)
00416 
00417     ###################################
00418     # mbed Classic/2.0/library support #
00419 
00420     # Find build system profile
00421     profile = None
00422     targets_json = None
00423     for path in src_paths:
00424         profile = find_build_profile(path) or profile
00425         if profile:
00426             targets_json = join(dirname(abspath(__file__)), 'legacy_targets.json')
00427         else:
00428             targets_json = find_targets_json(path) or targets_json
00429 
00430     # Apply targets.json to active targets
00431     if targets_json:
00432         if verbose:
00433             print("Using targets from %s" % targets_json)
00434         set_targets_json_location(targets_json)
00435 
00436     # Apply profile to toolchains
00437     if profile:
00438         def init_hook(self):
00439             profile_data = get_toolchain_profile(self.name, profile)
00440             if not profile_data:
00441                 return
00442             if verbose:
00443                 self.info("Using toolchain %s profile %s" % (self.name, profile))
00444 
00445             for k, v in profile_data.items():
00446                 if k in self.flags:
00447                     self.flags[k] = v
00448                 else:
00449                     setattr(self, k, v)
00450 
00451         mbedToolchain.init = init_hook
00452 
00453     # mbed Classic/2.0/library support #
00454     ###################################
00455 
00456     # Pass all params to the unified prepare_toolchain()
00457     toolchain = prepare_toolchain(
00458         src_paths, target, toolchain_name, macros=macros, clean=clean,
00459         jobs=jobs, notify=notify, silent=silent, verbose=verbose,
00460         extra_verbose=extra_verbose, config=config, app_config=app_config,
00461         build_profile=build_profile)
00462 
00463     # The first path will give the name to the library
00464     if name is None:
00465         name = basename(normpath(abspath(src_paths[0])))
00466     toolchain.info("Building project %s (%s, %s)" %
00467                    (name, toolchain.target.name, toolchain_name))
00468 
00469     # Initialize reporting
00470     if report != None:
00471         start = time()
00472         # If project_id is specified, use that over the default name
00473         id_name = project_id.upper() if project_id else name.upper()
00474         description = project_description if project_description else name
00475         vendor_label = toolchain.target.extra_labels[0]
00476         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00477         cur_result = create_result(toolchain.target.name, toolchain_name,
00478                                    id_name, description)
00479         if properties != None:
00480             prep_properties(properties, toolchain.target.name, toolchain_name,
00481                             vendor_label)
00482 
00483     try:
00484         # Call unified scan_resources
00485         resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
00486 
00487         # Change linker script if specified
00488         if linker_script is not None:
00489             resources.linker_script = linker_script
00490 
00491         # Compile Sources
00492         objects = toolchain.compile_sources(resources, build_path,
00493                                             resources.inc_dirs)
00494         resources.objects.extend(objects)
00495 
00496         # Link Program
00497         res, _ = toolchain.link_program(resources, build_path, name)
00498 
00499         memap_instance = getattr(toolchain, 'memap_instance', None)
00500         memap_table = ''
00501         if memap_instance:
00502             # Write output to stdout in text (pretty table) format
00503             memap_table = memap_instance.generate_output('table')
00504 
00505             if not silent:
00506                 print memap_table
00507 
00508             # Write output to file in JSON format
00509             map_out = join(build_path, name + "_map.json")
00510             memap_instance.generate_output('json', map_out)
00511 
00512             # Write output to file in CSV format for the CI
00513             map_csv = join(build_path, name + "_map.csv")
00514             memap_instance.generate_output('csv-ci', map_csv)
00515 
00516         resources.detect_duplicates(toolchain)
00517 
00518         if report != None:
00519             end = time()
00520             cur_result["elapsed_time"] = end - start
00521             cur_result["output"] = toolchain.get_output() + memap_table
00522             cur_result["result"] = "OK"
00523             cur_result["memory_usage"] = toolchain.map_outputs
00524 
00525             add_result_to_report(report, cur_result)
00526 
00527         return res
00528 
00529     except Exception as exc:
00530         if report != None:
00531             end = time()
00532 
00533             if isinstance(exc, NotSupportedException):
00534                 cur_result["result"] = "NOT_SUPPORTED"
00535             else:
00536                 cur_result["result"] = "FAIL"
00537 
00538             cur_result["elapsed_time"] = end - start
00539 
00540             toolchain_output = toolchain.get_output()
00541             if toolchain_output:
00542                 cur_result["output"] += toolchain_output
00543 
00544             add_result_to_report(report, cur_result)
00545 
00546         # Let Exception propagate
00547         raise
00548 
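# ---- Usage sketch (editorial addition, not part of the original module) ----
# A minimal build_project() call; the paths, target and toolchain names are
# examples. The return value is the path of the linked image:
#
#     report = {}
#     image = build_project("source", "BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
#                           name="my_app", jobs=4, report=report)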
00549 def build_library (src_paths, build_path, target, toolchain_name,
00550                   dependencies_paths=None, name=None, clean=False,
00551                   archive=True, notify=None, verbose=False, macros=None,
00552                   inc_dirs=None, jobs=1, silent=False, report=None,
00553                   properties=None, extra_verbose=False, project_id=None,
00554                   remove_config_header_file=False, app_config=None,
00555                   build_profile=None):
00556     """ Build a library
00557 
00558     Positional arguments:
00559     src_paths - a path or list of paths that contain all files needed to build
00560                 the library
00561     build_path - the directory where all of the object files will be placed
00562     target - the MCU or board that the project will compile for
00563     toolchain_name - the name of the build tools
00564 
00565     Keyword arguments:
00566     dependencies_paths - The location of libraries to include when linking
00567     name - the name of the library
00568     clean - Rebuild everything if True
00569     archive - whether the library will create an archive file
00570     notify - Notify function for logs
00571     verbose - Write the actual tools command lines used if True
00572     macros - additional macros
00573     inc_dirs - additional directories where include files may be found
00574     jobs - how many compilers we can run at once
00575     silent - suppress printing of progress indicators
00576     report - a dict where a result may be appended
00577     properties - a dict to fill with build properties (see prep_properties)
00578     extra_verbose - even more output!
00579     project_id - the name that goes in the report
00580     remove_config_header_file - delete config header file when done building
00581     app_config - location of a chosen mbed_app.json file
00582     build_profile - a dict of flags that will be passed to the compiler
00583     """
00584 
00585     # Convert src_path to a list if needed
00586     if type(src_paths) != ListType:
00587         src_paths = [src_paths]
00588 
00589     # Build path
00590     if archive:
00591         # Use temp path when building archive
00592         tmp_path = join(build_path, '.temp')
00593         mkdir(tmp_path)
00594     else:
00595         tmp_path = build_path
00596 
00597     # Clean the build directory
00598     if clean and exists(tmp_path):
00599         rmtree(tmp_path)
00600     mkdir(tmp_path)
00601 
00602     # Pass all params to the unified prepare_toolchain()
00603     toolchain = prepare_toolchain(
00604         src_paths, target, toolchain_name, macros=macros, clean=clean,
00605         jobs=jobs, notify=notify, silent=silent, verbose=verbose,
00606         extra_verbose=extra_verbose, app_config=app_config,
00607         build_profile=build_profile)
00608 
00609     # The first path will give the name to the library
00610     if name is None:
00611         name = basename(normpath(abspath(src_paths[0])))
00612     toolchain.info("Building library %s (%s, %s)" %
00613                    (name, toolchain.target.name, toolchain_name))
00614 
00615     # Initialize reporting
00616     if report != None:
00617         start = time()
00618         # If project_id is specified, use that over the default name
00619         id_name = project_id.upper() if project_id else name.upper()
00620         description = name
00621         vendor_label = toolchain.target.extra_labels[0]
00622         prep_report(report, toolchain.target.name, toolchain_name, id_name)
00623         cur_result = create_result(toolchain.target.name, toolchain_name,
00624                                    id_name, description)
00625         if properties != None:
00626             prep_properties(properties, toolchain.target.name, toolchain_name,
00627                             vendor_label)
00628 
00629     for src_path in src_paths:
00630         if not exists(src_path):
00631             error_msg = "The library source folder does not exist: %s" % src_path
00632             if report != None:
00633                 cur_result["output"] = error_msg
00634                 cur_result["result"] = "FAIL"
00635                 add_result_to_report(report, cur_result)
00636             raise Exception(error_msg)
00637 
00638     try:
00639         # Call unified scan_resources
00640         resources = scan_resources(src_paths, toolchain,
00641                                    dependencies_paths=dependencies_paths,
00642                                    inc_dirs=inc_dirs)
00643 
00644 
00645         # Copy headers, objects and static libraries - all files needed for
00646         # static lib
00647         toolchain.copy_files(resources.headers, build_path, resources=resources)
00648         toolchain.copy_files(resources.objects, build_path, resources=resources)
00649         toolchain.copy_files(resources.libraries, build_path,
00650                              resources=resources)
00651         toolchain.copy_files(resources.json_files, build_path,
00652                              resources=resources)
00653         if resources.linker_script:
00654             toolchain.copy_files(resources.linker_script, build_path,
00655                                  resources=resources)
00656 
00657         if resources.hex_files:
00658             toolchain.copy_files(resources.hex_files, build_path,
00659                                  resources=resources)
00660 
00661         # Compile Sources
00662         objects = toolchain.compile_sources(resources, abspath(tmp_path),
00663                                             resources.inc_dirs)
00664         resources.objects.extend(objects)
00665 
00666         if archive:
00667             toolchain.build_library(objects, build_path, name)
00668 
00669         if remove_config_header_file:
00670             config_header_path = toolchain.get_config_header()
00671             if config_header_path:
00672                 remove(config_header_path)
00673 
00674         if report != None:
00675             end = time()
00676             cur_result["elapsed_time"] = end - start
00677             cur_result["output"] = toolchain.get_output()
00678             cur_result["result"] = "OK"
00679 
00680 
00681             add_result_to_report(report, cur_result)
00682         return True
00683 
00684     except Exception as exc:
00685         if report != None:
00686             end = time()
00687 
00688             if isinstance(exc, ToolException):
00689                 cur_result["result"] = "FAIL"
00690             elif isinstance(exc, NotSupportedException):
00691                 cur_result["result"] = "NOT_SUPPORTED"
00692 
00693             cur_result["elapsed_time"] = end - start
00694 
00695             toolchain_output = toolchain.get_output()
00696             if toolchain_output:
00697                 cur_result["output"] += toolchain_output
00698 
00699             add_result_to_report(report, cur_result)
00700 
00701         # Let Exception propagate
00702         raise
00703 
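# ---- Usage sketch (editorial addition, not part of the original module) ----
# Building a static library out of a source tree (names are examples):
#
#     build_library(["my_lib"], "BUILD/K64F/GCC_ARM", "K64F", "GCC_ARM",
#                   name="my_lib", archive=True, jobs=4)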
00704 ######################
00705 ### Legacy methods ###
00706 ######################
00707 
00708 def build_lib(lib_id, target, toolchain_name, verbose=False,
00709               clean=False, macros=None, notify=None, jobs=1, silent=False,
00710               report=None, properties=None, extra_verbose=False,
00711               build_profile=None):
00712     """ Legacy method for building mbed libraries
00713 
00714     Positional arguments:
00715     lib_id - the library's unique identifier
00716     target - the MCU or board that the project will compile for
00717     toolchain_name - the name of the build tools
00718 
00719     Keyword arguments:
00720     clean - Rebuild everything if True
00721     verbose - Write the actual tools command lines used if True
00722     macros - additional macros
00723     notify - Notify function for logs
00724     jobs - how many compilers we can run at once
00725     silent - suppress printing of progress indicators
00726     report - a dict where a result may be appended
00727     properties - a dict to fill with build properties (see prep_properties)
00728     extra_verbose - even more output!
00729     build_profile - a dict of flags that will be passed to the compiler
00730     """
00731     lib = Library(lib_id)
00732     if not lib.is_supported(target, toolchain_name):
00733         print('Library "%s" is not yet supported on target %s with toolchain %s'
00734               % (lib_id, target.name, toolchain_name))
00735         return False
00736 
00737     # We need to combine macros from parameter list with macros from library
00738     # definition
00739     lib_macros = lib.macros if lib.macros else []
00740     if macros:
00741         macros.extend(lib_macros)
00742     else:
00743         macros = lib_macros
00744 
00745     src_paths = lib.source_dir
00746     build_path = lib.build_dir
00747     dependencies_paths = lib.dependencies
00748     inc_dirs = lib.inc_dirs
00749     inc_dirs_ext = lib.inc_dirs_ext
00750 
00751     if type(src_paths) != ListType:
00752         src_paths = [src_paths]
00753 
00754     # The first path will give the name to the library
00755     name = basename(src_paths[0])
00756 
00757     if report != None:
00758         start = time()
00759         id_name = name.upper()
00760         description = name
00761         vendor_label = target.extra_labels[0]
00762         cur_result = None
00763         prep_report(report, target.name, toolchain_name, id_name)
00764         cur_result = create_result(target.name, toolchain_name, id_name,
00765                                    description)
00766 
00767         if properties != None:
00768             prep_properties(properties, target.name, toolchain_name,
00769                             vendor_label)
00770 
00771     for src_path in src_paths:
00772         if not exists(src_path):
00773             error_msg = "The library source folder does not exist: %s" % src_path
00774 
00775             if report != None:
00776                 cur_result["output"] = error_msg
00777                 cur_result["result"] = "FAIL"
00778                 add_result_to_report(report, cur_result)
00779 
00780             raise Exception(error_msg)
00781 
00782     try:
00783         # Toolchain instance
00784         toolchain = TOOLCHAIN_CLASSES[toolchain_name](
00785             target, macros=macros, notify=notify, silent=silent,
00786             extra_verbose=extra_verbose, build_profile=build_profile)
00787         toolchain.VERBOSE = verbose
00788         toolchain.jobs = jobs
00789         toolchain.build_all = clean
00790 
00791         toolchain.info("Building library %s (%s, %s)" %
00792                        (name.upper(), target.name, toolchain_name))
00793 
00794         # Take into account the library configuration (MBED_CONFIG_FILE)
00795         config = Config(target)
00796         toolchain.config = config
00797         config.add_config_files([MBED_CONFIG_FILE])
00798 
00799         # Scan Resources
00800         resources = []
00801         for src_path in src_paths:
00802             resources.append(toolchain.scan_resources(src_path))
00803 
00804         # Add extra include directories / files which are required by the library.
00805         # These files usually are not in the same directory as the source files,
00806         # so the previous scan will not include them
00807         if inc_dirs_ext is not None:
00808             for inc_ext in inc_dirs_ext:
00809                 resources.append(toolchain.scan_resources(inc_ext))
00810 
00811         # Dependencies Include Paths
00812         dependencies_include_dir = []
00813         if dependencies_paths is not None:
00814             for path in dependencies_paths:
00815                 lib_resources = toolchain.scan_resources(path)
00816                 dependencies_include_dir.extend(lib_resources.inc_dirs)
00817                 dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))
00818 
00819         if inc_dirs:
00820             dependencies_include_dir.extend(inc_dirs)
00821 
00822         # Add other discovered configuration data to the configuration object
00823         for res in resources:
00824             config.load_resources(res)
00825         toolchain.set_config_data(toolchain.config.get_config_data())
00826 
00827         # Create the desired build directory structure
00828         bin_path = join(build_path, toolchain.obj_path)
00829         mkdir(bin_path)
00830         tmp_path = join(build_path, '.temp', toolchain.obj_path)
00831         mkdir(tmp_path)
00832 
00833         # Copy Headers
00834         for resource in resources:
00835             toolchain.copy_files(resource.headers, build_path,
00836                                  resources=resource)
00837 
00838         dependencies_include_dir.extend(
00839             toolchain.scan_resources(build_path).inc_dirs)
00840 
00841         # Compile Sources
00842         objects = []
00843         for resource in resources:
00844             objects.extend(toolchain.compile_sources(resource, tmp_path,
00845                                                      dependencies_include_dir))
00846 
00847         needed_update = toolchain.build_library(objects, bin_path, name)
00848 
00849         if report != None and needed_update:
00850             end = time()
00851             cur_result["elapsed_time"] = end - start
00852             cur_result["output"] = toolchain.get_output()
00853             cur_result["result"] = "OK"
00854 
00855             add_result_to_report(report, cur_result)
00856         return True
00857 
00858     except Exception:
00859         if report != None:
00860             end = time()
00861             cur_result["result"] = "FAIL"
00862             cur_result["elapsed_time"] = end - start
00863 
00864             toolchain_output = toolchain.get_output()
00865             if toolchain_output:
00866                 cur_result["output"] += toolchain_output
00867 
00868             add_result_to_report(report, cur_result)
00869 
00870         # Let Exception propagate
00871         raise
00872 
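# ---- Usage sketch (editorial addition, not part of the original module) ----
# Unlike build_library(), the legacy build_lib() takes a library id and a
# Target object rather than paths and a name (identifiers are examples):
#
#     from tools.targets import TARGET_MAP
#     build_lib("rtos", TARGET_MAP["K64F"], "GCC_ARM", verbose=True)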
00873 # We do have unique legacy conventions about how we build and package the mbed
00874 # library
00875 def build_mbed_libs (target, toolchain_name, verbose=False,
00876                     clean=False, macros=None, notify=None, jobs=1, silent=False,
00877                     report=None, properties=None, extra_verbose=False,
00878                     build_profile=None):
00879     """ Function returns True if the library was built and False if building was
00880     skipped
00881 
00882     Positional arguments:
00883     target - the MCU or board that the project will compile for
00884     toolchain_name - the name of the build tools
00885 
00886     Keyword arguments:
00887     verbose - Write the actual tools command lines used if True
00888     clean - Rebuild everything if True
00889     macros - additional macros
00890     notify - Notify function for logs
00891     jobs - how many compilers we can run at once
00892     silent - suppress printing of progress indicators
00893     report - a dict where a result may be appended
00894     properties - a dict to fill with build properties (see prep_properties)
00895     extra_verbose - even more output!
00896     build_profile - a dict of flags that will be passed to the compiler
00897     """
00898 
00899     if report != None:
00900         start = time()
00901         id_name = "MBED"
00902         description = "mbed SDK"
00903         vendor_label = target.extra_labels[0]
00904         cur_result = None
00905         prep_report(report, target.name, toolchain_name, id_name)
00906         cur_result = create_result(target.name, toolchain_name, id_name,
00907                                    description)
00908 
00909         if properties != None:
00910             prep_properties(properties, target.name, toolchain_name,
00911                             vendor_label)
00912 
00913     # Check toolchain support
00914     if toolchain_name not in target.supported_toolchains:
00915         supported_toolchains_text = ", ".join(target.supported_toolchains)
00916         print('%s target is not yet supported by toolchain %s' %
00917               (target.name, toolchain_name))
00918         print('%s target supports %s toolchain%s' %
00919               (target.name, supported_toolchains_text, 's'
00920                if len(target.supported_toolchains) > 1 else ''))
00921 
00922         if report != None:
00923             cur_result["result"] = "SKIP"
00924             add_result_to_report(report, cur_result)
00925 
00926         return False
00927 
00928     try:
00929         # Toolchain
00930         toolchain = TOOLCHAIN_CLASSES[toolchain_name](
00931             target, macros=macros, notify=notify, silent=silent,
00932             extra_verbose=extra_verbose, build_profile=build_profile)
00933         toolchain.VERBOSE = verbose
00934         toolchain.jobs = jobs
00935         toolchain.build_all = clean
00936 
00937         # Take into account the library configuration (MBED_CONFIG_FILE)
00938         config = Config(target)
00939         toolchain.config = config
00940         config.add_config_files([MBED_CONFIG_FILE])
00941         toolchain.set_config_data(toolchain.config.get_config_data())
00942 
00943         # Source and Build Paths
00944         build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
00945         build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
00946         mkdir(build_toolchain)
00947 
00948         tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
00949         mkdir(tmp_path)
00950 
00951         # CMSIS
00952         toolchain.info("Building library %s (%s, %s)" %
00953                        ('CMSIS', target.name, toolchain_name))
00954         cmsis_src = MBED_CMSIS_PATH
00955         resources = toolchain.scan_resources(cmsis_src)
00956 
00957         toolchain.copy_files(resources.headers, build_target)
00958         toolchain.copy_files(resources.linker_script, build_toolchain)
00959         toolchain.copy_files(resources.bin_files, build_toolchain)
00960 
00961         objects = toolchain.compile_sources(resources, tmp_path)
00962         toolchain.copy_files(objects, build_toolchain)
00963 
00964         # mbed
00965         toolchain.info("Building library %s (%s, %s)" %
00966                        ('MBED', target.name, toolchain_name))
00967 
00968         # Common Headers
00969         toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
00970         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
00971 
00972         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
00973                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
00974                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
00975             resources = toolchain.scan_resources(dir)
00976             toolchain.copy_files(resources.headers, dest)
00977             library_incdirs.append(dest)
00978 
00979         # Target specific sources
00980         hal_src = MBED_TARGETS_PATH
00981         hal_implementation = toolchain.scan_resources(hal_src)
00982         toolchain.copy_files(hal_implementation.headers +
00983                              hal_implementation.hex_files +
00984                              hal_implementation.libraries +
00985                              [MBED_CONFIG_FILE],
00986                              build_target, resources=hal_implementation)
00987         toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
00988         toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
00989         incdirs = toolchain.scan_resources(build_target).inc_dirs
00990         objects = toolchain.compile_sources(hal_implementation, tmp_path,
00991                                             library_incdirs + incdirs)
00992         toolchain.copy_files(objects, build_toolchain)
00993 
00994         # Common Sources
00995         mbed_resources = None
00996         for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
00997             mbed_resources += toolchain.scan_resources(dir)
00998 
00999         objects = toolchain.compile_sources(mbed_resources, tmp_path,
01000                                             library_incdirs + incdirs)
01001 
01002         # A number of compiled files need to be copied as objects rather than
01003         # archived, due to the way the linker searches for symbols in archives. These are:
01004         #   - retarget.o: to make sure that the C standard lib symbols get
01005         #                 overridden
01006         #   - board.o: mbed_die is weak
01007         #   - mbed_overrides.o: this contains platform overrides of various
01008         #                       weak SDK functions
01009         separate_names, separate_objects = ['retarget.o', 'board.o',
01010                                             'mbed_overrides.o'], []
01011 
01012         for obj in objects:
01013             for name in separate_names:
01014                 if obj.endswith(name):
01015                     separate_objects.append(obj)
01016 
01017         for obj in separate_objects:
01018             objects.remove(obj)
01019 
01020         toolchain.build_library(objects, build_toolchain, "mbed")
01021 
01022         for obj in separate_objects:
01023             toolchain.copy_files(obj, build_toolchain)
01024 
01025         if report != None:
01026             end = time()
01027             cur_result["elapsed_time"] = end - start
01028             cur_result["output"] = toolchain.get_output()
01029             cur_result["result"] = "OK"
01030 
01031             add_result_to_report(report, cur_result)
01032 
01033         return True
01034 
01035     except Exception as exc:
01036         if report != None:
01037             end = time()
01038             cur_result["result"] = "FAIL"
01039             cur_result["elapsed_time"] = end - start
01040 
01041             toolchain_output = toolchain.get_output()
01042             if toolchain_output:
01043                 cur_result["output"] += toolchain_output
01044 
01045             cur_result["output"] += str(exc)
01046 
01047             add_result_to_report(report, cur_result)
01048 
01049         # Let Exception propagate
01050         raise
01051 
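# ---- Usage sketch (editorial addition, not part of the original module) ----
# build_mbed_libs() also takes a Target object; it returns False when the
# toolchain is not supported by the target (identifiers are examples):
#
#     from tools.targets import TARGET_MAP
#     if build_mbed_libs(TARGET_MAP["K64F"], "GCC_ARM", jobs=4):
#         print("mbed library built")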
01052 
01053 def get_unique_supported_toolchains (release_targets=None):
01054     """ Get list of all unique toolchains supported by targets
01055 
01056     Keyword arguments:
01057     release_targets - tuple structure returned from get_mbed_official_release().
01058                       If release_targets is not specified, then it queries all
01059                       known targets
01060     """
01061     unique_supported_toolchains = []
01062 
01063     if not release_targets:
01064         for target in TARGET_NAMES:
01065             for toolchain in TARGET_MAP[target].supported_toolchains:
01066                 if toolchain not in unique_supported_toolchains:
01067                     unique_supported_toolchains.append(toolchain)
01068     else:
01069         for target in release_targets:
01070             for toolchain in target[1]:
01071                 if toolchain not in unique_supported_toolchains:
01072                     unique_supported_toolchains.append(toolchain)
01073 
01074     return unique_supported_toolchains
01075 
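# ---- Usage sketch (editorial addition, not part of the original module) ----
# Either query every known target, or restrict the query to an official
# release:
#
#     all_toolchains = get_unique_supported_toolchains()
#     mbed5_toolchains = get_unique_supported_toolchains(
#         get_mbed_official_release("5"))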
01076 
01077 def mcu_toolchain_matrix (verbose_html=False, platform_filter=None,
01078                          release_version='5'):
01079     """ Show the target/toolchain support matrix using prettytable
01080 
01081     Keyword arguments:
01082     verbose_html - emit html instead of a simple table
01083     platform_filter - remove results that match the string
01084     release_version - get the matrix for this major version number
01085     """
01086     # Imported locally so builds still work when prettytable is not installed
01087     from prettytable import PrettyTable
01088 
01089     if isinstance(release_version, basestring):
01090         # Force release_version to lowercase if it is a string
01091         release_version = release_version.lower()
01092     else:
01093         # Otherwise default to printing all known targets and toolchains
01094         release_version = 'all'
01095 
01096 
01097     version_release_targets = {}
01098     version_release_target_names = {}
01099 
01100     for version in RELEASE_VERSIONS:
01101         version_release_targets[version] = get_mbed_official_release(version)
01102         version_release_target_names[version] = [x[0] for x in
01103                                                  version_release_targets[
01104                                                      version]]
01105 
01106     if release_version in RELEASE_VERSIONS:
01107         release_targets = version_release_targets[release_version]
01108     else:
01109         release_targets = None
01110 
01111     unique_supported_toolchains = get_unique_supported_toolchains(
01112         release_targets)
01113     prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
01114 
01115     # All tests status table print
01116     columns = prepend_columns + unique_supported_toolchains
01117     table_printer = PrettyTable(columns)
01118     # Align table
01119     for col in columns:
01120         table_printer.align[col] = "c"
01121     table_printer.align["Target"] = "l"
01122 
01123     perm_counter = 0
01124     target_counter = 0
01125 
01126     target_names = []
01127 
01128     if release_targets:
01129         target_names = [x[0] for x in release_targets]
01130     else:
01131         target_names = TARGET_NAMES
01132 
01133     for target in sorted(target_names):
01134         if platform_filter is not None:
01135             # Skip platforms that do not match the filter regex
01136             if re.search(platform_filter, target) is None:
01137                 continue
01138         target_counter += 1
01139 
01140         row = [target]  # First column is platform name
01141 
01142         for version in RELEASE_VERSIONS:
01143             if target in version_release_target_names[version]:
01144                 text = "Supported"
01145             else:
01146                 text = "-"
01147             row.append(text)
01148 
01149         for unique_toolchain in unique_supported_toolchains:
01150             if unique_toolchain in TARGET_MAP[target].supported_toolchains:
01151                 text = "Supported"
01152                 perm_counter += 1
01153             else:
01154                 text = "-"
01155 
01156             row.append(text)
01157         table_printer.add_row(row)
01158 
01159     result = table_printer.get_html_string() if verbose_html \
01160              else table_printer.get_string()
01161     result += "\n"
01162     result += "Supported targets: %d\n"% (target_counter)
01163     if target_counter == 1:
01164         result += "Supported toolchains: %d"% (perm_counter)
01165     return result
01166 
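# ---- Usage sketch (editorial addition, not part of the original module) ----
# The matrix is returned as a string, ready to print (the filter is an
# example regex):
#
#     print(mcu_toolchain_matrix(platform_filter="K64F", release_version="5"))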
01167 
01168 def get_target_supported_toolchains (target):
01169     """ Returns target supported toolchains list
01170 
01171     Positional arguments:
01172     target - the target to get the supported toolchains of
01173     """
01174     return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
01175         else None
01176 
01177 
01178 def static_analysis_scan (target, toolchain_name, cppcheck_cmd,
01179                          cppcheck_msg_format, verbose=False,
01180                          clean=False, macros=None, notify=None, jobs=1,
01181                          extra_verbose=False, build_profile=None):
01182     """Perform static analysis on a target and toolchain combination
01183 
01184     Positional arguments:
01185     target - the target to fake the build for
01186     toolchain_name - pretend you would compile with this toolchain
01187     cppcheck_cmd - the command used to do static analysis
01188     cppcheck_msg_format - the format of the check messages
01189 
01190     Keyword arguments:
01191     verbose - more printing!
01192     clean - start from a clean slate
01193     macros - extra macros to compile with
01194     notify - the notification event handling function
01195     jobs - number of commands to run at once
01196     extra_verbose - even more printing
01197     build_profile - a dict of flags that will be passed to the compiler
01198     """
01199     # Toolchain
01200     toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
01201                                                   notify=notify,
01202                                                   extra_verbose=extra_verbose,
01203                                                   build_profile=build_profile)
01204     toolchain.VERBOSE = verbose
01205     toolchain.jobs = jobs
01206     toolchain.build_all = clean
01207 
01208     # Source and Build Paths
01209     build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
01210     build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
01211     mkdir(build_toolchain)
01212 
01213     tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
01214     mkdir(tmp_path)
01215 
01216     # CMSIS
01217     toolchain.info("Static analysis for %s (%s, %s)" %
01218                    ('CMSIS', target.name, toolchain_name))
01219     cmsis_src = MBED_CMSIS_PATH
01220     resources = toolchain.scan_resources(cmsis_src)
01221 
01222     # Copy files before analysis
01223     toolchain.copy_files(resources.headers, build_target)
01224     toolchain.copy_files(resources.linker_script, build_toolchain)
01225 
01226     # Gather include paths, c, cpp sources and macros to transfer to cppcheck
01227     # command line
01228     includes = ["-I%s"% i for i in resources.inc_dirs]
01229     includes.append("-I%s"% str(build_target))
01230     c_sources = " ".join(resources.c_sources)
01231     cpp_sources = " ".join(resources.cpp_sources)
01232     macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
01233 
01234     includes = [inc.strip() for inc in includes]
01235     macros = [mac.strip() for mac in macros]
01236 
01237     check_cmd = cppcheck_cmd
01238     check_cmd += cppcheck_msg_format
01239     check_cmd += includes
01240     check_cmd += macros
01241 
01242     # We need to pass some params via a file to avoid "command line too long"
01243     # errors on some OSs
01244     tmp_file = tempfile.NamedTemporaryFile(delete=False)
01245     tmp_file.writelines(line + '\n' for line in c_sources.split())
01246     tmp_file.writelines(line + '\n' for line in cpp_sources.split())
01247     tmp_file.close()
01248     check_cmd += ["--file-list=%s"% tmp_file.name]
01249 
01250     _stdout, _stderr, _ = run_cmd(check_cmd)
01251     if verbose:
01252         print _stdout
01253     print _stderr
01254 
01255     # =========================================================================
01256 
01257     # MBED
01258     toolchain.info("Static analysis for %s (%s, %s)" %
01259                    ('MBED', target.name, toolchain_name))
01260 
01261     # Common Headers
01262     toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
01263     toolchain.copy_files(toolchain.scan_resources(MBED_DRIVERS).headers,
01264                          MBED_LIBRARIES)
01265     toolchain.copy_files(toolchain.scan_resources(MBED_PLATFORM).headers,
01266                          MBED_LIBRARIES)
01267     toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
01268                          MBED_LIBRARIES)
01269 
01270     # Target specific sources
01271     hal_src = join(MBED_TARGETS_PATH, "hal")
01272     hal_implementation = toolchain.scan_resources(hal_src)
01273 
01274     # Copy files before analysis
01275     toolchain.copy_files(hal_implementation.headers +
01276                          hal_implementation.hex_files, build_target,
01277                          resources=hal_implementation)
01278     incdirs = toolchain.scan_resources(build_target)
01279 
01280     target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
01281     target_includes.append("-I%s"% str(build_target))
01282     target_includes.append("-I%s"% str(hal_src))
01283     target_c_sources = " ".join(incdirs.c_sources)
01284     target_cpp_sources = " ".join(incdirs.cpp_sources)
01285     target_macros = ["-D%s"% s for s in
01286                      toolchain.get_symbols() + toolchain.macros]
01287 
01288     # Common Sources
01289     mbed_resources = toolchain.scan_resources(MBED_COMMON)
01290 
01291     # Gather include paths, c, cpp sources and macros to transfer to cppcheck
01292     # command line
01293     mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
01294     mbed_includes.append("-I%s"% str(build_target))
01295     mbed_includes.append("-I%s"% str(MBED_DRIVERS))
01296     mbed_includes.append("-I%s"% str(MBED_PLATFORM))
01297     mbed_includes.append("-I%s"% str(MBED_HAL))
01298     mbed_c_sources = " ".join(mbed_resources.c_sources)
01299     mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
01300 
01301     target_includes = [inc.strip() for inc in target_includes]
01302     mbed_includes = [inc.strip() for inc in mbed_includes]
01303     target_macros = [mac.strip() for mac in target_macros]
01304 
01305     check_cmd = cppcheck_cmd
01306     check_cmd += cppcheck_msg_format
01307     check_cmd += target_includes
01308     check_cmd += mbed_includes
01309     check_cmd += target_macros
01310 
01311     # We need to pass some params via file to avoid "command line too long"
01312     # errors on some OSs
01313     tmp_file = tempfile.NamedTemporaryFile(delete=False)
01314     tmp_file.writelines(line + '\n' for line in target_c_sources.split())
01315     tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
01316     tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
01317     tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
01318     tmp_file.close()
01319     check_cmd += ["--file-list=%s"% tmp_file.name]
01320 
01321     _stdout, _stderr, _ = run_cmd_ext(check_cmd)
01322     if verbose:
01323         print _stdout
01324     print _stderr
01325 
01326 
01327 def static_analysis_scan_lib (lib_id, target, toolchain, cppcheck_cmd,
01328                              cppcheck_msg_format, verbose=False,
01329                              clean=False, macros=None, notify=None, jobs=1,
01330                              extra_verbose=False, build_profile=None):
01331     """Perform static analysis on a library as if it were to be compiled for a
01332     particular target and toolchain combination
01333     """
01334     lib = Library(lib_id)
01335     if lib.is_supported(target, toolchain):
01336         static_analysis_scan_library(
01337             lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd,
01338             cppcheck_msg_format, lib.dependencies, verbose=verbose,
01339             clean=clean, macros=macros, notify=notify, jobs=jobs,
01340             extra_verbose=extra_verbose, build_profile=build_profile)
01341     else:
01342         print('Library "%s" is not yet supported on target %s with toolchain %s'
01343               % (lib_id, target.name, toolchain))
01344 
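# Illustrative call of static_analysis_scan_lib above (library id, target and
# cppcheck settings are hypothetical; real values come from the calling script):
#
#     static_analysis_scan_lib(
#         "rtos", TARGET_MAP["K64F"], "GCC_ARM",
#         ["cppcheck", "--enable=all"],
#         ["--template=[{severity}] {file}@{line}: {id}:{message}"],
#         verbose=True, jobs=4)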
01345 
01346 def static_analysis_scan_library (src_paths, build_path, target, toolchain_name,
01347                                  cppcheck_cmd, cppcheck_msg_format,
01348                                  dependencies_paths=None,
01349                                  name=None, clean=False, notify=None,
01350                                  verbose=False, macros=None, jobs=1,
01351                                  extra_verbose=False, build_profile=None):
01352     """ Scan a library for statically detectable defects
01353 
01354     Positional arguments:
01355     src_paths - the list of library paths to scan
01356     build_path - the location directory of result files
01357     target - the target to fake the build for
01358     toolchain_name - the toolchain to pretend to compile with
01359     cppcheck_cmd - the command used to do static analysis
01360     cppcheck_msg_format - the format of the check messages
01361 
01362     Keyword arguments:
01363     dependencies_paths - the paths to sources that this library depends on
01364     name - the name of this library
01365     clean - start from a clean slate
01366     notify - the notification event handling function
01367     verbose - more printing!
01368     macros - extra macros to compile with
01369     jobs - number of commands to run at once
01370     extra_verbose - even more printing
01371     build_profile - a dict of flags that will be passed to the compiler
01372     """
01373     if type(src_paths) != ListType:
01374         src_paths = [src_paths]
01375 
01376     for src_path in src_paths:
01377         if not exists(src_path):
01378             raise Exception("The library source folder does not exist: %s"
01379                             % src_path)
01380 
01381     # Toolchain instance
01382     toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, macros=macros,
01383                                                   notify=notify,
01384                                                   extra_verbose=extra_verbose,
01385                                                   build_profile=build_profile)
01386     toolchain.VERBOSE = verbose
01387     toolchain.jobs = jobs
01388 
01389     # If no name was given, the first path gives the library its name
01390     name = name or basename(src_paths[0])
01391     toolchain.info("Static analysis for library %s (%s, %s)" %
01392                    (name.upper(), target.name, toolchain_name))
01393 
01394     # Scan Resources
01395     resources = []
01396     for src_path in src_paths:
01397         resources.append(toolchain.scan_resources(src_path))
01398 
01399     # Dependencies Include Paths
01400     dependencies_include_dir = []
01401     if dependencies_paths is not None:
01402         for path in dependencies_paths:
01403             lib_resources = toolchain.scan_resources(path)
01404             dependencies_include_dir.extend(lib_resources.inc_dirs)
01405 
01406     # Create the desired build directory structure
01407     bin_path = join(build_path, toolchain.obj_path)
01408     mkdir(bin_path)
01409     tmp_path = join(build_path, '.temp', toolchain.obj_path)
01410     mkdir(tmp_path)
01411 
01412     # Gather include paths, c, cpp sources and macros to transfer to cppcheck
01413     # command line
01414     includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
01415     c_sources = " "
01416     cpp_sources = " "
01417     macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]
01418 
01419     # Copy Headers
01420     for resource in resources:
01421         toolchain.copy_files(resource.headers, build_path, resources=resource)
01422         includes += ["-I%s" % i for i in resource.inc_dirs]
01423         c_sources += " ".join(resource.c_sources) + " "
01424         cpp_sources += " ".join(resource.cpp_sources) + " "
01425 
01426     dependencies_include_dir.extend(
01427         toolchain.scan_resources(build_path).inc_dirs)
01428 
01429     includes = [inc.strip() for inc in includes]
01430     macros = [mac.strip() for mac in macros]
01431 
01432     check_cmd = cppcheck_cmd
01433     check_cmd += cppcheck_msg_format
01434     check_cmd += includes
01435     check_cmd += macros
01436 
01437     # We need to pass some parameters via file to avoid "command line too
01438     # long" errors on some OSs. A temporary file stores e.g. the cppcheck
01439     # list of files for the command line
01440     tmp_file = tempfile.NamedTemporaryFile(delete=False)
01441     tmp_file.writelines(line + '\n' for line in c_sources.split())
01442     tmp_file.writelines(line + '\n' for line in cpp_sources.split())
01443     tmp_file.close()
01444     check_cmd += ["--file-list=%s"% tmp_file.name]
01445 
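    # At this point check_cmd is a flat argv-style list, roughly of the form
    # (all values illustrative):
    #
    #     ["cppcheck", "--enable=all",
    #      "--template=[{severity}] {file}@{line}: {id}:{message}",
    #      "-I./my_lib", "-I./BUILD/my_lib",
    #      "-DTARGET_K64F", "-DTOOLCHAIN_GCC_ARM",
    #      "--file-list=/tmp/tmpXXXXXX"]
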
01446     # This allows us to grab results from both stdout and stderr (so we can
01447     # show them). We assume the static code analysis tool reports its
01448     # defects on stderr
01449     _stdout, _stderr, _ = run_cmd_ext(check_cmd)
01450     if verbose:
01451         print _stdout
01452     print _stderr
01453 
01454 
01455 def print_build_results (result_list, build_name):
01456     """ Generate result string for build results
01457 
01458     Positional arguments:
01459     result_list - the list of results to print
01460     build_name - the name of the build we are printing result for
01461     """
01462     result = ""
01463     if len(result_list) > 0:
01464         result += build_name + "\n"
01465         result += "\n".join(["  * %s" % f for f in result_list])
01466         result += "\n"
01467     return result
01468 
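# For example (illustrative result names), the call
# print_build_results(["K64F::GCC_ARM::MBED_A1", "K64F::GCC_ARM::MBED_A2"],
# "Build successes:") returns the string:
#
#     Build successes:
#       * K64F::GCC_ARM::MBED_A1
#       * K64F::GCC_ARM::MBED_A2
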
01469 def print_build_memory_usage (report):
01470     """ Generate a result table with memory usage values for build results.
01471     Aggregates reports obtained from self.get_memory_summary()
01472 
01473     Positional arguments:
01474     report - Report generated during build procedure.
01475     """
01476     from prettytable import PrettyTable
01477     columns_text = ['name', 'target', 'toolchain']
01478     columns_int = ['static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
01479     table = PrettyTable(columns_text + columns_int)
01480 
01481     for col in columns_text:
01482         table.align[col] = 'l'
01483 
01484     for col in columns_int:
01485         table.align[col] = 'r'
01486 
01487     for target in report:
01488         for toolchain in report[target]:
01489             for name in report[target][toolchain]:
01490                 for dlist in report[target][toolchain][name]:
01491                     for dlistelem in dlist:
01492                         # Get 'memory_usage' record and build table with
01493                         # statistics
01494                         record = dlist[dlistelem]
01495                         if 'memory_usage' in record and record['memory_usage']:
01496                             # Note that summary should be in the last record of
01497                             # 'memory_usage' section. This is why we are
01498                             # grabbing last "[-1]" record.
01499                             row = [
01500                                 record['description'],
01501                                 record['target_name'],
01502                                 record['toolchain_name'],
01503                                 record['memory_usage'][-1]['summary'][
01504                                     'static_ram'],
01505                                 record['memory_usage'][-1]['summary']['stack'],
01506                                 record['memory_usage'][-1]['summary']['heap'],
01507                                 record['memory_usage'][-1]['summary'][
01508                                     'total_ram'],
01509                                 record['memory_usage'][-1]['summary'][
01510                                     'total_flash'],
01511                             ]
01512                             table.add_row(row)
01513 
01514     result = "Memory map breakdown for built projects (values in Bytes):\n"
01515     result += table.get_string(sortby='name')
01516     return result
01517 
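# The nested loops in print_build_memory_usage above expect a report shaped
# roughly as follows (all keys and numbers illustrative):
#
#     report = {
#         "K64F": {
#             "GCC_ARM": {
#                 "mbed-build": [
#                     {"worker_result": {
#                         "description": "mbed-build",
#                         "target_name": "K64F",
#                         "toolchain_name": "GCC_ARM",
#                         "memory_usage": [{
#                             "summary": {
#                                 "static_ram": 10000, "stack": 3072,
#                                 "heap": 65536, "total_ram": 78608,
#                                 "total_flash": 62000}}]}}]}}}
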
01518 def write_build_report (build_report, template_filename, filename):
01519     """Write a build report to disk using a template file
01520 
01521     Positional arguments:
01522     build_report - a report generated by the build system
01523     template_filename - a file that contains the template for the style of build
01524                         report
01525     filename - the location on disk to write the file to
01526     """
01527     build_report_failing = []
01528     build_report_passing = []
01529 
01530     for report in build_report:
01531         if len(report["failing"]) > 0:
01532             build_report_failing.append(report)
01533         else:
01534             build_report_passing.append(report)
01535 
01536     env = Environment(extensions=['jinja2.ext.with_'])
01537     env.loader = FileSystemLoader('ci_templates')
01538     template = env.get_template(template_filename)
01539 
01540     with open(filename, 'w+') as placeholder:
01541         placeholder.write(template.render(
01542             failing_builds=build_report_failing,
01543             passing_builds=build_report_passing))
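# Illustrative use of write_build_report (template and output file names are
# hypothetical; the template is resolved relative to the local 'ci_templates'
# directory):
#
#     write_build_report(reports, "build_report.html", "out/build_report.html")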