Clone of official tools

Revision: 29:1210849dba19
Parent:   27:5461402c33f8
Child:    30:f12ce67666d0
--- a/build_api.py	Mon Aug 29 10:55:42 2016 +0100
+++ b/build_api.py	Mon Aug 29 11:18:36 2016 +0100
@@ -17,18 +17,16 @@
 
 import re
 import tempfile
-import colorama
-
-from copy import copy
 from types import ListType
 from shutil import rmtree
-from os.path import join, exists, basename, abspath, normpath
-from os import getcwd, walk, linesep
+from os.path import join, exists, basename, abspath, normpath, dirname
+from os import linesep, remove
 from time import time
-import fnmatch
 
-from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException, InvalidReleaseTargetException
-from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
+from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\
+    ToolException, InvalidReleaseTargetException
+from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL,\
+    MBED_COMMON, MBED_CONFIG_FILE
 from tools.targets import TARGET_NAMES, TARGET_MAP, set_targets_json_location
 from tools.libraries import Library
 from tools.toolchains import TOOLCHAIN_CLASSES, mbedToolchain
@@ -40,7 +38,14 @@
 RELEASE_VERSIONS = ['2', '5']
 
 def prep_report(report, target_name, toolchain_name, id_name):
-    # Setup report keys
+    """Setup report keys
+
+    Positional arguments:
+    report - the report to fill
+    target_name - the target being used
+    toolchain_name - the toolchain being used
+    id_name - the name of the executable or library being built
+    """
     if not target_name in report:
         report[target_name] = {}
 
@@ -51,7 +56,14 @@
         report[target_name][toolchain_name][id_name] = []
 
 def prep_properties(properties, target_name, toolchain_name, vendor_label):
-    # Setup test properties
+    """Setup test properties
+
+    Positional arguments:
+    properties - the dict to fill
+    target_name - the target the test is targeting
+    toolchain_name - the toolchain that will compile the test
+    vendor_label - the vendor
+    """
     if not target_name in properties:
         properties[target_name] = {}
 
@@ -63,6 +75,14 @@
     properties[target_name][toolchain_name]["toolchain"] = toolchain_name
 
 def create_result(target_name, toolchain_name, id_name, description):
+    """Create a result dictionary
+
+    Positional arguments:
+    target_name - the target being built for
+    toolchain_name - the toolchain doing the building
+    id_name - the name of the executable or library being built
+    description - a human readable description of what's going on
+    """
     cur_result = {}
     cur_result["target_name"] = target_name
     cur_result["toolchain_name"] = toolchain_name
@@ -74,13 +94,26 @@
     return cur_result
 
 def add_result_to_report(report, result):
+    """Add a single result to a report dictionary
+
+    Positional arguments:
+    report - the report to append to
+    result - the result to append
+    """
     target = result["target_name"]
     toolchain = result["toolchain_name"]
     id_name = result['id']
-    result_wrap = { 0: result }
+    result_wrap = {0: result}
     report[target][toolchain][id_name].append(result_wrap)
 
 def get_config(src_paths, target, toolchain_name):
+    """Get the configuration object for a target-toolchain combination
+
+    Positional arguments:
+    src_paths - paths to scan for the configuration files
+    target - the device we are building for
+    toolchain_name - the string that identifies the build tools
+    """
     # Convert src_paths to a list if needed
     if type(src_paths) != ListType:
         src_paths = [src_paths]
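
The three report helpers above are meant to be used together: prep_report creates the nested target/toolchain/id structure, create_result builds a single result dict, and add_result_to_report appends it. A minimal sketch, assuming the module path tools.build_api and that create_result stores id_name under the 'id' key, as add_result_to_report expects:

    from tools.build_api import (prep_report, create_result,
                                 add_result_to_report)

    report = {}
    prep_report(report, "K64F", "GCC_ARM", "my_app")
    result = create_result("K64F", "GCC_ARM", "my_app", "example build")
    result["result"] = "OK"        # normally filled in by the build functions
    add_result_to_report(report, result)
    # report["K64F"]["GCC_ARM"]["my_app"] is now [{0: result}]
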
@@ -120,27 +153,33 @@
     given version. Return False, 'reason' if a target is not part of the
     official release for the given version.
 
-    target_name: Name if the target (ex. 'K64F')
-    version: The release version string. Should be a string contained within RELEASE_VERSIONS
+    Positional arguments:
+    target_name - Name of the target (ex. 'K64F')
+    version - The release version string. Should be a string contained within
+              RELEASE_VERSIONS
     """
-    
+
     result = True
     reason = None
     target = TARGET_MAP[target_name]
-    
-    if hasattr(target, 'release_versions') and version in target.release_versions:
+
+    if hasattr(target, 'release_versions') \
+       and version in target.release_versions:
         if version == '2':
             # For version 2, either ARM or uARM toolchain support is required
             required_toolchains = set(['ARM', 'uARM'])
-            
-            if not len(required_toolchains.intersection(set(target.supported_toolchains))) > 0:
-                result = False           
+
+            if not len(required_toolchains.intersection(
+                    set(target.supported_toolchains))) > 0:
+                result = False
                 reason = ("Target '%s' must support " % target.name) + \
-                    ("one of the folowing toolchains to be included in the mbed 2.0 ") + \
-                    (("official release: %s" + linesep) % ", ".join(required_toolchains)) + \
+                    ("one of the folowing toolchains to be included in the") + \
+                    ((" mbed 2.0 official release: %s" + linesep) %
+                     ", ".join(required_toolchains)) + \
                     ("Currently it is only configured to support the ") + \
-                    ("following toolchains: %s" % ", ".join(target.supported_toolchains))
-                    
+                    ("following toolchains: %s" %
+                     ", ".join(target.supported_toolchains))
+
         elif version == '5':
             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
             required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
@@ -149,45 +188,52 @@
             supported_toolchains = set(target.supported_toolchains)
             supported_toolchains_sorted = list(supported_toolchains)
             supported_toolchains_sorted.sort()
-            
+
             if not required_toolchains.issubset(supported_toolchains):
                 result = False
                 reason = ("Target '%s' must support " % target.name) + \
-                    ("ALL of the folowing toolchains to be included in the mbed OS 5.0 ") + \
-                    (("official release: %s" + linesep) % ", ".join(required_toolchains_sorted)) + \
+                    ("ALL of the folowing toolchains to be included in the") + \
+                    ((" mbed OS 5.0 official release: %s" + linesep) %
+                     ", ".join(required_toolchains_sorted)) + \
                     ("Currently it is only configured to support the ") + \
-                    ("following toolchains: %s" % ", ".join(supported_toolchains_sorted))
+                    ("following toolchains: %s" %
+                     ", ".join(supported_toolchains_sorted))
 
-            elif not target.default_build == 'standard':
+            elif not target.default_lib == 'std':
                 result = False
-                reason = ("Target '%s' must set the 'default_build' " % target.name) + \
-                    ("to 'standard' to be included in the mbed OS 5.0 ") + \
-                    ("official release." + linesep) + \
-                    ("Currently it is set to '%s'" % target.default_build)
+                reason = ("Target '%s' must set the " % target.name) + \
+                    ("'default_lib' to 'std' to be included in the ") + \
+                    ("mbed OS 5.0 official release." + linesep) + \
+                    ("Currently it is set to '%s'" % target.default_lib)
 
         else:
             result = False
-            reason = ("Target '%s' has set an invalid release version of '%s'" % version) + \
-                ("Please choose from the following release versions: %s" + ', '.join(RELEASE_VERSIONS))
+            reason = ("Target '%s' has set an invalid release version of '%s'" %
+                      (target.name, version)) + linesep + \
+                ("Please choose from the following release versions: %s" %
+                 ', '.join(RELEASE_VERSIONS))
 
     else:
         result = False
         if not hasattr(target, 'release_versions'):
-            reason = "Target '%s' does not have the 'release_versions' key set" % target.name
+            reason = "Target '%s' " % target.name
+            reason += "does not have the 'release_versions' key set"
         elif not version in target.release_versions:
-            reason = "Target '%s' does not contain the version '%s' in its 'release_versions' key" % (target.name, version)
-    
+            reason = "Target '%s' does not contain the version '%s' " % \
+                     (target.name, version)
+            reason += "in its 'release_versions' key"
+
     return result, reason
 
 def transform_release_toolchains(toolchains, version):
     """ Given a list of toolchains and a release version, return a list of
     only the supported toolchains for that release
 
-    toolchains: The list of toolchains
-    version: The release version string. Should be a string contained within RELEASE_VERSIONS
+    Positional arguments:
+    toolchains - The list of toolchains
+    version - The release version string. Should be a string contained within
+              RELEASE_VERSIONS
     """
-    toolchains_set = set(toolchains)
-
     if version == '5':
         return ['ARM', 'GCC_ARM', 'IAR']
     else:
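
is_official_target can also be called on its own to check a single target against every release; a short sketch, assuming the module path tools.build_api:

    from tools.build_api import RELEASE_VERSIONS, is_official_target

    for version in RELEASE_VERSIONS:          # ['2', '5']
        official, reason = is_official_target("K64F", version)
        if not official:
            print(reason)
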
@@ -197,41 +243,58 @@
 def get_mbed_official_release(version):
     """ Given a release version string, return a tuple that contains a target
     and the supported toolchains for that release.
-    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')), ('K64F', ('ARM', 'GCC_ARM')), ...)
+    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
+                           ('K64F', ('ARM', 'GCC_ARM')), ...)
 
-    version: The version string. Should be a string contained within RELEASE_VERSIONS
+    Positional arguments:
+    version - The version string. Should be a string contained within
+              RELEASE_VERSIONS
     """
 
-    MBED_OFFICIAL_RELEASE = (
+    mbed_official_release = (
         tuple(
             tuple(
                 [
                     TARGET_MAP[target].name,
-                    tuple(transform_release_toolchains(TARGET_MAP[target].supported_toolchains, version))
+                    tuple(transform_release_toolchains(
+                        TARGET_MAP[target].supported_toolchains, version))
                 ]
-            ) for target in TARGET_NAMES if (hasattr(TARGET_MAP[target], 'release_versions') and version in TARGET_MAP[target].release_versions)
+            ) for target in TARGET_NAMES \
+            if (hasattr(TARGET_MAP[target], 'release_versions')
+                and version in TARGET_MAP[target].release_versions)
         )
     )
-    
-    for target in MBED_OFFICIAL_RELEASE:
+
+    for target in mbed_official_release:
         is_official, reason = is_official_target(target[0], version)
-        
+
         if not is_official:
             raise InvalidReleaseTargetException(reason)
-            
-    return MBED_OFFICIAL_RELEASE
+
+    return mbed_official_release
 
 
 def prepare_toolchain(src_paths, target, toolchain_name,
-        macros=None, options=None, clean=False, jobs=1,
-        notify=None, silent=False, verbose=False, extra_verbose=False, config=None):
+                      macros=None, options=None, clean=False, jobs=1,
+                      notify=None, silent=False, verbose=False,
+                      extra_verbose=False, config=None):
     """ Prepares resource related objects - toolchain, target, config
-    src_paths: the paths to source directories
-    target: ['LPC1768', 'LPC11U24', 'LPC2368']
-    toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
-    clean: Rebuild everything if True
-    notify: Notify function for logs
-    verbose: Write the actual tools command lines if True
+
+    Positional arguments:
+    src_paths - the paths to source directories
+    target - ['LPC1768', 'LPC11U24', 'LPC2368', etc.]
+    toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
+
+    Keyword arguments:
+    macros - additional macros
+    options - general compiler options like debug-symbols or small-build
+    clean - Rebuild everything if True
+    jobs - how many compilers we can run at once
+    notify - Notify function for logs
+    silent - suppress printing of progress indicators
+    verbose - Write the actual tools command lines used if True
+    extra_verbose - even more output!
+    config - a Config object to use instead of creating one
     """
 
     # We need to remove all paths which are repeated to avoid
@@ -250,8 +313,10 @@
 
     # Toolchain instance
     try:
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
-    except KeyError as e:
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
+            target, options, notify, macros, silent,
+            extra_verbose=extra_verbose)
+    except KeyError:
         raise KeyError("Toolchain %s not supported" % toolchain_name)
 
     toolchain.config = config
@@ -261,12 +326,16 @@
 
     return toolchain
 
-def scan_resources(src_paths, toolchain, dependencies_paths=None, inc_dirs=None):
+def scan_resources(src_paths, toolchain, dependencies_paths=None,
+                   inc_dirs=None):
     """ Scan resources using initialized toolcain
-    src_paths: the paths to source directories
-    toolchain: valid toolchain object
-    dependencies_paths: dependency paths that we should scan for include dirs
-    inc_dirs: additional include directories which should be added to thescanner resources
+
+    Positional arguments:
+    src_paths - the paths to source directories
+    toolchain - valid toolchain object
+    dependencies_paths - dependency paths that we should scan for include dirs
+    inc_dirs - additional include directories which should be added to
+               the scanner resources
     """
 
     # Scan src_path
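
prepare_toolchain and scan_resources are the two building blocks shared by the higher-level build functions: the first returns a configured toolchain object, the second collects sources, headers and configuration data from the given paths. A rough sketch, assuming the module path tools.build_api and a hypothetical ./source directory; per the docstring the target is passed by name:

    from tools.build_api import prepare_toolchain, scan_resources

    src_paths = ["./source"]                  # hypothetical project sources
    toolchain = prepare_toolchain(src_paths, "K64F", "GCC_ARM",
                                  verbose=True, jobs=4)
    resources = scan_resources(src_paths, toolchain)
    print(resources.inc_dirs)                 # include dirs found by the scan
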
@@ -287,7 +356,8 @@
         else:
             resources.inc_dirs.append(inc_dirs)
 
-    # Load resources into the config system which might expand/modify resources based on config data
+    # Load resources into the config system which might expand/modify resources
+    # based on config data
     resources = toolchain.config.load_resources(resources)
 
     # Set the toolchain's configuration data
@@ -296,11 +366,38 @@
     return resources
 
 def build_project(src_paths, build_path, target, toolchain_name,
-        libraries_paths=None, options=None, linker_script=None,
-        clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
-        jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None,
-        extra_verbose=False, config=None):
-    """ This function builds project. Project can be for example one test / UT
+                  libraries_paths=None, options=None, linker_script=None,
+                  clean=False, notify=None, verbose=False, name=None,
+                  macros=None, inc_dirs=None, jobs=1, silent=False,
+                  report=None, properties=None, project_id=None,
+                  project_description=None, extra_verbose=False, config=None):
+    """ Build a project. A project may be a test or a user program.
+
+    Positional arguments:
+    src_paths - a path or list of paths that contain all files needed to build
+                the project
+    build_path - the directory where all of the object files will be placed
+    target - the MCU or board that the project will compile for
+    toolchain_name - the name of the build tools
+
+    Keyword arguments:
+    libraries_paths - The location of libraries to include when linking
+    options - general compiler options like debug-symbols or small-build
+    linker_script - the file that drives the linker to do its job
+    clean - Rebuild everything if True
+    notify - Notify function for logs
+    verbose - Write the actual tools command lines used if True
+    name - the name of the project
+    macros - additional macros
+    inc_dirs - additional directories where include files may be found
+    jobs - how many compilers we can run at once
+    silent - suppress printing of progress indicators
+    report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+    project_id - the name put in the report
+    project_description - a human-readable description of the project
+    extra_verbose - even more output!
+    config - a Config object to use instead of creating one
     """
 
     # Convert src_path to a list if needed
@@ -311,9 +408,8 @@
         src_paths.extend(libraries_paths)
 
     # Build Directory
-    if clean:
-        if exists(build_path):
-            rmtree(build_path)
+    if clean and exists(build_path):
+        rmtree(build_path)
     mkdir(build_path)
 
 
@@ -327,7 +423,7 @@
     for path in src_paths:
         profile = find_build_profile(path) or profile
         if profile:
-            targets_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'legacy_targets.json')
+            targets_json = join(dirname(abspath(__file__)), 'legacy_targets.json')
         else:
             targets_json = find_targets_json(path) or targets_json
 
@@ -355,14 +451,16 @@
         mbedToolchain.init = init_hook
 
     # Pass all params to the unified prepare_toolchain()
-    toolchain = prepare_toolchain(src_paths, target, toolchain_name,
-        macros=macros, options=options, clean=clean, jobs=jobs,
-        notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose, config=config)
+    toolchain = prepare_toolchain(
+        src_paths, target, toolchain_name, macros=macros, options=options,
+        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
+        extra_verbose=extra_verbose, config=config)
 
     # The first path will give the name to the library
     if name is None:
         name = basename(normpath(abspath(src_paths[0])))
-    toolchain.info("Building project %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))
+    toolchain.info("Building project %s (%s, %s)" %
+                   (name, toolchain.target.name, toolchain_name))
 
     # Initialize reporting
     if report != None:
@@ -372,9 +470,11 @@
         description = project_description if project_description else name
         vendor_label = toolchain.target.extra_labels[0]
         prep_report(report, toolchain.target.name, toolchain_name, id_name)
-        cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
+        cur_result = create_result(toolchain.target.name, toolchain_name,
+                                   id_name, description)
         if properties != None:
-            prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)
+            prep_properties(properties, toolchain.target.name, toolchain_name,
+                            vendor_label)
 
     try:
         # Call unified scan_resources
@@ -385,7 +485,8 @@
             resources.linker_script = linker_script
 
         # Compile Sources
-        objects = toolchain.compile_sources(resources, build_path, resources.inc_dirs)
+        objects = toolchain.compile_sources(resources, build_path,
+                                            resources.inc_dirs)
         resources.objects.extend(objects)
 
         # Link Program
@@ -402,11 +503,11 @@
 
         return res
 
-    except Exception, e:
+    except Exception as exc:
         if report != None:
             end = time()
 
-            if isinstance(e, NotSupportedException):
+            if isinstance(exc, NotSupportedException):
                 cur_result["result"] = "NOT_SUPPORTED"
             else:
                 cur_result["result"] = "FAIL"
@@ -423,19 +524,37 @@
         raise
 
 def build_library(src_paths, build_path, target, toolchain_name,
-         dependencies_paths=None, options=None, name=None, clean=False, archive=True,
-         notify=None, verbose=False, macros=None, inc_dirs=None,
-         jobs=1, silent=False, report=None, properties=None, extra_verbose=False,
-         project_id=None):
-    """ Prepares resource related objects - toolchain, target, config
-    src_paths: the paths to source directories
-    build_path: the path of the build directory
-    target: ['LPC1768', 'LPC11U24', 'LPC2368']
-    toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
-    clean: Rebuild everything if True
-    notify: Notify function for logs
-    verbose: Write the actual tools command lines if True
-    inc_dirs: additional include directories which should be included in build
+                  dependencies_paths=None, options=None, name=None, clean=False,
+                  archive=True, notify=None, verbose=False, macros=None,
+                  inc_dirs=None, jobs=1, silent=False, report=None,
+                  properties=None, extra_verbose=False, project_id=None,
+                  remove_config_header_file=False):
+    """ Build a library
+
+    Positional arguments:
+    src_paths - a path or list of paths that contain all files needed to build
+                the library
+    build_path - the directory where all of the object files will be placed
+    target - the MCU or board that the project will compile for
+    toolchain_name - the name of the build tools
+
+    Keyword arguments:
+    dependencies_paths - The location of libraries to include when linking
+    options - general compiler options like debug-symbols or small-build
+    name - the name of the library
+    clean - Rebuild everything if True
+    archive - whether the library will create an archive file
+    notify - Notify function for logs
+    verbose - Write the actual tools command lines used if True
+    macros - additional macros
+    inc_dirs - additional directories where include files may be found
+    jobs - how many compilers we can run at once
+    silent - suppress printing of progress indicators
+    report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+    extra_verbose - even more output!
+    project_id - the name that goes in the report
+    remove_config_header_file - delete config header file when done building
     """
 
     # Convert src_path to a list if needed
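
build_library mirrors build_project but archives the compiled objects instead of linking them; a sketch under the same assumptions (module path, directories and names are illustrative):

    from tools.build_api import build_library

    ok = build_library("./my_lib", "./BUILD/libs/K64F/GCC_ARM",
                       "K64F", "GCC_ARM",
                       name="my_lib", archive=True, clean=True, jobs=4)
    print("library built" if ok else "build skipped")
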
@@ -450,15 +569,22 @@
     else:
         tmp_path = build_path
 
+    # Clean the build directory
+    if clean and exists(tmp_path):
+        rmtree(tmp_path)
+    mkdir(tmp_path)
+
     # Pass all params to the unified prepare_toolchain()
-    toolchain = prepare_toolchain(src_paths, target, toolchain_name,
-        macros=macros, options=options, clean=clean, jobs=jobs,
-        notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose)
+    toolchain = prepare_toolchain(
+        src_paths, target, toolchain_name, macros=macros, options=options,
+        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
+        extra_verbose=extra_verbose)
 
     # The first path will give the name to the library
     if name is None:
         name = basename(normpath(abspath(src_paths[0])))
-    toolchain.info("Building library %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))
+    toolchain.info("Building library %s (%s, %s)" %
+                   (name, toolchain.target.name, toolchain_name))
 
     # Initialize reporting
     if report != None:
@@ -468,9 +594,11 @@
         description = name
         vendor_label = toolchain.target.extra_labels[0]
         prep_report(report, toolchain.target.name, toolchain_name, id_name)
-        cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
+        cur_result = create_result(toolchain.target.name, toolchain_name,
+                                   id_name, description)
         if properties != None:
-            prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)
+            prep_properties(properties, toolchain.target.name, toolchain_name,
+                            vendor_label)
 
     for src_path in src_paths:
         if not exists(src_path):
@@ -483,42 +611,57 @@
 
     try:
         # Call unified scan_resources
-        resources = scan_resources(src_paths, toolchain, dependencies_paths=dependencies_paths, inc_dirs=inc_dirs)
+        resources = scan_resources(src_paths, toolchain,
+                                   dependencies_paths=dependencies_paths,
+                                   inc_dirs=inc_dirs)
 
 
-        # Copy headers, objects and static libraries - all files needed for static lib
+        # Copy headers, objects and static libraries - all files needed for
+        # static lib
         toolchain.copy_files(resources.headers, build_path, resources=resources)
         toolchain.copy_files(resources.objects, build_path, resources=resources)
-        toolchain.copy_files(resources.libraries, build_path, resources=resources)
+        toolchain.copy_files(resources.libraries, build_path,
+                             resources=resources)
+        toolchain.copy_files(resources.json_files, build_path,
+                             resources=resources)
         if resources.linker_script:
-            toolchain.copy_files(resources.linker_script, build_path, resources=resources)
+            toolchain.copy_files(resources.linker_script, build_path,
+                                 resources=resources)
 
         if resources.hex_files:
-            toolchain.copy_files(resources.hex_files, build_path, resources=resources)
+            toolchain.copy_files(resources.hex_files, build_path,
+                                 resources=resources)
 
         # Compile Sources
-        objects = toolchain.compile_sources(resources, abspath(tmp_path), resources.inc_dirs)
+        objects = toolchain.compile_sources(resources, abspath(tmp_path),
+                                            resources.inc_dirs)
         resources.objects.extend(objects)
 
         if archive:
             toolchain.build_library(objects, build_path, name)
 
+        if remove_config_header_file:
+            config_header_path = toolchain.get_config_header()
+            if config_header_path:
+                remove(config_header_path)
+
         if report != None:
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["output"] = toolchain.get_output()
             cur_result["result"] = "OK"
 
+
             add_result_to_report(report, cur_result)
         return True
 
-    except Exception, e:
+    except Exception as exc:
         if report != None:
             end = time()
 
-            if isinstance(e, ToolException):
+            if isinstance(exc, ToolException):
                 cur_result["result"] = "FAIL"
-            elif isinstance(e, NotSupportedException):
+            elif isinstance(exc, NotSupportedException):
                 cur_result["result"] = "NOT_SUPPORTED"
 
             cur_result["elapsed_time"] = end - start
@@ -530,27 +673,47 @@
             add_result_to_report(report, cur_result)
 
         # Let Exception propagate
-        raise e
+        raise
 
 ######################
 ### Legacy methods ###
 ######################
 
-def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+def build_lib(lib_id, target, toolchain_name, options=None, verbose=False,
+              clean=False, macros=None, notify=None, jobs=1, silent=False,
+              report=None, properties=None, extra_verbose=False):
     """ Legacy method for building mbed libraries
-        Function builds library in proper directory using all dependencies and macros defined by user.
+
+    Positional arguments:
+    lib_id - the library's unique identifier
+    target - the MCU or board that the project will compile for
+    toolchain_name - the name of the build tools
+
+    Keyword arguments:
+    options - general compiler options like debug-symbols or small-build
+    clean - Rebuild everything if True
+    verbose - Write the actual tools command lines used if True
+    macros - additional macros
+    notify - Notify function for logs
+    jobs - how many compilers we can run at once
+    silent - suppress printing of progress indicators
+    report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+    extra_verbose - even more output!
     """
     lib = Library(lib_id)
     if not lib.is_supported(target, toolchain_name):
-        print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
+        print('Library "%s" is not yet supported on target %s with toolchain %s'
+              % (lib_id, target.name, toolchain_name))
         return False
 
-    # We need to combine macros from parameter list with macros from library definition
-    MACROS = lib.macros if lib.macros else []
+    # We need to combine macros from parameter list with macros from library
+    # definition
+    lib_macros = lib.macros if lib.macros else []
     if macros:
-        macros.extend(MACROS)
+        macros.extend(lib_macros)
     else:
-        macros = MACROS
+        macros = lib_macros
 
     src_paths = lib.source_dir
     build_path = lib.build_dir
@@ -558,17 +721,6 @@
     inc_dirs = lib.inc_dirs
     inc_dirs_ext = lib.inc_dirs_ext
 
-    """ src_path: the path of the source directory
-    build_path: the path of the build directory
-    target: ['LPC1768', 'LPC11U24', 'LPC2368']
-    toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
-    library_paths: List of paths to additional libraries
-    clean: Rebuild everything if True
-    notify: Notify function for logs
-    verbose: Write the actual tools command lines if True
-    inc_dirs: additional include directories which should be included in build
-    inc_dirs_ext: additional include directories which should be copied to library directory
-    """
     if type(src_paths) != ListType:
         src_paths = [src_paths]
 
@@ -582,10 +734,12 @@
         vendor_label = target.extra_labels[0]
         cur_result = None
         prep_report(report, target.name, toolchain_name, id_name)
-        cur_result = create_result(target.name, toolchain_name, id_name, description)
+        cur_result = create_result(target.name, toolchain_name, id_name,
+                                   description)
 
         if properties != None:
-            prep_properties(properties, target.name, toolchain_name, vendor_label)
+            prep_properties(properties, target.name, toolchain_name,
+                            vendor_label)
 
     for src_path in src_paths:
         if not exists(src_path):
@@ -600,12 +754,20 @@
 
     try:
         # Toolchain instance
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
+            target, options, macros=macros, notify=notify, silent=silent,
+            extra_verbose=extra_verbose)
         toolchain.VERBOSE = verbose
         toolchain.jobs = jobs
         toolchain.build_all = clean
 
-        toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+        toolchain.info("Building library %s (%s, %s)" %
+                       (name.upper(), target.name, toolchain_name))
+
+        # Take into account the library configuration (MBED_CONFIG_FILE)
+        config = Config(target)
+        toolchain.config = config
+        config.add_config_files([MBED_CONFIG_FILE])
 
         # Scan Resources
         resources = []
@@ -629,6 +791,11 @@
         if inc_dirs:
             dependencies_include_dir.extend(inc_dirs)
 
+        # Add other discovered configuration data to the configuration object
+        for res in resources:
+            config.load_resources(res)
+        toolchain.set_config_data(toolchain.config.get_config_data())
+
         # Create the desired build directory structure
         bin_path = join(build_path, toolchain.obj_path)
         mkdir(bin_path)
@@ -637,14 +804,17 @@
 
         # Copy Headers
         for resource in resources:
-            toolchain.copy_files(resource.headers, build_path, resources=resource)
+            toolchain.copy_files(resource.headers, build_path,
+                                 resources=resource)
 
-        dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+        dependencies_include_dir.extend(
+            toolchain.scan_resources(build_path).inc_dirs)
 
         # Compile Sources
         objects = []
         for resource in resources:
-            objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+            objects.extend(toolchain.compile_sources(resource, tmp_path,
+                                                     dependencies_include_dir))
 
         needed_update = toolchain.build_library(objects, bin_path, name)
 
@@ -657,7 +827,7 @@
             add_result_to_report(report, cur_result)
         return True
 
-    except Exception, e:
+    except Exception:
         if report != None:
             end = time()
             cur_result["result"] = "FAIL"
@@ -670,11 +840,32 @@
             add_result_to_report(report, cur_result)
 
         # Let Exception propagate
-        raise e
+        raise
+
+# We do have unique legacy conventions about how we build and package the mbed
+# library
+def build_mbed_libs(target, toolchain_name, options=None, verbose=False,
+                    clean=False, macros=None, notify=None, jobs=1, silent=False,
+                    report=None, properties=None, extra_verbose=False):
+    """ Function returns True is library was built and false if building was
+    skipped
 
-# We do have unique legacy conventions about how we build and package the mbed library
-def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
-    """ Function returns True is library was built and false if building was skipped """
+    Positional arguments:
+    target - the MCU or board that the project will compile for
+    toolchain_name - the name of the build tools
+
+    Keyword arguments:
+    options - general compiler options like debug-symbols or small-build
+    verbose - Write the actual tools command lines used if True
+    clean - Rebuild everything if True
+    macros - additional macros
+    notify - Notify function for logs
+    jobs - how many compilers we can run at once
+    silent - suppress printing of progress indicators
+    report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+    extra_verbose - even more output!
+    """
 
     if report != None:
         start = time()
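
A usage sketch for the legacy mbed-library build, assuming the module path tools.build_api; unlike build_project above, build_mbed_libs is handed a target object (e.g. from TARGET_MAP) rather than a name:

    from tools.build_api import build_mbed_libs
    from tools.targets import TARGET_MAP

    built = build_mbed_libs(TARGET_MAP["K64F"], "GCC_ARM",
                            clean=True, jobs=4, verbose=True)
    print("built" if built else "skipped (toolchain not supported)")
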
@@ -683,16 +874,21 @@
         vendor_label = target.extra_labels[0]
         cur_result = None
         prep_report(report, target.name, toolchain_name, id_name)
-        cur_result = create_result(target.name, toolchain_name, id_name, description)
+        cur_result = create_result(target.name, toolchain_name, id_name,
+                                   description)
 
         if properties != None:
-            prep_properties(properties, target.name, toolchain_name, vendor_label)
+            prep_properties(properties, target.name, toolchain_name,
+                            vendor_label)
 
     # Check toolchain support
     if toolchain_name not in target.supported_toolchains:
         supported_toolchains_text = ", ".join(target.supported_toolchains)
-        print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
-        print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')
+        print('%s target is not yet supported by toolchain %s' %
+              (target.name, toolchain_name))
+        print('%s target supports %s toolchain%s' %
+              (target.name, supported_toolchains_text, 's'
+               if len(target.supported_toolchains) > 1 else ''))
 
         if report != None:
             cur_result["result"] = "SKIP"
@@ -702,69 +898,89 @@
 
     try:
         # Toolchain
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](
+            target, options, macros=macros, notify=notify, silent=silent,
+            extra_verbose=extra_verbose)
         toolchain.VERBOSE = verbose
         toolchain.jobs = jobs
         toolchain.build_all = clean
 
+        # Take into account the library configuration (MBED_CONFIG_FILE)
+        config = Config(target)
+        toolchain.config = config
+        config.add_config_files([MBED_CONFIG_FILE])
+        toolchain.set_config_data(toolchain.config.get_config_data())
+
         # Source and Build Paths
-        BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
-        BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
-        mkdir(BUILD_TOOLCHAIN)
+        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
+        build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
+        mkdir(build_toolchain)
 
-        TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
-        mkdir(TMP_PATH)
+        tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+        mkdir(tmp_path)
 
         # CMSIS
-        toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
+        toolchain.info("Building library %s (%s, %s)" %
+                       ('CMSIS', target.name, toolchain_name))
         cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
         resources = toolchain.scan_resources(cmsis_src)
 
-        toolchain.copy_files(resources.headers, BUILD_TARGET)
-        toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
-        toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
+        toolchain.copy_files(resources.headers, build_target)
+        toolchain.copy_files(resources.linker_script, build_toolchain)
+        toolchain.copy_files(resources.bin_files, build_toolchain)
 
-        objects = toolchain.compile_sources(resources, TMP_PATH)
-        toolchain.copy_files(objects, BUILD_TOOLCHAIN)
+        objects = toolchain.compile_sources(resources, tmp_path)
+        toolchain.copy_files(objects, build_toolchain)
 
         # mbed
-        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+        toolchain.info("Building library %s (%s, %s)" %
+                       ('MBED', target.name, toolchain_name))
 
         # Common Headers
-        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
-        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers,
+                             MBED_LIBRARIES)
+        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
+                             MBED_LIBRARIES)
 
         # Target specific sources
-        HAL_SRC = join(MBED_TARGETS_PATH, "hal")
-        hal_implementation = toolchain.scan_resources(HAL_SRC)
-        toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, resources=hal_implementation)
-        incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
-        objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        hal_src = join(MBED_TARGETS_PATH, "hal")
+        hal_implementation = toolchain.scan_resources(hal_src)
+        toolchain.copy_files(hal_implementation.headers +
+                             hal_implementation.hex_files +
+                             hal_implementation.libraries,
+                             build_target, resources=hal_implementation)
+        incdirs = toolchain.scan_resources(build_target).inc_dirs
+        objects = toolchain.compile_sources(hal_implementation, tmp_path,
+                                            [MBED_LIBRARIES] + incdirs)
 
         # Common Sources
         mbed_resources = toolchain.scan_resources(MBED_COMMON)
-        objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+        objects += toolchain.compile_sources(mbed_resources, tmp_path,
+                                             [MBED_LIBRARIES] + incdirs)
 
         # A number of compiled files need to be copied as objects as opposed to
-        # being part of the mbed library, for reasons that have to do with the way
-        # the linker search for symbols in archives. These are:
-        #   - retarget.o: to make sure that the C standard lib symbols get overridden
+        # being part of the mbed library, for reasons that have to do with the
+        # way the linker searches for symbols in archives. These are:
+        #   - retarget.o: to make sure that the C standard lib symbols get
+        #                 overridden
         #   - board.o: mbed_die is weak
-        #   - mbed_overrides.o: this contains platform overrides of various weak SDK functions
-        separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []
+        #   - mbed_overrides.o: this contains platform overrides of various
+        #                       weak SDK functions
+        separate_names, separate_objects = ['retarget.o', 'board.o',
+                                            'mbed_overrides.o'], []
 
-        for o in objects:
+        for obj in objects:
             for name in separate_names:
-                if o.endswith(name):
-                    separate_objects.append(o)
+                if obj.endswith(name):
+                    separate_objects.append(obj)
 
-        for o in separate_objects:
-            objects.remove(o)
+        for obj in separate_objects:
+            objects.remove(obj)
 
-        toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+        toolchain.build_library(objects, build_toolchain, "mbed")
 
-        for o in separate_objects:
-            toolchain.copy_files(o, BUILD_TOOLCHAIN)
+        for obj in separate_objects:
+            toolchain.copy_files(obj, build_toolchain)
 
         if report != None:
             end = time()
@@ -776,7 +992,7 @@
 
         return True
 
-    except Exception, e:
+    except Exception as exc:
         if report != None:
             end = time()
             cur_result["result"] = "FAIL"
@@ -786,18 +1002,21 @@
             if toolchain_output:
                 cur_result["output"] += toolchain_output
 
-            cur_result["output"] += str(e)
+            cur_result["output"] += str(exc)
 
             add_result_to_report(report, cur_result)
 
         # Let Exception propagate
-        raise e
+        raise
 
 
 def get_unique_supported_toolchains(release_targets=None):
     """ Get list of all unique toolchains supported by targets
-    If release_targets is not specified, then it queries all known targets
-    release_targets: tuple structure returned from get_mbed_official_release()
+
+    Keyword arguments:
+    release_targets - tuple structure returned from get_mbed_official_release().
+                      If release_targets is not specified, then it queries all
+                      known targets
     """
     unique_supported_toolchains = []
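
For example (module path assumed), the official release tuple from get_mbed_official_release can be fed straight back in to get the toolchain columns used by the matrix below:

    from tools.build_api import (get_mbed_official_release,
                                 get_unique_supported_toolchains)

    release_5 = get_mbed_official_release('5')
    print(get_unique_supported_toolchains(release_5))  # e.g. ['ARM', 'GCC_ARM', 'IAR']
    print(get_unique_supported_toolchains())           # every known toolchain
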
 
@@ -815,9 +1034,17 @@
     return unique_supported_toolchains
 
 
-def mcu_toolchain_matrix(verbose_html=False, platform_filter=None, release_version='5'):
-    """  Shows target map using prettytable """
-    from prettytable import PrettyTable # Only use it in this function so building works without extra modules
+def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
+                         release_version='5'):
+    """  Shows target map using prettytable
+
+    Keyword arguments:
+    verbose_html - emit html instead of a simple table
+    platform_filter - remove results that match the string
+    release_version - get the matrix for this major version number
+    """
+    # Only use it in this function so building works without extra modules
+    from prettytable import PrettyTable
 
     if isinstance(release_version, basestring):
         # Force release_version to lowercase if it is a string
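
A usage sketch, assuming the module path tools.build_api and that prettytable is installed; the matrix is returned as a string rather than printed:

    from tools.build_api import mcu_toolchain_matrix

    # Plain-text support matrix for the mbed OS 5 release targets
    print(mcu_toolchain_matrix(release_version='5'))

    # HTML output with a platform filter applied
    print(mcu_toolchain_matrix(verbose_html=True, platform_filter="K64F"))
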
@@ -832,23 +1059,26 @@
 
     for version in RELEASE_VERSIONS:
         version_release_targets[version] = get_mbed_official_release(version)
-        version_release_target_names[version] = [x[0] for x in version_release_targets[version]]
+        version_release_target_names[version] = [x[0] for x in
+                                                 version_release_targets[
+                                                     version]]
 
     if release_version in RELEASE_VERSIONS:
         release_targets = version_release_targets[release_version]
     else:
         release_targets = None
 
-    unique_supported_toolchains = get_unique_supported_toolchains(release_targets)
+    unique_supported_toolchains = get_unique_supported_toolchains(
+        release_targets)
     prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
 
     # All tests status table print
     columns = prepend_columns + unique_supported_toolchains
-    pt = PrettyTable(columns)
+    table_printer = PrettyTable(columns)
     # Align table
     for col in columns:
-        pt.align[col] = "c"
-    pt.align["Target"] = "l"
+        table_printer.align[col] = "c"
+    table_printer.align["Target"] = "l"
 
     perm_counter = 0
     target_counter = 0
@@ -884,9 +1114,10 @@
                 text = "-"
 
             row.append(text)
-        pt.add_row(row)
+        table_printer.add_row(row)
 
-    result = pt.get_html_string() if verbose_html else pt.get_string()
+    result = table_printer.get_html_string() if verbose_html \
+             else table_printer.get_string()
     result += "\n"
     result += "Supported targets: %d\n"% (target_counter)
     if target_counter == 1:
@@ -895,57 +1126,87 @@
 
 
 def get_target_supported_toolchains(target):
-    """ Returns target supported toolchains list """
-    return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP else None
+    """ Returns target supported toolchains list
+
+    Positional arguments:
+    target - the target to get the supported toolchains of
+    """
+    return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
+        else None
 
 
-def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
+def static_analysis_scan(target, toolchain_name, cppcheck_cmd,
+                         cppcheck_msg_format, options=None, verbose=False,
+                         clean=False, macros=None, notify=None, jobs=1,
+                         extra_verbose=False):
+    """Perform static analysis on a target and toolchain combination
+
+    Positional arguments:
+    target - the target to fake the build for
+    toolchain_name - pretend you would compile with this toolchain
+    cppcheck_cmd - the command used to do static analysis
+    cppcheck_msg_format - the format of the check messages
+
+    Keyword arguments:
+    options - things like debug-symbols, or small-build, etc.
+    verbose - more printing!
+    clean - start from a clean slate
+    macros - extra macros to compile with
+    notify - the notification event handling function
+    jobs - number of commands to run at once
+    extra_verbose - even more printing
+    """
     # Toolchain
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
+    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options,
+                                                  macros=macros, notify=notify,
+                                                  extra_verbose=extra_verbose)
     toolchain.VERBOSE = verbose
     toolchain.jobs = jobs
     toolchain.build_all = clean
 
     # Source and Build Paths
-    BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
-    BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
-    mkdir(BUILD_TOOLCHAIN)
+    build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
+    build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
+    mkdir(build_toolchain)
 
-    TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
-    mkdir(TMP_PATH)
+    tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+    mkdir(tmp_path)
 
     # CMSIS
-    toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
+    toolchain.info("Static analysis for %s (%s, %s)" %
+                   ('CMSIS', target.name, toolchain_name))
     cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
     resources = toolchain.scan_resources(cmsis_src)
 
     # Copy files before analysis
-    toolchain.copy_files(resources.headers, BUILD_TARGET)
-    toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
+    toolchain.copy_files(resources.headers, build_target)
+    toolchain.copy_files(resources.linker_script, build_toolchain)
 
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
+    # command line
     includes = ["-I%s"% i for i in resources.inc_dirs]
-    includes.append("-I%s"% str(BUILD_TARGET))
+    includes.append("-I%s"% str(build_target))
     c_sources = " ".join(resources.c_sources)
     cpp_sources = " ".join(resources.cpp_sources)
     macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
 
-    includes = map(str.strip, includes)
-    macros = map(str.strip, macros)
+    includes = [inc.strip() for inc in includes]
+    macros = [mac.strip() for mac in macros]
 
-    check_cmd = CPPCHECK_CMD
-    check_cmd += CPPCHECK_MSG_FORMAT
+    check_cmd = cppcheck_cmd
+    check_cmd += cppcheck_msg_format
     check_cmd += includes
     check_cmd += macros
 
-    # We need to pass some params via file to avoid "command line too long in some OSs"
+    # We need to pass some params via file to avoid "command line too long in
+    # some OSs"
     tmp_file = tempfile.NamedTemporaryFile(delete=False)
     tmp_file.writelines(line + '\n' for line in c_sources.split())
     tmp_file.writelines(line + '\n' for line in cpp_sources.split())
     tmp_file.close()
     check_cmd += ["--file-list=%s"% tmp_file.name]
 
-    _stdout, _stderr, _rc = run_cmd(check_cmd)
+    _stdout, _stderr, _ = run_cmd(check_cmd)
     if verbose:
         print _stdout
     print _stderr
@@ -953,50 +1214,58 @@
     # =========================================================================
 
     # MBED
-    toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+    toolchain.info("Static analysis for %s (%s, %s)" %
+                   ('MBED', target.name, toolchain_name))
 
     # Common Headers
-    toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
-    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+    toolchain.copy_files(toolchain.scan_resources(MBED_API).headers,
+                         MBED_LIBRARIES)
+    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
+                         MBED_LIBRARIES)
 
     # Target specific sources
-    HAL_SRC = join(MBED_TARGETS_PATH, "hal")
-    hal_implementation = toolchain.scan_resources(HAL_SRC)
+    hal_src = join(MBED_TARGETS_PATH, "hal")
+    hal_implementation = toolchain.scan_resources(hal_src)
 
     # Copy files before analysis
-    toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files, BUILD_TARGET, resources=hal_implementation)
-    incdirs = toolchain.scan_resources(BUILD_TARGET)
+    toolchain.copy_files(hal_implementation.headers +
+                         hal_implementation.hex_files, build_target,
+                         resources=hal_implementation)
+    incdirs = toolchain.scan_resources(build_target)
 
     target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
-    target_includes.append("-I%s"% str(BUILD_TARGET))
-    target_includes.append("-I%s"% str(HAL_SRC))
+    target_includes.append("-I%s"% str(build_target))
+    target_includes.append("-I%s"% str(hal_src))
     target_c_sources = " ".join(incdirs.c_sources)
     target_cpp_sources = " ".join(incdirs.cpp_sources)
-    target_macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
+    target_macros = ["-D%s"% s for s in
+                     toolchain.get_symbols() + toolchain.macros]
 
     # Common Sources
     mbed_resources = toolchain.scan_resources(MBED_COMMON)
 
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
+    # command line
     mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
-    mbed_includes.append("-I%s"% str(BUILD_TARGET))
+    mbed_includes.append("-I%s"% str(build_target))
     mbed_includes.append("-I%s"% str(MBED_COMMON))
     mbed_includes.append("-I%s"% str(MBED_API))
     mbed_includes.append("-I%s"% str(MBED_HAL))
     mbed_c_sources = " ".join(mbed_resources.c_sources)
     mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
 
-    target_includes = map(str.strip, target_includes)
-    mbed_includes = map(str.strip, mbed_includes)
-    target_macros = map(str.strip, target_macros)
+    target_includes = [inc.strip() for inc in target_includes]
+    mbed_includes = [inc.strip() for inc in mbed_includes]
+    target_macros = [mac.strip() for mac in target_macros]
 
-    check_cmd = CPPCHECK_CMD
-    check_cmd += CPPCHECK_MSG_FORMAT
+    check_cmd = cppcheck_cmd
+    check_cmd += cppcheck_msg_format
     check_cmd += target_includes
     check_cmd += mbed_includes
     check_cmd += target_macros
 
-    # We need to pass some parames via file to avoid "command line too long in some OSs"
+    # We need to pass some params via file to avoid "command line too long in
+    # some OSs"
     tmp_file = tempfile.NamedTemporaryFile(delete=False)
     tmp_file.writelines(line + '\n' for line in target_c_sources.split())
     tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
@@ -1005,42 +1274,77 @@
     tmp_file.close()
     check_cmd += ["--file-list=%s"% tmp_file.name]
 
-    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
+    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
     if verbose:
         print _stdout
     print _stderr
 
 
-def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
-                             options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
+def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd,
+                             cppcheck_msg_format, options=None, verbose=False,
+                             clean=False, macros=None, notify=None, jobs=1,
+                             extra_verbose=False):
+    """Perform static analysis on a library as if it were to be compiled for a
+    particular target and toolchain combination
+    """
     lib = Library(lib_id)
     if lib.is_supported(target, toolchain):
-        static_analysis_scan_library(lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
-                      lib.dependencies, options,
-                      verbose=verbose, clean=clean, macros=macros, notify=notify, jobs=jobs, extra_verbose=extra_verbose)
+        static_analysis_scan_library(
+            lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd,
+            cppcheck_msg_format, lib.dependencies, options, verbose=verbose,
+            clean=clean, macros=macros, notify=notify, jobs=jobs,
+            extra_verbose=extra_verbose)
     else:
-        print 'Library "%s" is not yet supported on target %s with toolchain %s'% (lib_id, target.name, toolchain)
+        print('Library "%s" is not yet supported on target %s with toolchain %s'
+              % (lib_id, target.name, toolchain))
 
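A hypothetical driver for static_analysis_scan_lib might look as follows; the library id, target, toolchain, macros and cppcheck settings are placeholders rather than values taken from this module:

# Hypothetical driver for static_analysis_scan_lib; all values below are
# assumptions chosen for illustration, not taken from this changeset.
from tools.build_api import static_analysis_scan_lib
from tools.targets import TARGET_MAP

CPPCHECK_CMD = ["cppcheck", "--enable=all"]              # assumed analyser
CPPCHECK_MSG_FORMAT = ["--template={file}:{line}: {severity}: {message}"]

static_analysis_scan_lib("rtos", TARGET_MAP["K64F"], "ARM",
                         CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
                         verbose=True, macros=["NDEBUG"], jobs=4)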
 
-def static_analysis_scan_library(src_paths, build_path, target, toolchain_name, cppcheck_cmd, cppcheck_msg_format,
-         dependencies_paths=None, options=None, name=None, clean=False,
-         notify=None, verbose=False, macros=None, jobs=1, extra_verbose=False):
-    """ Function scans library (or just some set of sources/headers) for staticly detectable defects """
+def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
+                                 cppcheck_cmd, cppcheck_msg_format,
+                                 dependencies_paths=None, options=None,
+                                 name=None, clean=False, notify=None,
+                                 verbose=False, macros=None, jobs=1,
+                                 extra_verbose=False):
+    """ Function scans library for statically detectable defects
+
+    Positional arguments:
+    src_paths - the list of library paths to scan
+    build_path - the location directory of result files
+    target - the target to fake the build for
+    toolchain_name - the toolchain the analysis pretends to compile with
+    cppcheck_cmd - the command used to do static analysis
+    cppcheck_msg_format - the format of the check messages
+
+    Keyword arguments:
+    dependencies_paths - the paths to sources that this library depends on
+    options - things like debug-symbols, or small-build, etc.
+    name - the name of this library
+    clean - start from a clean slate
+    notify - the notification event handling function
+    verbose - more printing!
+    macros - extra macros to compile with
+    jobs - number of commands to run at once
+    extra_verbose - even more printing
+    """
     if type(src_paths) != ListType:
         src_paths = [src_paths]
 
     for src_path in src_paths:
         if not exists(src_path):
-            raise Exception("The library source folder does not exist: %s", src_path)
+            raise Exception("The library source folder does not exist: %s",
+                            src_path)
 
     # Toolchain instance
-    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
+    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options,
+                                                  macros=macros, notify=notify,
+                                                  extra_verbose=extra_verbose)
     toolchain.VERBOSE = verbose
     toolchain.jobs = jobs
 
     # The first path will give the name to the library
     name = basename(src_paths[0])
-    toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+    toolchain.info("Static analysis for library %s (%s, %s)" %
+                   (name.upper(), target.name, toolchain_name))
 
     # Scan Resources
     resources = []
@@ -1060,7 +1364,8 @@
     tmp_path = join(build_path, '.temp', toolchain.obj_path)
     mkdir(tmp_path)
 
-    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+    # Gather include paths, c, cpp sources and macros to transfer to cppcheck
+    # command line
     includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
     c_sources = " "
     cpp_sources = " "
@@ -1073,34 +1378,42 @@
         c_sources += " ".join(resource.c_sources) + " "
         cpp_sources += " ".join(resource.cpp_sources) + " "
 
-    dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+    dependencies_include_dir.extend(
+        toolchain.scan_resources(build_path).inc_dirs)
 
-    includes = map(str.strip, includes)
-    macros = map(str.strip, macros)
+    includes = [inc.strip() for inc in includes]
+    macros = [mac.strip() for mac in macros]
 
     check_cmd = cppcheck_cmd
     check_cmd += cppcheck_msg_format
     check_cmd += includes
     check_cmd += macros
 
-    # We need to pass some parameters via file to avoid "command line too long in some OSs"
-    # Temporary file is created to store e.g. cppcheck list of files for command line
+    # We need to pass some parameters via a file to avoid "command line too
+    # long" errors on some OSes. A temporary file is created to hold e.g. the
+    # cppcheck list of source files.
     tmp_file = tempfile.NamedTemporaryFile(delete=False)
     tmp_file.writelines(line + '\n' for line in c_sources.split())
     tmp_file.writelines(line + '\n' for line in cpp_sources.split())
     tmp_file.close()
     check_cmd += ["--file-list=%s"% tmp_file.name]
 
-    # This will allow us to grab result from both stdio and stderr outputs (so we can show them)
-    # We assume static code analysis tool is outputting defects on STDERR
-    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
+    # This allows us to grab the output of both stdout and stderr (so we can
+    # show them). We assume the static code analysis tool reports defects on
+    # stderr.
+    _stdout, _stderr, _ = run_cmd_ext(check_cmd)
     if verbose:
         print _stdout
     print _stderr
 
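The stream handling described in the comment near the end of this function amounts to the following sketch, which uses subprocess directly instead of run_cmd_ext; cppcheck conventionally reports defects on stderr and progress on stdout:

# Sketch of capturing both output streams of an analyser run; the --version
# call is just a stand-in for a real scan.
import subprocess

proc = subprocess.Popen(["cppcheck", "--version"],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
print(out)   # informational output (here, the version string)
print(err)   # defect reports would land here on a real scan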
 
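A direct call to static_analysis_scan_library, mirroring the docstring above, might be shaped like this; the paths, target, toolchain and macros are placeholders:

# Hypothetical call mirroring the parameters documented above; every value is
# a placeholder.
from tools.build_api import static_analysis_scan_library
from tools.targets import TARGET_MAP

static_analysis_scan_library(
    ["./my_lib"], "./BUILD/my_lib", TARGET_MAP["K64F"], "GCC_ARM",
    ["cppcheck", "--enable=all"],
    ["--template={file}:{line}: {severity}: {message}"],
    macros=["NDEBUG"], verbose=True, jobs=2)
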
 def print_build_results(result_list, build_name):
-    """ Generate result string for build results """
+    """ Generate result string for build results
+
+    Positional arguments:
+    result_list - the list of results to print
+    build_name - the name of the build we are printing result for
+    """
     result = ""
     if len(result_list) > 0:
         result += build_name + "\n"
@@ -1108,10 +1421,12 @@
         result += "\n"
     return result
 
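As an illustration, print_build_results could be fed a plain list of result summaries; the entries below are invented, and the exact per-line formatting is handled by the portion of the function elided from this hunk:

# Invented example input for print_build_results; assumes the entries are
# simple printable summaries produced elsewhere by the build system.
failures = ["K64F::GCC_ARM::MBED-BUILD", "LPC1768::ARM::MBED-BUILD"]
print(print_build_results(failures, "Build failures:"))
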
-def print_build_memory_usage_results(report):
+def print_build_memory_usage(report):
     """ Generate result table with memory usage values for build results
-        Agregates (puts together) reports obtained from self.get_memory_summary()
-        @param report Report generated during build procedure. See
+    Aggregates the reports obtained from self.get_memory_summary().
+
+    Positional arguments:
+    report - Report generated during build procedure.
     """
     from prettytable import PrettyTable
     columns_text = ['name', 'target', 'toolchain']
@@ -1129,21 +1444,25 @@
             for name in report[target][toolchain]:
                 for dlist in report[target][toolchain][name]:
                     for dlistelem in dlist:
-                        # Get 'memory_usage' record and build table with statistics
+                        # Get 'memory_usage' record and build table with
+                        # statistics
                         record = dlist[dlistelem]
                         if 'memory_usage' in record and record['memory_usage']:
                             # Note that summary should be in the last record of
-                            # 'memory_usage' section. This is why we are grabbing
-                            # last "[-1]" record.
+                            # 'memory_usage' section. This is why we are
+                            # grabbing last "[-1]" record.
+                            summary = record['memory_usage'][-1]['summary']
                             row = [
                                 record['description'],
                                 record['target_name'],
                                 record['toolchain_name'],
-                                record['memory_usage'][-1]['summary']['static_ram'],
-                                record['memory_usage'][-1]['summary']['stack'],
-                                record['memory_usage'][-1]['summary']['heap'],
-                                record['memory_usage'][-1]['summary']['total_ram'],
-                                record['memory_usage'][-1]['summary']['total_flash'],
+                                summary['static_ram'],
+                                summary['stack'],
+                                summary['heap'],
+                                summary['total_ram'],
+                                summary['total_flash'],
                             ]
                             table.add_row(row)
 
@@ -1152,6 +1471,14 @@
     return result
 
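To make the nesting walked by print_build_memory_usage concrete, a report fragment matching the structure produced by add_result_to_report might look like this (every name and number is invented, and prettytable must be installed):

# Invented report fragment: report[target][toolchain][name] is a list of
# {index: record} wrappers, and the summary sits in the last 'memory_usage'
# entry of each record (hence the [-1] indexing above).
report = {
    "K64F": {
        "GCC_ARM": {
            "mbed-build": [
                {0: {
                    "description": "mbed build",
                    "target_name": "K64F",
                    "toolchain_name": "GCC_ARM",
                    "memory_usage": [
                        {"summary": {"static_ram": 10240, "stack": 4096,
                                     "heap": 8192, "total_ram": 22528,
                                     "total_flash": 65536}},
                    ],
                }},
            ],
        },
    },
}
print(print_build_memory_usage(report))
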
 def write_build_report(build_report, template_filename, filename):
+    """Write a build report to disk using a template file
+
+    Positional arguments:
+    build_report - a report generated by the build system
+    template_filename - a file containing the template used to render the
+                        build report
+    filename - the location on disk to write the file to
+    """
     build_report_failing = []
     build_report_passing = []
 
@@ -1165,5 +1492,7 @@
     env.loader = FileSystemLoader('ci_templates')
     template = env.get_template(template_filename)
 
-    with open(filename, 'w+') as f:
-        f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
+    with open(filename, 'w+') as report_file:
+        report_file.write(template.render(
+            failing_builds=build_report_failing,
+            passing_builds=build_report_passing))
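A hypothetical call to write_build_report could look like this; the report content, template name and output path are placeholders, and a ci_templates/ directory containing the template (plus jinja2) must be available because the loader path is hard-coded above:

# Hypothetical invocation; build_report would normally be produced by the
# build system, and ci_templates/build_report.html is an assumed template.
build_report = []
write_build_report(build_report, "build_report.html", "build_report.html.out")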