Clone of official tools

Revision: 43:2a7da56ebd24
Parent:   41:2a77626a4c21
Child:    45:c4a728429846
--- a/build_api.py	Mon Nov 06 13:17:14 2017 -0600
+++ b/build_api.py	Tue Sep 25 13:43:09 2018 -0500
@@ -14,12 +14,15 @@
 See the License for the specific language governing permissions and
 limitations under the License.
 """
+from __future__ import print_function, division, absolute_import
 
 import re
 import tempfile
 import datetime
 import uuid
-from types import ListType
+import struct
+import zlib
+import hashlib
 from shutil import rmtree
 from os.path import join, exists, dirname, basename, abspath, normpath, splitext
 from os.path import relpath
@@ -27,20 +30,25 @@
 from time import time
 from intelhex import IntelHex
 from json import load, dump
-
-from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\
-    ToolException, InvalidReleaseTargetException, intelhex_offset
-from tools.paths import MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,\
-    MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL, MBED_CONFIG_FILE,\
-    MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,\
-    BUILD_DIR
-from tools.targets import TARGET_NAMES, TARGET_MAP, set_targets_json_location
-from tools.libraries import Library
-from tools.toolchains import TOOLCHAIN_CLASSES, mbedToolchain
 from jinja2 import FileSystemLoader
 from jinja2.environment import Environment
-from tools.config import Config
-from tools.build_profiles import find_build_profile, get_toolchain_profile, find_targets_json
+
+from .arm_pack_manager import Cache
+from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
+                    ToolException, InvalidReleaseTargetException,
+                    intelhex_offset, integer)
+from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
+                    MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
+                    MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
+                    MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
+                    BUILD_DIR)
+from .resources import Resources, FileType, FileRef
+from .notifier.mock import MockNotifier
+from .targets import TARGET_NAMES, TARGET_MAP, CORE_ARCH, set_targets_json_location
+from .libraries import Library
+from .toolchains import TOOLCHAIN_CLASSES, mbedToolchain
+from .config import Config
+from .build_profiles import find_build_profile, get_toolchain_profile, find_targets_json
 
 RELEASE_VERSIONS = ['2', '5']
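A note on the import hunk above: dropping `types.ListType` and adding the
`__future__` imports move this module toward single-source Python 2/3 code. A
minimal sketch of the pattern, standard library only:

    from __future__ import print_function, division, absolute_import

    def normalize_src_paths(src_paths):
        # Py3-safe replacement for `type(x) != ListType`; types.ListType
        # does not exist in Python 3, and isinstance() also accepts
        # list subclasses.
        if not isinstance(src_paths, list):
            src_paths = [src_paths]
        return src_paths

    print(1 / 2)  # 0.5 under both interpreters thanks to `division`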
 
@@ -115,7 +123,7 @@
     result_wrap = {0: result}
     report[target][toolchain][id_name].append(result_wrap)
 
-def get_config(src_paths, target, toolchain_name):
+def get_config(src_paths, target, toolchain_name=None, app_config=None):
     """Get the configuration object for a target-toolchain combination
 
     Positional arguments:
@@ -124,39 +132,23 @@
     toolchain_name - the string that identifies the build tools
     """
     # Convert src_paths to a list if needed
-    if type(src_paths) != ListType:
+    if not isinstance(src_paths, list):
         src_paths = [src_paths]
 
-    # Pass all params to the unified prepare_resources()
-    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name)
-
-    # Scan src_path for config files
-    resources = toolchain.scan_resources(src_paths[0])
-    for path in src_paths[1:]:
-        resources.add(toolchain.scan_resources(path))
-
-    # Update configuration files until added features creates no changes
-    prev_features = set()
-    while True:
-        # Update the configuration with any .json files found while scanning
-        toolchain.config.add_config_files(resources.json_files)
+    res = Resources(MockNotifier())
+    if toolchain_name:
+        toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
+                                      app_config=app_config)
+        config = toolchain.config
+        res.scan_with_toolchain(src_paths, toolchain, exclude=False)
+    else:
+        config = Config(target, src_paths, app_config=app_config)
+        res.scan_with_config(src_paths, config)
+    if config.has_regions:
+        _ = list(config.regions)
 
-        # Add features while we find new ones
-        features = set(toolchain.config.get_features())
-        if features == prev_features:
-            break
-
-        for feature in features:
-            if feature in resources.features:
-                resources += resources.features[feature]
-
-        prev_features = features
-    toolchain.config.validate_config()
-    if toolchain.config.has_regions:
-        _ = list(toolchain.config.regions)
-
-    cfg, macros = toolchain.config.get_config_data()
-    features = toolchain.config.get_features()
+    cfg, macros = config.get_config_data()
+    features = config.get_features()
     return cfg, macros, features
 
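For reference, a hypothetical call against the new get_config() signature;
the target name, source directory, and mbed_app.json path are placeholders,
not values from this changeset:

    from tools.build_api import get_config

    cfg, macros, features = get_config(
        ["source"], "K64F",          # src_paths may also be a bare string
        toolchain_name="GCC_ARM",    # now optional: omit to configure
                                     # without building a toolchain object
        app_config="mbed_app.json",  # optional application config
    )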
 def is_official_target(target_name, version):
@@ -193,22 +185,22 @@
 
         elif version == '5':
             # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
-            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
-            required_toolchains_sorted = list(required_toolchains)
-            required_toolchains_sorted.sort()
+            required_toolchains = [
+                set(['ARM', 'GCC_ARM', 'IAR']),
+                set(['ARMC6'])
+            ]
             supported_toolchains = set(target.supported_toolchains)
-            supported_toolchains_sorted = list(supported_toolchains)
-            supported_toolchains_sorted.sort()
 
-            if not required_toolchains.issubset(supported_toolchains):
+            if not any(r.issubset(supported_toolchains)
+                       for r in required_toolchains):
                 result = False
                 reason = ("Target '%s' must support " % target.name) + \
                     ("ALL of the folowing toolchains to be included in the") + \
                     ((" mbed OS 5.0 official release: %s" + linesep) %
-                     ", ".join(required_toolchains_sorted)) + \
+                     ", ".join(sorted(required_toolchains[0]))) + \
                     ("Currently it is only configured to support the ") + \
                     ("following toolchains: %s" %
-                     ", ".join(supported_toolchains_sorted))
+                     ", ".join(sorted(supported_toolchains)))
 
             elif not target.default_lib == 'std':
                 result = False
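The rewritten check reduces to "any required set is a subset of what the
target supports"; a self-contained sketch of just that logic:

    required_toolchains = [{"ARM", "GCC_ARM", "IAR"}, {"ARMC6"}]
    supported = {"ARMC6", "GCC_ARM"}
    ok = any(req.issubset(supported) for req in required_toolchains)
    assert ok  # the ARMC6-only set is satisfied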
@@ -284,37 +276,18 @@
 
     return mbed_official_release
 
-def add_regions_to_profile(profile, config, toolchain_class):
-    """Add regions to the build profile, if there are any.
-
-    Positional Arguments:
-    profile - the profile to update
-    config - the configuration object that owns the region
-    toolchain_class - the class of the toolchain being used
-    """
-    if not profile:
-        return
-    regions = list(config.regions)
-    for region in regions:
-        for define in [(region.name.upper() + "_ADDR", region.start),
-                       (region.name.upper() + "_SIZE", region.size)]:
-            profile["common"].append("-D%s=0x%x" %  define)
-    active_region = [r for r in regions if r.active][0]
-    for define in [("MBED_APP_START", active_region.start),
-                   ("MBED_APP_SIZE", active_region.size)]:
-        profile["ld"].append(toolchain_class.make_ld_define(*define))
-
-    print("Using regions in this build:")
-    for region in regions:
-        print("  Region %s size 0x%x, offset 0x%x"
-              % (region.name, region.size, region.start))
+ARM_COMPILERS = ("ARM", "ARMC6", "uARM")
+def target_supports_toolchain(target, toolchain_name):
+    if toolchain_name in ARM_COMPILERS:
+        return any(tc in target.supported_toolchains for tc in ARM_COMPILERS)
+    else:
+        return toolchain_name in target.supported_toolchains
 
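A stand-in target object is enough to exercise the rule above: support for
any member of the ARM compiler family counts as support for the whole family.

    from collections import namedtuple

    FakeTarget = namedtuple("FakeTarget", "supported_toolchains")
    t = FakeTarget(supported_toolchains=["ARMC6", "GCC_ARM"])

    assert target_supports_toolchain(t, "ARM")      # ARMC6 stands in
    assert target_supports_toolchain(t, "uARM")     # same family
    assert not target_supports_toolchain(t, "IAR")  # not listed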
 
 def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
                       macros=None, clean=False, jobs=1,
-                      notify=None, silent=False, verbose=False,
-                      extra_verbose=False, config=None,
-                      app_config=None, build_profile=None):
+                      notify=None, config=None, app_config=None,
+                      build_profile=None, ignore=None):
     """ Prepares resource related objects - toolchain, target, config
 
     Positional arguments:
@@ -327,12 +300,10 @@
     clean - Rebuild everything if True
     jobs - how many compilers we can run at once
     notify - Notify function for logs
-    silent - suppress printing of progress indicators
-    verbose - Write the actual tools command lines used if True
-    extra_verbose - even more output!
     config - a Config object to use instead of creating one
     app_config - location of a chosen mbed_app.json file
     build_profile - a list of mergeable build profiles
+    ignore - list of paths to add to mbedignore
     """
 
     # We need to remove all paths which are repeated to avoid
@@ -342,6 +313,13 @@
     # If the configuration object was not yet created, create it now
     config = config or Config(target, src_paths, app_config=app_config)
     target = config.target
+    if not target_supports_toolchain(target, toolchain_name):
+        raise NotSupportedException(
+            "Target {} is not supported by toolchain {}".format(
+                target.name, toolchain_name))
+    if (toolchain_name == "ARM" and CORE_ARCH[target.core] == 8):
+        toolchain_name = "ARMC6"
+
     try:
         cur_tc = TOOLCHAIN_CLASSES[toolchain_name]
     except KeyError:
@@ -350,23 +328,82 @@
     profile = {'c': [], 'cxx': [], 'common': [], 'asm': [], 'ld': []}
     for contents in build_profile or []:
         for key in profile:
-            profile[key].extend(contents[toolchain_name][key])
+            profile[key].extend(contents[toolchain_name].get(key, []))
 
-    if config.has_regions:
-        add_regions_to_profile(profile, config, cur_tc)
-
-    toolchain = cur_tc(target, notify, macros, silent, build_dir=build_dir,
-                       extra_verbose=extra_verbose, build_profile=profile)
+    toolchain = cur_tc(
+        target, notify, macros, build_dir=build_dir, build_profile=profile)
 
     toolchain.config = config
     toolchain.jobs = jobs
     toolchain.build_all = clean
-    toolchain.VERBOSE = verbose
+
+    if ignore:
+        toolchain.add_ignore_patterns(root=".", base_path=".", patterns=ignore)
 
     return toolchain
 
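A hypothetical call mirroring the new prepare_toolchain() signature; the
target name and paths are placeholders, and `ignore` feeds the new
add_ignore_patterns() call:

    from tools.build_api import prepare_toolchain
    from tools.notifier.mock import MockNotifier

    toolchain = prepare_toolchain(
        ["."], "BUILD", "K64F", "GCC_ARM",
        notify=MockNotifier(), jobs=4, clean=False,
        ignore=["FEATURE_*/", "docs/"],
    )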
-def merge_region_list(region_list, destination, padding=b'\xFF'):
-    """Merege the region_list into a single image
+def _printihex(ihex):
+    import pprint
+    pprint.PrettyPrinter().pprint(ihex.todict())
+
+def _real_region_size(region):
+    try:
+        part = intelhex_offset(region.filename, offset=region.start)
+        return (part.maxaddr() - part.minaddr()) + 1
+    except AttributeError:
+        return region.size
+
+
+def _fill_header(region_list, current_region):
+    """Fill an application header region
+
+    This is done in three steps:
+     * Fill the whole region with zeros
+     * Fill const, timestamp and size entries with their data
+     * Fill the digests using this header as the header region
+    """
+    region_dict = {r.name: r for r in region_list}
+    header = IntelHex()
+    header.puts(current_region.start, b'\x00' * current_region.size)
+    start = current_region.start
+    for member in current_region.filename:
+        _, type, subtype, data = member
+        member_size = Config.header_member_size(member)
+        if type == "const":
+            fmt = {
+                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
+                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
+            }[subtype]
+            header.puts(start, struct.pack(fmt, integer(data, 0)))
+        elif type == "timestamp":
+            fmt = {"32le": "<L", "64le": "<Q",
+                   "32be": ">L", "64be": ">Q"}[subtype]
+            header.puts(start, struct.pack(fmt, int(time())))
+        elif type == "size":
+            fmt = {"32le": "<L", "64le": "<Q",
+                   "32be": ">L", "64be": ">Q"}[subtype]
+            size = sum(_real_region_size(region_dict[r]) for r in data)
+            header.puts(start, struct.pack(fmt, size))
+        elif type == "digest":
+            if data == "header":
+                ih = header[:start]
+            else:
+                ih = intelhex_offset(region_dict[data].filename,
+                                     offset=region_dict[data].start)
+            if subtype.startswith("CRCITT32"):
+                fmt = {"CRCITT32be": ">l", "CRCITT32le": "<l"}[subtype]
+                header.puts(start, struct.pack(fmt, zlib.crc32(ih.tobinarray())))
+            elif subtype.startswith("SHA"):
+                if subtype == "SHA256":
+                    hash = hashlib.sha256()
+                elif subtype == "SHA512":
+                    hash = hashlib.sha512()
+                hash.update(ih.tobinarray())
+                header.puts(start, hash.digest())
+        start += Config.header_member_size(member)
+    return header
+
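The header members handled above boil down to struct formats plus digests; a
self-contained sketch of the same packing (values are made up, and the CRC is
masked so it packs as unsigned on Python 3):

    import hashlib
    import struct
    import zlib

    payload = b"\x00" * 1024                    # stand-in application image
    header = struct.pack("<L", 0x5a51b3d4)      # "const", 32le
    header += struct.pack("<Q", 1537900989)     # "timestamp", 64le
    header += struct.pack("<L", len(payload))   # "size", 32le
    crc = zlib.crc32(payload) & 0xffffffff      # crc32 is signed on Python 2
    header += struct.pack("<L", crc)            # "digest", CRCITT32le-style
    header += hashlib.sha256(payload).digest()  # "digest", SHA256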
+def merge_region_list(region_list, destination, notify, padding=b'\xFF'):
+    """Merge the region_list into a single image
 
     Positional Arguments:
     region_list - list of regions, which should contain filenames
@@ -374,15 +411,22 @@
     padding - bytes to fill gaps with
     """
     merged = IntelHex()
+    _, format = splitext(destination)
 
-    print("Merging Regions:")
+    notify.info("Merging Regions")
 
     for region in region_list:
         if region.active and not region.filename:
             raise ToolException("Active region has no contents: No file found.")
+        if isinstance(region.filename, list):
+            header_basename, _ = splitext(destination)
+            header_filename = header_basename + "_header.hex"
+            _fill_header(region_list, region).tofile(
+                header_filename, format='hex')
+            region = region._replace(filename=header_filename)
         if region.filename:
-            print("  Filling region %s with %s" % (region.name, region.filename))
+            notify.info("  Filling region %s with %s" % (region.name, region.filename))
             part = intelhex_offset(region.filename, offset=region.start)
+            part.start_addr = None
             part_size = (part.maxaddr() - part.minaddr()) + 1
             if part_size > region.size:
                 raise ToolException("Contents of region %s does not fit"
@@ -390,76 +434,32 @@
             merged.merge(part)
             pad_size = region.size - part_size
             if pad_size > 0 and region != region_list[-1]:
-                print("  Padding region %s with 0x%x bytes" % (region.name, pad_size))
-                merged.puts(merged.maxaddr() + 1, padding * pad_size)
+                notify.info("  Padding region %s with 0x%x bytes" %
+                            (region.name, pad_size))
+                if format != ".hex":
+                    # hex output preserves offsets, so padding is
+                    # only needed when writing a flat binary
+                    merged.puts(merged.maxaddr() + 1, padding * pad_size)
 
     if not exists(dirname(destination)):
         makedirs(dirname(destination))
-    print("Space used after regions merged: 0x%x" %
-          (merged.maxaddr() - merged.minaddr() + 1))
-    with open(destination, "wb+") as output:
-        merged.tofile(output, format='bin')
-
-def scan_resources(src_paths, toolchain, dependencies_paths=None,
-                   inc_dirs=None, base_path=None, collect_ignores=False):
-    """ Scan resources using initialized toolcain
+    notify.info("Space used after regions merged: 0x%x" %
+                (merged.maxaddr() - merged.minaddr() + 1))
+    merged.tofile(destination, format=format.strip("."))
 
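A minimal sketch of the merge-and-pad scheme above, assuming the `intelhex`
package; addresses and sizes are invented:

    from intelhex import IntelHex

    boot = IntelHex()
    boot.puts(0x0000, b"\x00" * 0x100)  # stand-in bootloader contents

    merged = IntelHex()
    merged.merge(boot)
    pad_size = 0x400 - (boot.maxaddr() - boot.minaddr() + 1)
    # Padding is only needed for flat binaries; hex files keep offsets.
    merged.puts(merged.maxaddr() + 1, b"\xFF" * pad_size)
    merged.tofile("combined.bin", format="bin")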
-    Positional arguments
-    src_paths - the paths to source directories
-    toolchain - valid toolchain object
-    dependencies_paths - dependency paths that we should scan for include dirs
-    inc_dirs - additional include directories which should be added to
-               the scanner resources
-    """
-
-    # Scan src_path
-    resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
-                                         collect_ignores=collect_ignores)
-    for path in src_paths[1:]:
-        resources.add(toolchain.scan_resources(path, base_path=base_path,
-                                               collect_ignores=collect_ignores))
 
-    # Scan dependency paths for include dirs
-    if dependencies_paths is not None:
-        for path in dependencies_paths:
-            lib_resources = toolchain.scan_resources(path)
-            resources.inc_dirs.extend(lib_resources.inc_dirs)
-
-    # Add additional include directories if passed
-    if inc_dirs:
-        if type(inc_dirs) == ListType:
-            resources.inc_dirs.extend(inc_dirs)
-        else:
-            resources.inc_dirs.append(inc_dirs)
-
-    # Load resources into the config system which might expand/modify resources
-    # based on config data
-    resources = toolchain.config.load_resources(resources)
+UPDATE_WHITELIST = (
+    "application",
+)
 
-    # Set the toolchain's configuration data
-    toolchain.set_config_data(toolchain.config.get_config_data())
-
-    if  (hasattr(toolchain.target, "release_versions") and
-            "5" not in toolchain.target.release_versions and
-            "rtos" in toolchain.config.lib_config_data):
-        if "Cortex-A" in toolchain.target.core:
-            raise NotSupportedException(
-                ("%s Will be supported in mbed OS 5.6. "
-                    "To use the %s, please checkout the mbed OS 5.4 release branch. "
-                    "See https://developer.mbed.org/platforms/Renesas-GR-PEACH/#important-notice "
-                    "for more information") % (toolchain.target.name, toolchain.target.name))
-        else:
-            raise NotSupportedException("Target does not support mbed OS 5")
-
-    return resources
 
 def build_project(src_paths, build_path, target, toolchain_name,
-                  libraries_paths=None, linker_script=None,
-                  clean=False, notify=None, verbose=False, name=None,
-                  macros=None, inc_dirs=None, jobs=1, silent=False,
+                  libraries_paths=None, linker_script=None, clean=False,
+                  notify=None, name=None, macros=None, inc_dirs=None, jobs=1,
                   report=None, properties=None, project_id=None,
-                  project_description=None, extra_verbose=False, config=None,
-                  app_config=None, build_profile=None, stats_depth=None):
+                  project_description=None, config=None,
+                  app_config=None, build_profile=None, stats_depth=None, ignore=None):
     """ Build a project. A project may be a test or a user program.
 
     Positional arguments:
@@ -474,25 +474,22 @@
     linker_script - the file that drives the linker to do its job
     clean - Rebuild everything if True
     notify - Notify function for logs
-    verbose - Write the actual tools command lines used if True
     name - the name of the project
     macros - additional macros
     inc_dirs - additional directories where include files may be found
     jobs - how many compilers we can run at once
-    silent - suppress printing of progress indicators
     report - a dict where a result may be appended
     properties - UUUUHHHHH beats me
     project_id - the name put in the report
     project_description - the human-readable version of what this thing does
-    extra_verbose - even more output!
     config - a Config object to use instead of creating one
     app_config - location of a chosen mbed_app.json file
     build_profile - a dict of flags that will be passed to the compiler
     stats_depth - depth level for memap to display file/dirs
+    ignore - list of paths to add to mbedignore
     """
-
     # Convert src_path to a list if needed
-    if type(src_paths) != ListType:
+    if not isinstance(src_paths, list):
         src_paths = [src_paths]
     # Extend src_paths with libraries_paths
     if libraries_paths is not None:
@@ -528,8 +525,7 @@
             profile_data = get_toolchain_profile(self.name, profile)
             if not profile_data:
                 return
-            if verbose:
-                self.info("Using toolchain %s profile %s" % (self.name, profile))
+            notify.info("Using toolchain %s profile %s" % (self.name, profile))
 
             for k,v in profile_data.items():
                 if self.flags.has_key(k):
@@ -544,15 +540,15 @@
 
     toolchain = prepare_toolchain(
         src_paths, build_path, target, toolchain_name, macros=macros,
-        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
-        extra_verbose=extra_verbose, config=config, app_config=app_config,
-        build_profile=build_profile)
+        clean=clean, jobs=jobs, notify=notify, config=config,
+        app_config=app_config, build_profile=build_profile, ignore=ignore)
+    toolchain.version_check()
 
     # The first path will give the name to the library
     name = (name or toolchain.config.name or
             basename(normpath(abspath(src_paths[0]))))
-    toolchain.info("Building project %s (%s, %s)" %
-                   (name, toolchain.target.name, toolchain_name))
+    notify.info("Building project %s (%s, %s)" %
+                (name, toolchain.target.name, toolchain_name))
 
     # Initialize reporting
     if report != None:
@@ -569,36 +565,48 @@
                             vendor_label)
 
     try:
-        # Call unified scan_resources
-        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
+        resources = Resources(notify).scan_with_toolchain(
+            src_paths, toolchain, inc_dirs=inc_dirs)
 
         # Change linker script if specified
         if linker_script is not None:
-            resources.linker_script = linker_script
+            resources.add_file_ref(linker_script, linker_script)
 
         # Compile Sources
-        objects = toolchain.compile_sources(resources, resources.inc_dirs)
-        resources.objects.extend(objects)
+        objects = toolchain.compile_sources(
+            resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
+        resources.add_files_to_type(FileType.OBJECT, objects)
 
         # Link Program
         if toolchain.config.has_regions:
-            res, _ = toolchain.link_program(resources, build_path, name + "_application")
+            binary, _ = toolchain.link_program(resources, build_path, name + "_application")
             region_list = list(toolchain.config.regions)
-            region_list = [r._replace(filename=res) if r.active else r
+            region_list = [r._replace(filename=binary) if r.active else r
                            for r in region_list]
-            res = join(build_path, name) + ".bin"
-            merge_region_list(region_list, res)
+            res = "%s.%s" % (join(build_path, name),
+                             getattr(toolchain.target, "OUTPUT_EXT", "bin"))
+            merge_region_list(region_list, res, notify)
+            update_regions = [
+                r for r in region_list if r.name in UPDATE_WHITELIST
+            ]
+            if update_regions:
+                update_res = "%s_update.%s" % (
+                    join(build_path, name),
+                    getattr(toolchain.target, "OUTPUT_EXT", "bin")
+                )
+                merge_region_list(update_regions, update_res, notify)
+                res = (res, update_res)
+            else:
+                res = (res, None)
         else:
             res, _ = toolchain.link_program(resources, build_path, name)
+            res = (res, None)
 
         memap_instance = getattr(toolchain, 'memap_instance', None)
         memap_table = ''
         if memap_instance:
             # Write output to stdout in text (pretty table) format
             memap_table = memap_instance.generate_output('table', stats_depth)
-
-            if not silent:
-                print memap_table
+            notify.info(memap_table)
 
             # Write output to file in JSON format
             map_out = join(build_path, name + "_map.json")
@@ -608,16 +616,19 @@
             map_csv = join(build_path, name + "_map.csv")
             memap_instance.generate_output('csv-ci', stats_depth, map_csv)
 
-        resources.detect_duplicates(toolchain)
+            map_html = join(build_path, name + "_map.html")
+            memap_instance.generate_output('html', stats_depth, map_html)
+
+        resources.detect_duplicates()
 
         if report != None:
             end = time()
             cur_result["elapsed_time"] = end - start
-            cur_result["output"] = toolchain.get_output() + memap_table
             cur_result["result"] = "OK"
-            cur_result["memory_usage"] = memap_instance.mem_report
-            cur_result["bin"] = res
-            cur_result["elf"] = splitext(res)[0] + ".elf"
+            cur_result["memory_usage"] = (memap_instance.mem_report
+                                          if memap_instance is not None else None)
+            cur_result["bin"] = res[0]
+            cur_result["elf"] = splitext(res[0])[0] + ".elf"
             cur_result.update(toolchain.report)
 
             add_result_to_report(report, cur_result)
@@ -635,22 +646,16 @@
 
             cur_result["elapsed_time"] = end - start
 
-            toolchain_output = toolchain.get_output()
-            if toolchain_output:
-                cur_result["output"] += toolchain_output
-
             add_result_to_report(report, cur_result)
-
         # Let Exception propagate
         raise
 
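Sketch of the update-image split above: regions whose name appears in
UPDATE_WHITELIST ("application") are merged into a second, smaller image next
to the full one. Region here is a stand-in for the namedtuple that
tools.config actually uses:

    from collections import namedtuple

    Region = namedtuple("Region", "name start size active filename")
    region_list = [
        Region("bootloader", 0x0, 0x8000, False, "boot.hex"),
        Region("application", 0x8000, 0x78000, True, "app.bin"),
    ]
    update_regions = [r for r in region_list if r.name in ("application",)]
    # merge_region_list(update_regions, "out_update.bin", notify) would
    # then write the update image alongside the full image.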
 def build_library(src_paths, build_path, target, toolchain_name,
                   dependencies_paths=None, name=None, clean=False,
-                  archive=True, notify=None, verbose=False, macros=None,
-                  inc_dirs=None, jobs=1, silent=False, report=None,
-                  properties=None, extra_verbose=False, project_id=None,
+                  archive=True, notify=None, macros=None, inc_dirs=None, jobs=1,
+                  report=None, properties=None, project_id=None,
                   remove_config_header_file=False, app_config=None,
-                  build_profile=None):
+                  build_profile=None, ignore=None):
     """ Build a library
 
     Positional arguments:
@@ -666,23 +671,22 @@
     clean - Rebuild everything if True
     archive - whether the library will create an archive file
     notify - Notify function for logs
-    verbose - Write the actual tools command lines used if True
     macros - additional macros
     inc_dirs - additional directories where include files may be found
     jobs - how many compilers we can run at once
-    silent - suppress printing of progress indicators
     report - a dict where a result may be appended
     properties - UUUUHHHHH beats me
-    extra_verbose - even more output!
     project_id - the name that goes in the report
     remove_config_header_file - delete config header file when done building
     app_config - location of a chosen mbed_app.json file
     build_profile - a dict of flags that will be passed to the compiler
+    ignore - list of paths to add to mbedignore
     """
 
     # Convert src_path to a list if needed
-    if type(src_paths) != ListType:
+    if not isinstance(src_paths, list):
         src_paths = [src_paths]
+    src_paths = [relpath(s) for s in src_paths]
 
     # Build path
     if archive:
@@ -700,14 +704,13 @@
     # Pass all params to the unified prepare_toolchain()
     toolchain = prepare_toolchain(
         src_paths, build_path, target, toolchain_name, macros=macros,
-        clean=clean, jobs=jobs, notify=notify, silent=silent,
-        verbose=verbose, extra_verbose=extra_verbose, app_config=app_config,
-        build_profile=build_profile)
+        clean=clean, jobs=jobs, notify=notify, app_config=app_config,
+        build_profile=build_profile, ignore=ignore)
 
     # The first path will give the name to the library
     if name is None:
         name = basename(normpath(abspath(src_paths[0])))
-    toolchain.info("Building library %s (%s, %s)" %
+    notify.info("Building library %s (%s, %s)" %
                    (name, toolchain.target.name, toolchain_name))
 
     # Initialize reporting
@@ -735,31 +738,25 @@
             raise Exception(error_msg)
 
     try:
-        # Call unified scan_resources
-        resources = scan_resources(src_paths, toolchain,
-                                   dependencies_paths=dependencies_paths,
-                                   inc_dirs=inc_dirs)
-
+        res = Resources(notify).scan_with_toolchain(
+            src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)
 
         # Copy headers, objects and static libraries - all files needed for
         # static lib
-        toolchain.copy_files(resources.headers, build_path, resources=resources)
-        toolchain.copy_files(resources.objects, build_path, resources=resources)
-        toolchain.copy_files(resources.libraries, build_path,
-                             resources=resources)
-        toolchain.copy_files(resources.json_files, build_path,
-                             resources=resources)
-        if resources.linker_script:
-            toolchain.copy_files(resources.linker_script, build_path,
-                                 resources=resources)
-
-        if resources.hex_files:
-            toolchain.copy_files(resources.hex_files, build_path,
-                                 resources=resources)
-
+        to_copy = (
+            res.get_file_refs(FileType.HEADER) +
+            res.get_file_refs(FileType.OBJECT) +
+            res.get_file_refs(FileType.LIB) +
+            res.get_file_refs(FileType.JSON) +
+            res.get_file_refs(FileType.LD_SCRIPT) +
+            res.get_file_refs(FileType.HEX) +
+            res.get_file_refs(FileType.BIN)
+        )
+        toolchain.copy_files(to_copy, build_path)
         # Compile Sources
-        objects = toolchain.compile_sources(resources, resources.inc_dirs)
-        resources.objects.extend(objects)
+        objects = toolchain.compile_sources(
+            res, res.get_file_paths(FileType.INC_DIR))
+        res.add_files_to_type(FileType.OBJECT, objects)
 
         if archive:
             toolchain.build_library(objects, build_path, name)
@@ -772,10 +769,7 @@
         if report != None:
             end = time()
             cur_result["elapsed_time"] = end - start
-            cur_result["output"] = toolchain.get_output()
             cur_result["result"] = "OK"
-
-
             add_result_to_report(report, cur_result)
         return True
 
@@ -790,10 +784,6 @@
 
             cur_result["elapsed_time"] = end - start
 
-            toolchain_output = toolchain.get_output()
-            if toolchain_output:
-                cur_result["output"] += toolchain_output
-
             add_result_to_report(report, cur_result)
 
         # Let Exception propagate
@@ -807,10 +797,9 @@
     real_tc_name = TOOLCHAIN_CLASSES[toolchain_name].__name__
     return join("TARGET_" + target_name, "TOOLCHAIN_" + real_tc_name)
 
-def build_lib(lib_id, target, toolchain_name, verbose=False,
-              clean=False, macros=None, notify=None, jobs=1, silent=False,
-              report=None, properties=None, extra_verbose=False,
-              build_profile=None):
+def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
+              notify=None, jobs=1, report=None, properties=None,
+              build_profile=None, ignore=None):
     """ Legacy method for building mbed libraries
 
     Positional arguments:
@@ -820,15 +809,13 @@
 
     Keyword arguments:
     clean - Rebuild everything if True
-    verbose - Write the actual tools command lines used if True
     macros - additional macros
     notify - Notify function for logs
     jobs - how many compilers we can run at once
-    silent - suppress printing of progress indicators
     report - a dict where a result may be appended
     properties - UUUUHHHHH beats me
-    extra_verbose - even more output!
     build_profile - a dict of flags that will be passed to the compiler
+    ignore - list of paths to add to mbedignore
     """
     lib = Library(lib_id)
     if not lib.is_supported(target, toolchain_name):
@@ -848,15 +835,14 @@
     build_path = lib.build_dir
     dependencies_paths = lib.dependencies
     inc_dirs = lib.inc_dirs
-    inc_dirs_ext = lib.inc_dirs_ext
 
-    if type(src_paths) != ListType:
+    if not isinstance(src_paths, list):
         src_paths = [src_paths]
 
     # The first path will give the name to the library
     name = basename(src_paths[0])
 
-    if report != None:
+    if report is not None:
         start = time()
         id_name = name.upper()
         description = name
@@ -892,52 +878,26 @@
 
         toolchain = prepare_toolchain(
             src_paths, tmp_path, target, toolchain_name, macros=macros,
-            notify=notify, silent=silent, extra_verbose=extra_verbose,
-            build_profile=build_profile, jobs=jobs, clean=clean)
+            notify=notify, build_profile=build_profile, jobs=jobs, clean=clean,
+            ignore=ignore)
 
-        toolchain.info("Building library %s (%s, %s)" %
-                       (name.upper(), target.name, toolchain_name))
+        notify.info("Building library %s (%s, %s)" %
+                    (name.upper(), target.name, toolchain_name))
 
         # Take into account the library configuration (MBED_CONFIG_FILE)
         config = toolchain.config
         config.add_config_files([MBED_CONFIG_FILE])
 
         # Scan Resources
-        resources = []
-        for src_path in src_paths:
-            resources.append(toolchain.scan_resources(src_path))
-
-        # Add extra include directories / files which are required by library
-        # This files usually are not in the same directory as source files so
-        # previous scan will not include them
-        if inc_dirs_ext is not None:
-            for inc_ext in inc_dirs_ext:
-                resources.append(toolchain.scan_resources(inc_ext))
-
-        # Dependencies Include Paths
-        dependencies_include_dir = []
-        if dependencies_paths is not None:
-            for path in dependencies_paths:
-                lib_resources = toolchain.scan_resources(path)
-                dependencies_include_dir.extend(lib_resources.inc_dirs)
-                dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))
-
-        if inc_dirs:
-            dependencies_include_dir.extend(inc_dirs)
-
-        # Add other discovered configuration data to the configuration object
-        for res in resources:
-            config.load_resources(res)
-        toolchain.set_config_data(toolchain.config.get_config_data())
-
+        resources = Resources(notify).scan_with_toolchain(
+            src_paths + (lib.inc_dirs_ext or []), toolchain,
+            inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)
 
         # Copy Headers
-        for resource in resources:
-            toolchain.copy_files(resource.headers, build_path,
-                                 resources=resource)
+        toolchain.copy_files(
+            resources.get_file_refs(FileType.HEADER), build_path)
 
-        dependencies_include_dir.extend(
-            toolchain.scan_resources(build_path).inc_dirs)
+        dependencies_include_dir = Resources(notify).scan_with_toolchain(
+            [build_path], toolchain).inc_dirs
 
         # Compile Sources
         objects = []
@@ -949,7 +909,6 @@
         if report != None and needed_update:
             end = time()
             cur_result["elapsed_time"] = end - start
-            cur_result["output"] = toolchain.get_output()
             cur_result["result"] = "OK"
 
             add_result_to_report(report, cur_result)
@@ -961,65 +920,80 @@
             cur_result["result"] = "FAIL"
             cur_result["elapsed_time"] = end - start
 
-            toolchain_output = toolchain.get_output()
-            if toolchain_output:
-                cur_result["output"] += toolchain_output
-
             add_result_to_report(report, cur_result)
 
         # Let Exception propagate
         raise
 
-# We do have unique legacy conventions about how we build and package the mbed
-# library
-def build_mbed_libs(target, toolchain_name, verbose=False,
-                    clean=False, macros=None, notify=None, jobs=1, silent=False,
-                    report=None, properties=None, extra_verbose=False,
-                    build_profile=None):
-    """ Function returns True is library was built and false if building was
-    skipped
+
+# A number of compiled files need to be copied as objects as the linker
+# will not search for weak symbol overrides in archives. These are:
+#   - mbed_retarget.o: to make sure that the C standard lib symbols get
+#                      overridden
+#   - mbed_board.o: `mbed_die` is weak
+#   - mbed_overrides.o: this contains platform overrides of various
+#                       weak SDK functions
+#   - mbed_main.o: this contains main redirection
+#   - mbed_sdk_boot.o: this contains the main boot code
+#   - PeripheralPins.o: PinMap can be weak
+SEPARATE_NAMES = [
+    'PeripheralPins.o',
+    'mbed_retarget.o',
+    'mbed_board.o',
+    'mbed_overrides.o',
+    'mbed_main.o',
+    'mbed_sdk_boot.o',
+]
+
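Sketch of the split that build_mbed_libs() performs below: objects whose
basename matches SEPARATE_NAMES are copied loose so the linker sees their
weak-symbol overrides; everything else is archived into the mbed library.

    objects = ["BUILD/mbed_retarget.o", "BUILD/serial_api.o",
               "BUILD/mbed_main.o"]
    separate = [o for o in objects
                if any(o.endswith(n) for n in SEPARATE_NAMES)]
    archived = [o for o in objects if o not in separate]
    assert separate == ["BUILD/mbed_retarget.o", "BUILD/mbed_main.o"]
    assert archived == ["BUILD/serial_api.o"]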
+
+def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
+                    notify=None, jobs=1, report=None, properties=None,
+                    build_profile=None, ignore=None):
+    """ Build legacy libraries for a target and toolchain pair
 
     Positional arguments:
     target - the MCU or board that the project will compile for
     toolchain_name - the name of the build tools
 
     Keyword arguments:
-    verbose - Write the actual tools command lines used if True
     clean - Rebuild everything if True
     macros - additional macros
     notify - Notify function for logs
     jobs - how many compilers we can run at once
-    silent - suppress printing of progress indicators
     report - a dict where a result may be appended
     properties - UUUUHHHHH beats me
-    extra_verbose - even more output!
     build_profile - a dict of flags that will be passed to the compiler
+    ignore - list of paths to add to mbedignore
+
+    Return - True if target + toolchain built correctly, False if not supported
     """
 
-    if report != None:
+    if report is not None:
         start = time()
         id_name = "MBED"
         description = "mbed SDK"
         vendor_label = target.extra_labels[0]
         cur_result = None
         prep_report(report, target.name, toolchain_name, id_name)
-        cur_result = create_result(target.name, toolchain_name, id_name,
-                                   description)
+        cur_result = create_result(
+            target.name, toolchain_name, id_name, description)
+        if properties is not None:
+            prep_properties(
+                properties, target.name, toolchain_name, vendor_label)
 
-        if properties != None:
-            prep_properties(properties, target.name, toolchain_name,
-                            vendor_label)
-
-    # Check toolchain support
     if toolchain_name not in target.supported_toolchains:
         supported_toolchains_text = ", ".join(target.supported_toolchains)
-        print('%s target is not yet supported by toolchain %s' %
-              (target.name, toolchain_name))
-        print('%s target supports %s toolchain%s' %
-              (target.name, supported_toolchains_text, 's'
-               if len(target.supported_toolchains) > 1 else ''))
+        notify.info('The target {} does not support the toolchain {}'.format(
+            target.name,
+            toolchain_name
+        ))
+        notify.info('{} supports {} toolchain{}'.format(
+            target.name,
+            supported_toolchains_text,
+            's' if len(target.supported_toolchains) > 1 else ''
+        ))
 
-        if report != None:
+        if report is not None:
             cur_result["result"] = "SKIP"
             add_result_to_report(report, cur_result)
 
@@ -1027,88 +1001,59 @@
 
     try:
         # Source and Build Paths
-        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
-        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
+        build_toolchain = join(
+            MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
         mkdir(build_toolchain)
 
-        # Toolchain
-        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
+        tmp_path = join(
+            MBED_LIBRARIES,
+            '.temp',
+            mbed2_obj_path(target.name, toolchain_name)
+        )
         mkdir(tmp_path)
 
+        # Toolchain and config
         toolchain = prepare_toolchain(
-            [""], tmp_path, target, toolchain_name, macros=macros,verbose=verbose,
-            notify=notify, silent=silent, extra_verbose=extra_verbose,
-            build_profile=build_profile, jobs=jobs, clean=clean)
+            [""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
+            build_profile=build_profile, jobs=jobs, clean=clean, ignore=ignore)
 
-        # Take into account the library configuration (MBED_CONFIG_FILE)
         config = toolchain.config
         config.add_config_files([MBED_CONFIG_FILE])
         toolchain.set_config_data(toolchain.config.get_config_data())
 
-        # CMSIS
-        toolchain.info("Building library %s (%s, %s)" %
-                       ('CMSIS', target.name, toolchain_name))
-        cmsis_src = MBED_CMSIS_PATH
-        resources = toolchain.scan_resources(cmsis_src)
-
-        toolchain.copy_files(resources.headers, build_target)
-        toolchain.copy_files(resources.linker_script, build_toolchain)
-        toolchain.copy_files(resources.bin_files, build_toolchain)
-
-        objects = toolchain.compile_sources(resources, tmp_path)
-        toolchain.copy_files(objects, build_toolchain)
-
-        # mbed
-        toolchain.info("Building library %s (%s, %s)" %
-                       ('MBED', target.name, toolchain_name))
-
-        # Common Headers
-        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
+        # distribute header files
+        toolchain.copy_files(
+            [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]
 
         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
-            resources = toolchain.scan_resources(dir)
-            toolchain.copy_files(resources.headers, dest)
+            resources = Resources(notify).scan_with_toolchain([dir], toolchain)
+            toolchain.copy_files(
+                [FileRef(basename(p), p) for p
+                 in resources.get_file_paths(FileType.HEADER)],
+                dest)
             library_incdirs.append(dest)
 
-        # Target specific sources
-        hal_src = MBED_TARGETS_PATH
-        hal_implementation = toolchain.scan_resources(hal_src)
-        toolchain.copy_files(hal_implementation.headers +
-                             hal_implementation.hex_files +
-                             hal_implementation.libraries +
-                             [MBED_CONFIG_FILE],
-                             build_target, resources=hal_implementation)
-        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
-        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
-        incdirs = toolchain.scan_resources(build_target).inc_dirs
-        objects = toolchain.compile_sources(hal_implementation,
-                                            library_incdirs + incdirs)
-        toolchain.copy_files(objects, build_toolchain)
+        # collect resources of the libs to compile
+        cmsis_res = Resources(notify).scan_with_toolchain(
+            [MBED_CMSIS_PATH], toolchain)
+        hal_res = Resources(notify).scan_with_toolchain(
+            [MBED_TARGETS_PATH], toolchain)
+        mbed_resources = Resources(notify).scan_with_toolchain(
+            [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain)
 
-        # Common Sources
-        mbed_resources = None
-        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
-            mbed_resources += toolchain.scan_resources(dir)
-
-        objects = toolchain.compile_sources(mbed_resources,
-                                            library_incdirs + incdirs)
+        incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs
 
-        # A number of compiled files need to be copied as objects as opposed to
-        # way the linker search for symbols in archives. These are:
-        #   - mbed_retarget.o: to make sure that the C standard lib symbols get
-        #                 overridden
-        #   - mbed_board.o: mbed_die is weak
-        #   - mbed_overrides.o: this contains platform overrides of various
-        #                       weak SDK functions
-        #   - mbed_main.o: this contains main redirection
-        separate_names, separate_objects = ['mbed_retarget.o', 'mbed_board.o',
-                                            'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], []
+        # Build Things
+        notify.info("Building library %s (%s, %s)" %
+                    ('MBED', target.name, toolchain_name))
+        objects = toolchain.compile_sources(mbed_resources, incdirs)
+        separate_objects = []
 
         for obj in objects:
-            for name in separate_names:
+            for name in SEPARATE_NAMES:
                 if obj.endswith(name):
                     separate_objects.append(obj)
 
@@ -1116,35 +1061,48 @@
             objects.remove(obj)
 
         toolchain.build_library(objects, build_toolchain, "mbed")
+        notify.info("Building library %s (%s, %s)" %
+                    ('CMSIS', target.name, toolchain_name))
+        cmsis_objects = toolchain.compile_sources(cmsis_res, incdirs + [tmp_path])
+        notify.info("Building library %s (%s, %s)" %
+                    ('HAL', target.name, toolchain_name))
+        hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])
 
-        for obj in separate_objects:
-            toolchain.copy_files(obj, build_toolchain)
+        # Copy everything into the build directory
+        to_copy_paths = [
+            hal_res.get_file_paths(FileType.HEADER),
+            hal_res.get_file_paths(FileType.HEX),
+            hal_res.get_file_paths(FileType.BIN),
+            hal_res.get_file_paths(FileType.LIB),
+            cmsis_res.get_file_paths(FileType.HEADER),
+            cmsis_res.get_file_paths(FileType.BIN),
+            cmsis_res.get_file_paths(FileType.LD_SCRIPT),
+            hal_res.get_file_paths(FileType.LD_SCRIPT),
+            [MBED_CONFIG_FILE],
+            cmsis_objects,
+            hal_objects,
+            separate_objects,
+        ]
+        to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
+        toolchain.copy_files(to_copy, build_toolchain)
 
-        if report != None:
+        if report is not None:
             end = time()
             cur_result["elapsed_time"] = end - start
-            cur_result["output"] = toolchain.get_output()
             cur_result["result"] = "OK"
-
             add_result_to_report(report, cur_result)
 
         return True
 
     except Exception as exc:
-        if report != None:
+        if report is not None:
             end = time()
             cur_result["result"] = "FAIL"
             cur_result["elapsed_time"] = end - start
 
-            toolchain_output = toolchain.get_output()
-            if toolchain_output:
-                cur_result["output"] += toolchain_output
-
             cur_result["output"] += str(exc)
 
             add_result_to_report(report, cur_result)
-
-        # Let Exception propagate
         raise
 
 
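The to_copy_paths block above flattens a list of lists with sum(paths, []);
itertools.chain is the linear-time equivalent for longer inputs:

    from itertools import chain

    to_copy_paths = [["a.h"], ["b.hex", "c.bin"], []]
    flat = list(chain.from_iterable(to_copy_paths))
    assert flat == sum(to_copy_paths, [])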
@@ -1169,24 +1127,20 @@
                 if toolchain not in unique_supported_toolchains:
                     unique_supported_toolchains.append(toolchain)
 
-    if "ARM" in unique_supported_toolchains:
-        unique_supported_toolchains.append("ARMC6")
+    return unique_supported_toolchains
+
 
-    return unique_supported_toolchains
+def _lowercase_release_version(release_version):
+    try:
+        return release_version.lower()
+    except AttributeError:
+        return 'all'
 
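The helper prefers EAFP over the removed basestring check: try .lower() and
fall back to 'all' for anything that is not a string, which also covers None
and integers:

    assert _lowercase_release_version("ALL") == "all"
    assert _lowercase_release_version(None) == "all"
    assert _lowercase_release_version(5) == "all"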
 def mcu_toolchain_list(release_version='5'):
     """  Shows list of toolchains
 
     """
-
-    if isinstance(release_version, basestring):
-        # Force release_version to lowercase if it is a string
-        release_version = release_version.lower()
-    else:
-        # Otherwise default to printing all known targets and toolchains
-        release_version = 'all'
-
-
+    release_version = _lowercase_release_version(release_version)
     version_release_targets = {}
     version_release_target_names = {}
 
@@ -1211,15 +1165,7 @@
     """  Shows target list
 
     """
-
-    if isinstance(release_version, basestring):
-        # Force release_version to lowercase if it is a string
-        release_version = release_version.lower()
-    else:
-        # Otherwise default to printing all known targets and toolchains
-        release_version = 'all'
-
-
+    release_version = _lowercase_release_version(release_version)
     version_release_targets = {}
     version_release_target_names = {}
 
@@ -1254,16 +1200,8 @@
     release_version - get the matrix for this major version number
     """
     # Only use it in this function so building works without extra modules
-    from prettytable import PrettyTable
-
-    if isinstance(release_version, basestring):
-        # Force release_version to lowercase if it is a string
-        release_version = release_version.lower()
-    else:
-        # Otherwise default to printing all known targets and toolchains
-        release_version = 'all'
-
-
+    from prettytable import PrettyTable, HEADER
+    release_version = _lowercase_release_version(release_version)
     version_release_targets = {}
     version_release_target_names = {}
 
@@ -1284,7 +1222,7 @@
 
     # All tests status table print
     columns = prepend_columns + unique_supported_toolchains
-    table_printer = PrettyTable(columns)
+    table_printer = PrettyTable(columns, junction_char="|", hrules=HEADER)
     # Align table
     for col in columns:
         table_printer.align[col] = "c"
@@ -1317,9 +1255,13 @@
             row.append(text)
 
         for unique_toolchain in unique_supported_toolchains:
-            if (unique_toolchain in TARGET_MAP[target].supported_toolchains or
+            tgt_obj = TARGET_MAP[target]
+            if (unique_toolchain in tgt_obj.supported_toolchains or
                 (unique_toolchain == "ARMC6" and
-                 "ARM" in TARGET_MAP[target].supported_toolchains)):
+                 "ARM" in tgt_obj.supported_toolchains) or
+                (unique_toolchain == "ARM" and
+                 "ARMC6" in tgt_obj.supported_toolchains and
+                 CORE_ARCH[tgt_obj.core] == 8)):
                 text = "Supported"
                 perm_counter += 1
             else:
@@ -1368,10 +1310,10 @@
     Positional arguments:
     report - Report generated during build procedure.
     """
-    from prettytable import PrettyTable
+    from prettytable import PrettyTable, HEADER
     columns_text = ['name', 'target', 'toolchain']
     columns_int = ['static_ram', 'total_flash']
-    table = PrettyTable(columns_text + columns_int)
+    table = PrettyTable(columns_text + columns_int, junction_char="|", hrules=HEADER)
 
     for col in columns_text:
         table.align[col] = 'l'
@@ -1445,11 +1387,13 @@
             for project in tc.values():
                 for build in project:
                     try:
+                        build[0]['bin_fullpath'] = build[0]['bin']
+                        build[0]['elf_fullpath'] = build[0]['elf']
                         build[0]['elf'] = relpath(build[0]['elf'], path_to_file)
                         build[0]['bin'] = relpath(build[0]['bin'], path_to_file)
                     except KeyError:
                         pass
                     if 'type' not in build[0]:
                         build[0]['type'] = app_type
-                    build_data['builds'].append(build[0])
+                    build_data['builds'].insert(0, build[0])
     dump(build_data, open(filename, "wb"), indent=4, separators=(',', ': '))