Anders Blomdell / mbed-sdk-tools

Committer: screamer
Date: Mon Aug 01 09:10:17 2016 +0100
Parent: 23:fbae331171fa
Child: 25:aef6536015e3
Commit message: Major update to tools from ARMmbed/mbed-os

Changed in this revision

build.py
build_api.py
build_profiles.py
build_release.py
build_travis.py
config.py
export/coide.py
export/exporters.py
export/gccarm.py
export/iar.py
export/sw4stm32.py
export/uvision4.py
export/uvision5.py
make.py
memap.py
project.py
test.py
test/config_test/test23/test_data.py
test/config_test/test25/test_data.py
test_api.py
tests.py
toolchains/__init__.py
toolchains/arm.py
toolchains/gcc.py
toolchains/iar.py
utils.py
--- a/build.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/build.py	Mon Aug 01 09:10:17 2016 +0100
@@ -38,6 +38,7 @@
 from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
 from utils import argparse_filestring_type
 from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, CLI_COLOR_MAP
+from utils import argparse_filestring_type, argparse_dir_not_parent
 
 if __name__ == '__main__':
     start = time()
@@ -48,7 +49,7 @@
     parser.add_argument("--source", dest="source_dir", type=argparse_filestring_type,
                         default=None, help="The source (input) directory", action="append")
 
-    parser.add_argument("--build", dest="build_dir",
+    parser.add_argument("--build", dest="build_dir", type=argparse_dir_not_parent(ROOT),
                       default=None, help="The build (output) directory")
 
     parser.add_argument("--no-archive", dest="no_archive", action="store_true",
--- a/build_api.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/build_api.py	Mon Aug 01 09:10:17 2016 +0100
@@ -23,20 +23,22 @@
 from types import ListType
 from shutil import rmtree
 from os.path import join, exists, basename, abspath, normpath
-from os import getcwd, walk
+from os import getcwd, walk, linesep
 from time import time
 import fnmatch
 
-from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException
+from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException, InvalidReleaseTargetException
 from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
-from tools.targets import TARGET_NAMES, TARGET_MAP
+from tools.targets import TARGET_NAMES, TARGET_MAP, set_targets_json_location
 from tools.libraries import Library
 from tools.toolchains import TOOLCHAIN_CLASSES, mbedToolchain
-from tools.build_profiles import find_build_profile, get_toolchain_profile
+from tools.build_profiles import find_build_profile, get_toolchain_profile, find_targets_json
 from jinja2 import FileSystemLoader
 from jinja2.environment import Environment
 from tools.config import Config
 
+RELEASE_VERSIONS = ['2', '5']
+
 def prep_report(report, target_name, toolchain_name, id_name):
     # Setup report keys
     if not target_name in report:
@@ -78,28 +80,13 @@
     result_wrap = { 0: result }
     report[target][toolchain][id_name].append(result_wrap)
 
-def get_config(src_path, target, toolchain_name):
-    # Convert src_path to a list if needed
-    src_paths = [src_path] if type(src_path) != ListType else src_path
-    # We need to remove all paths which are repeated to avoid
-    # multiple compilations and linking with the same objects
-    src_paths = [src_paths[0]] + list(set(src_paths[1:]))
-
-    # Create configuration object
-    config = Config(target, src_paths)
+def get_config(src_paths, target, toolchain_name):
+    # Convert src_paths to a list if needed
+    if type(src_paths) != ListType:
+        src_paths = [src_paths]
 
-    # If the 'target' argument is a string, convert it to a target instance
-    if isinstance(target, basestring):
-        try:
-            target = TARGET_MAP[target]
-        except KeyError:
-            raise KeyError("Target '%s' not found" % target)
-
-    # Toolchain instance
-    try:
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options=None, notify=None, macros=None, silent=True, extra_verbose=False)
-    except KeyError as e:
-        raise KeyError("Toolchain %s not supported" % toolchain_name)
+    # Pass all params to the unified prepare_toolchain()
+    toolchain = prepare_toolchain(src_paths, target, toolchain_name)
 
     # Scan src_path for config files
     resources = toolchain.scan_resources(src_paths[0])
@@ -110,10 +97,10 @@
     prev_features = set()
     while True:
         # Update the configuration with any .json files found while scanning
-        config.add_config_files(resources.json_files)
+        toolchain.config.add_config_files(resources.json_files)
 
         # Add features while we find new ones
-        features = config.get_features()
+        features = toolchain.config.get_features()
         if features == prev_features:
             break
 
@@ -122,29 +109,134 @@
                 resources += resources.features[feature]
 
         prev_features = features
-    config.validate_config()
+    toolchain.config.validate_config()
 
-    cfg, macros = config.get_config_data()
-    features = config.get_features()
+    cfg, macros = toolchain.config.get_config_data()
+    features = toolchain.config.get_features()
     return cfg, macros, features
 
-def build_project(src_path, build_path, target, toolchain_name,
-        libraries_paths=None, options=None, linker_script=None,
-        clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
-        jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None,
-        extra_verbose=False, config=None):
-    """ This function builds project. Project can be for example one test / UT
+def is_official_target(target_name, version):
+    """ Returns True, None if a target is part of the official release for the
+    given version. Return False, 'reason' if a target is not part of the
+    official release for the given version.
+
+    target_name: Name if the target (ex. 'K64F')
+    version: The release version string. Should be a string contained within RELEASE_VERSIONS
+    """
+    
+    result = True
+    reason = None
+    target = TARGET_MAP[target_name]
+    
+    if hasattr(target, 'release_versions') and version in target.release_versions:
+        if version == '2':
+            # For version 2, either ARM or uARM toolchain support is required
+            required_toolchains = set(['ARM', 'uARM'])
+            
+            if not len(required_toolchains.intersection(set(target.supported_toolchains))) > 0:
+                result = False           
+                reason = ("Target '%s' must support " % target.name) + \
+                    ("one of the following toolchains to be included in the mbed 2.0 ") + \
+                    (("official release: %s" + linesep) % ", ".join(required_toolchains)) + \
+                    ("Currently it is only configured to support the ") + \
+                    ("following toolchains: %s" % ", ".join(target.supported_toolchains))
+                    
+        elif version == '5':
+            # For version 5, ARM, GCC_ARM, and IAR toolchain support is required
+            required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
+            required_toolchains_sorted = list(required_toolchains)
+            required_toolchains_sorted.sort()
+            supported_toolchains = set(target.supported_toolchains)
+            supported_toolchains_sorted = list(supported_toolchains)
+            supported_toolchains_sorted.sort()
+            
+            if not required_toolchains.issubset(supported_toolchains):
+                result = False
+                reason = ("Target '%s' must support " % target.name) + \
+                    ("ALL of the following toolchains to be included in the mbed OS 5.0 ") + \
+                    (("official release: %s" + linesep) % ", ".join(required_toolchains_sorted)) + \
+                    ("Currently it is only configured to support the ") + \
+                    ("following toolchains: %s" % ", ".join(supported_toolchains_sorted))
+
+            elif not target.default_build == 'standard':
+                result = False
+                reason = ("Target '%s' must set the 'default_build' " % target.name) + \
+                    ("to 'standard' to be included in the mbed OS 5.0 ") + \
+                    ("official release." + linesep) + \
+                    ("Currently it is set to '%s'" % target.default_build)
+
+        else:
+            result = False
+            reason = ("Target '%s' has set an invalid release version of '%s'" % version) + \
+                ("Please choose from the following release versions: %s" + ', '.join(RELEASE_VERSIONS))
+
+    else:
+        result = False
+        if not hasattr(target, 'release_versions'):
+            reason = "Target '%s' does not have the 'release_versions' key set" % target.name
+        elif not version in target.release_versions:
+            reason = "Target '%s' does not contain the version '%s' in its 'release_versions' key" % (target.name, version)
+    
+    return result, reason
+
+def transform_release_toolchains(toolchains, version):
+    """ Given a list of toolchains and a release version, return a list of
+    only the supported toolchains for that release
+
+    toolchains: The list of toolchains
+    version: The release version string. Should be a string contained within RELEASE_VERSIONS
+    """
+    toolchains_set = set(toolchains)
+
+    if version == '5':
+        return ['ARM', 'GCC_ARM', 'IAR']
+    else:
+        return toolchains
+
+
+def get_mbed_official_release(version):
+    """ Given a release version string, return a tuple that contains a target
+    and the supported toolchains for that release.
+    Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')), ('K64F', ('ARM', 'GCC_ARM')), ...)
+
+    version: The version string. Should be a string contained within RELEASE_VERSIONS
     """
 
-    # Convert src_path to a list if needed
-    src_paths = [src_path] if type(src_path) != ListType else src_path
+    MBED_OFFICIAL_RELEASE = (
+        tuple(
+            tuple(
+                [
+                    TARGET_MAP[target].name,
+                    tuple(transform_release_toolchains(TARGET_MAP[target].supported_toolchains, version))
+                ]
+            ) for target in TARGET_NAMES if (hasattr(TARGET_MAP[target], 'release_versions') and version in TARGET_MAP[target].release_versions)
+        )
+    )
+    
+    for target in MBED_OFFICIAL_RELEASE:
+        is_official, reason = is_official_target(target[0], version)
+        
+        if not is_official:
+            raise InvalidReleaseTargetException(reason)
+            
+    return MBED_OFFICIAL_RELEASE
+
+
+def prepare_toolchain(src_paths, target, toolchain_name,
+        macros=None, options=None, clean=False, jobs=1,
+        notify=None, silent=False, verbose=False, extra_verbose=False, config=None):
+    """ Prepares resource related objects - toolchain, target, config
+    src_paths: the paths to source directories
+    target: ['LPC1768', 'LPC11U24', 'LPC2368']
+    toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
+    clean: Rebuild everything if True
+    notify: Notify function for logs
+    verbose: Write the actual tools command lines if True
+    """
 
     # We need to remove all paths which are repeated to avoid
     # multiple compilations and linking with the same objects
     src_paths = [src_paths[0]] + list(set(src_paths[1:]))
-    first_src_path = src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd()
-    abs_path = abspath(first_src_path)
-    project_name = basename(normpath(abs_path))
 
     # If the configuration object was not yet created, create it now
     config = config or Config(target, src_paths)
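
A quick usage sketch of the new release helpers (K64F is only an example target; this assumes the tools package is importable as tools.build_api):

    from tools.build_api import is_official_target, get_mbed_official_release

    ok, reason = is_official_target('K64F', '5')
    if not ok:
        print(reason)

    # Tuple of (target name, supported toolchains) pairs, e.g.
    # (('K64F', ('ARM', 'GCC_ARM', 'IAR')), ...)
    for name, toolchains in get_mbed_official_release('5'):
        print("%s: %s" % (name, ', '.join(toolchains)))
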
@@ -156,17 +248,100 @@
         except KeyError:
             raise KeyError("Target '%s' not found" % target)
 
+    # Toolchain instance
+    try:
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
+    except KeyError as e:
+        raise KeyError("Toolchain %s not supported" % toolchain_name)
+
+    toolchain.config = config
+    toolchain.jobs = jobs
+    toolchain.build_all = clean
+    toolchain.VERBOSE = verbose
+
+    return toolchain
+
+def scan_resources(src_paths, toolchain, dependencies_paths=None, inc_dirs=None):
+    """ Scan resources using initialized toolcain
+    src_paths: the paths to source directories
+    toolchain: valid toolchain object
+    dependencies_paths: dependency paths that we should scan for include dirs
+    inc_dirs: additional include directories which should be added to thescanner resources
+    """
+
+    # Scan src_path
+    resources = toolchain.scan_resources(src_paths[0])
+    for path in src_paths[1:]:
+        resources.add(toolchain.scan_resources(path))
+
+    # Scan dependency paths for include dirs
+    if dependencies_paths is not None:
+        for path in dependencies_paths:
+            lib_resources = toolchain.scan_resources(path)
+            resources.inc_dirs.extend(lib_resources.inc_dirs)
+
+    # Add additional include directories if passed
+    if inc_dirs:
+        if type(inc_dirs) == ListType:
+            resources.inc_dirs.extend(inc_dirs)
+        else:
+            resources.inc_dirs.append(inc_dirs)
+
+    # Load resources into the config system which might expand/modify resources based on config data
+    resources = toolchain.config.load_resources(resources)
+
+    # Set the toolchain's configuration data
+    toolchain.set_config_data(toolchain.config.get_config_data())
+
+    return resources
+
+def build_project(src_paths, build_path, target, toolchain_name,
+        libraries_paths=None, options=None, linker_script=None,
+        clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
+        jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None,
+        extra_verbose=False, config=None):
+    """ This function builds project. Project can be for example one test / UT
+    """
+
+    # Convert src_path to a list if needed
+    if type(src_paths) != ListType:
+        src_paths = [src_paths]
+    # Extend src_paths with libraries_paths
+    if libraries_paths is not None:
+        src_paths.extend(libraries_paths)
+
+    # Build Directory
+    if clean:
+        if exists(build_path):
+            rmtree(build_path)
+    mkdir(build_path)
+
+
+    ####################################
+    # mbed Classic/2.0/library support #
+    ####################################
+
     # Find build system profile
     profile = None
+    targets_json = None
     for path in src_paths:
         profile = find_build_profile(path) or profile
+        targets_json = find_targets_json(path) or targets_json
 
+    # Apply targets.json to active targets
+    if targets_json:
+        if verbose:
+            print("Using targets from %s" % targets_json)
+        set_targets_json_location(targets_json)
+
+    # Apply profile to toolchains
     if profile:
         def init_hook(self):
             profile_data = get_toolchain_profile(self.name, profile)
             if not profile_data:
                 return
-            self.info("Using toolchain %s profile %s" % (self.name, profile))
+            if verbose:
+                self.info("Using toolchain %s profile %s" % (self.name, profile))
 
             for k,v in profile_data.items():
                 if self.flags.has_key(k):
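
The refactor splits the old monolithic setup into prepare_toolchain() followed by scan_resources(). A sketch of the new two-step flow (directory names hypothetical):

    from tools.build_api import prepare_toolchain, scan_resources

    # Build a configured toolchain object for the target/toolchain pair
    toolchain = prepare_toolchain(['.'], 'K64F', 'GCC_ARM',
                                  clean=True, jobs=4, verbose=True)

    # Scan sources, collect dependency include dirs, apply config expansion
    resources = scan_resources(['.'], toolchain, inc_dirs=['extra/include'])
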
@@ -176,71 +351,37 @@
 
         mbedToolchain.init = init_hook
 
-    # Toolchain instance
-    try:
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
-    except KeyError as e:
-        raise KeyError("Toolchain %s not supported" % toolchain_name)
 
-    toolchain.VERBOSE = verbose
-    toolchain.jobs = jobs
-    toolchain.build_all = clean
+    # Pass all params to the unified prepare_toolchain()
+    toolchain = prepare_toolchain(src_paths, target, toolchain_name,
+        macros=macros, options=options, clean=clean, jobs=jobs,
+        notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose, config=config)
 
+    # The first path will give the name to the library
     if name is None:
-        # We will use default project name based on project folder name
-        name = project_name
-        toolchain.info("Building project %s (%s, %s)" % (project_name, target.name, toolchain_name))
-    else:
-        # User used custom global project name to have the same name for the
-        toolchain.info("Building project %s to %s (%s, %s)" % (project_name, name, target.name, toolchain_name))
+        name = basename(normpath(abspath(src_paths[0])))
+    toolchain.info("Building project %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))
 
-
+    # Initialize reporting
     if report != None:
         start = time()
-
         # If project_id is specified, use that over the default name
         id_name = project_id.upper() if project_id else name.upper()
         description = project_description if project_description else name
-        vendor_label = target.extra_labels[0]
-        cur_result = None
-        prep_report(report, target.name, toolchain_name, id_name)
-        cur_result = create_result(target.name, toolchain_name, id_name, description)
-
+        vendor_label = toolchain.target.extra_labels[0]
+        prep_report(report, toolchain.target.name, toolchain_name, id_name)
+        cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
         if properties != None:
-            prep_properties(properties, target.name, toolchain_name, vendor_label)
+            prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)
 
     try:
-        # Scan src_path and libraries_paths for resources
-        resources = toolchain.scan_resources(src_paths[0])
-        for path in src_paths[1:]:
-            resources.add(toolchain.scan_resources(path))
-        if libraries_paths is not None:
-            src_paths.extend(libraries_paths)
-            for path in libraries_paths:
-                resources.add(toolchain.scan_resources(path))
+        # Call unified scan_resources
+        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
 
+        # Change linker script if specified
         if linker_script is not None:
             resources.linker_script = linker_script
 
-        # Build Directory
-        if clean:
-            if exists(build_path):
-                rmtree(build_path)
-        mkdir(build_path)
-
-        # We need to add if necessary additional include directories
-        if inc_dirs:
-            if type(inc_dirs) == ListType:
-                resources.inc_dirs.extend(inc_dirs)
-            else:
-                resources.inc_dirs.append(inc_dirs)
-
-        # Load resources into the config system which might expand/modify resources based on config data
-        resources = config.load_resources(resources)
-
-        # Set the toolchain's configuration data
-        toolchain.set_config_data(config.get_config_data())
-
         # Compile Sources
         objects = toolchain.compile_sources(resources, build_path, resources.inc_dirs)
         resources.objects.extend(objects)
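
build_project() itself now delegates to the two helpers above, so callers keep the same signature. An illustrative invocation (paths and project name hypothetical):

    from tools.build_api import build_project

    build_project('.', 'BUILD/K64F/GCC_ARM', 'K64F', 'GCC_ARM',
                  name='my_app', clean=True, jobs=4)
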
@@ -281,117 +422,67 @@
 
 def build_library(src_paths, build_path, target, toolchain_name,
          dependencies_paths=None, options=None, name=None, clean=False, archive=True,
-         notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None,
+         notify=None, verbose=False, macros=None, inc_dirs=None,
          jobs=1, silent=False, report=None, properties=None, extra_verbose=False,
          project_id=None):
-    """ src_path: the path of the source directory
+    """ Prepares resource related objects - toolchain, target, config
+    src_paths: the paths to source directories
     build_path: the path of the build directory
     target: ['LPC1768', 'LPC11U24', 'LPC2368']
-    toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
-    library_paths: List of paths to additional libraries
+    toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
     clean: Rebuild everything if True
     notify: Notify function for logs
     verbose: Write the actual tools command lines if True
     inc_dirs: additional include directories which should be included in build
-    inc_dirs_ext: additional include directories which should be copied to library directory
     """
+
+    # Convert src_path to a list if needed
     if type(src_paths) != ListType:
         src_paths = [src_paths]
 
-    # The first path will give the name to the library
-    project_name = basename(src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd())
-    if name is None:
-        # We will use default project name based on project folder name
-        name = project_name
+    # Build path
+    if archive:
+        # Use temp path when building archive
+        tmp_path = join(build_path, '.temp')
+        mkdir(tmp_path)
+    else:
+        tmp_path = build_path
 
-    # If the configuration object was not yet created, create it now
-    config = Config(target, src_paths)
+    # Pass all params to the unified prepare_toolchain()
+    toolchain = prepare_toolchain(src_paths, target, toolchain_name,
+        macros=macros, options=options, clean=clean, jobs=jobs,
+        notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose)
 
-    # If the 'target' argument is a string, convert it to a target instance
-    if isinstance(target, basestring):
-        try:
-            target = TARGET_MAP[target]
-        except KeyError:
-            raise KeyError("Target '%s' not found" % target)
+    # The first path will give the name to the library
+    if name is None:
+        name = basename(normpath(abspath(src_paths[0])))
+    toolchain.info("Building library %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))
 
+    # Initialize reporting
     if report != None:
         start = time()
-
         # If project_id is specified, use that over the default name
         id_name = project_id.upper() if project_id else name.upper()
         description = name
-        vendor_label = target.extra_labels[0]
-        cur_result = None
-        prep_report(report, target.name, toolchain_name, id_name)
-        cur_result = create_result(target.name, toolchain_name, id_name, description)
-
+        vendor_label = toolchain.target.extra_labels[0]
+        prep_report(report, toolchain.target.name, toolchain_name, id_name)
+        cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
         if properties != None:
-            prep_properties(properties, target.name, toolchain_name, vendor_label)
+            prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)
 
     for src_path in src_paths:
         if not exists(src_path):
             error_msg = "The library source folder does not exist: %s", src_path
-
             if report != None:
                 cur_result["output"] = error_msg
                 cur_result["result"] = "FAIL"
                 add_result_to_report(report, cur_result)
-
             raise Exception(error_msg)
 
     try:
-        # Toolchain instance
-        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
-        toolchain.VERBOSE = verbose
-        toolchain.jobs = jobs
-        toolchain.build_all = clean
-
-        toolchain.info("Building library %s (%s, %s)" % (name, target.name, toolchain_name))
-
-        # Scan Resources
-        resources = None
-        for path in src_paths:
-            # Scan resources
-            resource = toolchain.scan_resources(path)
-
-            # Extend resources collection
-            if not resources:
-                resources = resource
-            else:
-                resources.add(resource)
+        # Call unified scan_resources
+        resources = scan_resources(src_paths, toolchain, dependencies_paths=dependencies_paths, inc_dirs=inc_dirs)
 
-        # We need to add if necessary additional include directories
-        if inc_dirs:
-            if type(inc_dirs) == ListType:
-                resources.inc_dirs.extend(inc_dirs)
-            else:
-                resources.inc_dirs.append(inc_dirs)
-
-        # Add extra include directories / files which are required by library
-        # This files usually are not in the same directory as source files so
-        # previous scan will not include them
-        if inc_dirs_ext is not None:
-            for inc_ext in inc_dirs_ext:
-                resources.add(toolchain.scan_resources(inc_ext))
-
-        # Dependencies Include Paths
-        if dependencies_paths is not None:
-            for path in dependencies_paths:
-                lib_resources = toolchain.scan_resources(path)
-                resources.inc_dirs.extend(lib_resources.inc_dirs)
-
-        if archive:
-            # Use temp path when building archive
-            tmp_path = join(build_path, '.temp')
-            mkdir(tmp_path)
-        else:
-            tmp_path = build_path
-
-        # Load resources into the config system which might expand/modify resources based on config data
-        resources = config.load_resources(resources)
-
-        # Set the toolchain's configuration data
-        toolchain.set_config_data(config.get_config_data())
 
         # Copy headers, objects and static libraries - all files needed for static lib
         toolchain.copy_files(resources.headers, build_path, resources=resources)
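
build_library() follows the same pattern, with the archive flag deciding whether objects are staged under a .temp directory before archiving. An illustrative call (paths hypothetical):

    from tools.build_api import build_library

    build_library(['mylib'], 'BUILD/K64F/GCC_ARM', 'K64F', 'GCC_ARM',
                  name='mylib', archive=True, clean=True)
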
@@ -400,7 +491,7 @@
         if resources.linker_script:
             toolchain.copy_files(resources.linker_script, build_path, resources=resources)
 
-        if resource.hex_files:
+        if resources.hex_files:
             toolchain.copy_files(resources.hex_files, build_path, resources=resources)
 
         # Compile Sources
@@ -701,24 +792,57 @@
         raise e
 
 
-def get_unique_supported_toolchains():
-    """ Get list of all unique toolchains supported by targets """
+def get_unique_supported_toolchains(release_targets=None):
+    """ Get list of all unique toolchains supported by targets
+    If release_targets is not specified, then it queries all known targets
+    release_targets: tuple structure returned from get_mbed_official_release()
+    """
     unique_supported_toolchains = []
-    for target in TARGET_NAMES:
-        for toolchain in TARGET_MAP[target].supported_toolchains:
-            if toolchain not in unique_supported_toolchains:
-                unique_supported_toolchains.append(toolchain)
+
+    if not release_targets:
+        for target in TARGET_NAMES:
+            for toolchain in TARGET_MAP[target].supported_toolchains:
+                if toolchain not in unique_supported_toolchains:
+                    unique_supported_toolchains.append(toolchain)
+    else:
+        for target in release_targets:
+            for toolchain in target[1]:
+                if toolchain not in unique_supported_toolchains:
+                    unique_supported_toolchains.append(toolchain)
+
     return unique_supported_toolchains
 
 
-def mcu_toolchain_matrix(verbose_html=False, platform_filter=None):
+def mcu_toolchain_matrix(verbose_html=False, platform_filter=None, release_version='5'):
     """  Shows target map using prettytable """
-    unique_supported_toolchains = get_unique_supported_toolchains()
     from prettytable import PrettyTable # Only use it in this function so building works without extra modules
 
+    if isinstance(release_version, basestring):
+        # Force release_version to lowercase if it is a string
+        release_version = release_version.lower()
+    else:
+        # Otherwise default to printing all known targets and toolchains
+        release_version = 'all'
+
+
+    version_release_targets = {}
+    version_release_target_names = {}
+
+    for version in RELEASE_VERSIONS:
+        version_release_targets[version] = get_mbed_official_release(version)
+        version_release_target_names[version] = [x[0] for x in version_release_targets[version]]
+
+    if release_version in RELEASE_VERSIONS:
+        release_targets = version_release_targets[release_version]
+    else:
+        release_targets = None
+
+    unique_supported_toolchains = get_unique_supported_toolchains(release_targets)
+    prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
+
     # All tests status table print
-    columns = ["Target"] + unique_supported_toolchains
-    pt = PrettyTable(["Target"] + unique_supported_toolchains)
+    columns = prepend_columns + unique_supported_toolchains
+    pt = PrettyTable(columns)
     # Align table
     for col in columns:
         pt.align[col] = "c"
@@ -726,7 +850,15 @@
 
     perm_counter = 0
     target_counter = 0
-    for target in sorted(TARGET_NAMES):
+
+    target_names = []
+
+    if release_targets:
+        target_names = [x[0] for x in release_targets]
+    else:
+        target_names = TARGET_NAMES
+
+    for target in sorted(target_names):
         if platform_filter is not None:
             # Filter out platforms using regex
             if re.search(platform_filter, target) is None:
@@ -734,6 +866,14 @@
         target_counter += 1
 
         row = [target]  # First column is platform name
+
+        for version in RELEASE_VERSIONS:
+            if target in version_release_target_names[version]:
+                text = "Supported"
+            else:
+                text = "-"
+            row.append(text)
+
         for unique_toolchain in unique_supported_toolchains:
             if unique_toolchain in TARGET_MAP[target].supported_toolchains:
                 text = "Supported"
@@ -1025,63 +1165,3 @@
 
     with open(filename, 'w+') as f:
         f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
-
-
-def scan_for_source_paths(path, exclude_paths=None):
-    ignorepatterns = []
-    paths = []
-
-    def is_ignored(file_path):
-        for pattern in ignorepatterns:
-            if fnmatch.fnmatch(file_path, pattern):
-                return True
-        return False
-
-
-    """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
-    When topdown is True, the caller can modify the dirnames list in-place
-    (perhaps using del or slice assignment), and walk() will only recurse into
-    the subdirectories whose names remain in dirnames; this can be used to prune
-    the search, impose a specific order of visiting, or even to inform walk()
-    about directories the caller creates or renames before it resumes walk()
-    again. Modifying dirnames when topdown is False is ineffective, because in
-    bottom-up mode the directories in dirnames are generated before dirpath
-    itself is generated.
-    """
-    for root, dirs, files in walk(path, followlinks=True):
-        # Remove ignored directories
-        # Check if folder contains .mbedignore
-        if ".mbedignore" in files :
-            with open (join(root,".mbedignore"), "r") as f:
-                lines=f.readlines()
-                lines = [l.strip() for l in lines] # Strip whitespaces
-                lines = [l for l in lines if l != ""] # Strip empty lines
-                lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
-                # Append root path to glob patterns
-                # and append patterns to ignorepatterns
-                ignorepatterns.extend([join(root,line.strip()) for line in lines])
-
-        for d in copy(dirs):
-            dir_path = join(root, d)
-
-            # Always ignore hidden directories
-            if d.startswith('.'):
-                dirs.remove(d)
-
-            # Remove dirs that already match the ignorepatterns
-            # to avoid travelling into them and to prevent them
-            # on appearing in include path.
-            if is_ignored(join(dir_path,"")):
-                dirs.remove(d)
-
-            if exclude_paths:
-                for exclude_path in exclude_paths:
-                    rel_path = relpath(dir_path, exclude_path)
-                    if not (rel_path.startswith('..')):
-                        dirs.remove(d)
-                        break
-
-        # Add root to include paths
-        paths.append(root)
-
-    return paths
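
With the release plumbing in place, the toolchain matrix can be filtered by release version. Assuming the function returns the rendered table, as its PrettyTable usage suggests:

    from tools.build_api import mcu_toolchain_matrix

    # Only targets in the mbed OS 5 release
    print(mcu_toolchain_matrix(release_version='5'))

    # Any non-string release_version falls back to all known targets,
    # with one "mbed OS x" support column per entry in RELEASE_VERSIONS
    print(mcu_toolchain_matrix(release_version=None))
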
--- a/build_profiles.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/build_profiles.py	Mon Aug 01 09:10:17 2016 +0100
@@ -20,7 +20,7 @@
 import colorama
 
 from copy import copy
-from os.path import join
+from os.path import join, abspath, exists
 from os import walk
 import fnmatch
 
@@ -69,6 +69,29 @@
     return builds
 
 
+def find_targets_json(path, depth=1):
+    f = 'targets.json'
+    if exists(join(path, f)):
+        return abspath(join(path, f))
+
+    if depth > 2:
+        return None
+
+    for root, dirs, files in walk(path):
+        for d in copy(dirs):
+            if d.startswith('.'):
+                dirs.remove(d)
+
+            if exists(join(root, d, f)):
+                return abspath(join(root, d, f))
+            else:
+                found = find_targets_json(join(root, d), depth+1)
+                if found:
+                    return found
+
+    return None
+
+
 # Toolchain profiles for backward compatibility with old mbed SDK library releases
 TOOLCHAIN_PROFILES = {
     'ARM_STD' : {
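
find_targets_json() gives library checkouts a way to carry their own target definitions: it returns the absolute path of the first targets.json found within roughly two directory levels, or None. A usage sketch:

    from tools.build_profiles import find_targets_json

    targets_json = find_targets_json('.')
    if targets_json:
        print("Using targets from %s" % targets_json)
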
--- a/build_release.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/build_release.py	Mon Aug 01 09:10:17 2016 +0100
@@ -28,6 +28,7 @@
 
 from tools.build_api import build_mbed_libs
 from tools.build_api import write_build_report
+from tools.build_api import get_mbed_official_release
 from tools.targets import TARGET_MAP, TARGET_NAMES
 from tools.test_exporters import ReportExporter, ResultExporterType
 from tools.test_api import SingleTestRunner
@@ -35,10 +36,7 @@
 from tools.paths import TEST_DIR, MBED_LIBRARIES
 from tools.tests import TEST_MAP
 
-OFFICIAL_MBED_LIBRARY_BUILD = (
-    tuple(tuple([TARGET_MAP[target].name, tuple(TARGET_MAP[target].supported_toolchains)]) for target in TARGET_NAMES if TARGET_MAP[target].release)
-)
-
+OFFICIAL_MBED_LIBRARY_BUILD = get_mbed_official_release('2')
 
 if __name__ == '__main__':
     parser = OptionParser()
--- a/build_travis.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/build_travis.py	Mon Aug 01 09:10:17 2016 +0100
@@ -45,6 +45,7 @@
     { "target": "NUCLEO_F072RB",     "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
     { "target": "NUCLEO_F091RC",     "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
     { "target": "NUCLEO_F103RB",     "toolchains": "GCC_ARM", "libs": ["rtos", "fat"] },
+    { "target": "NUCLEO_F207ZG",     "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
     { "target": "NUCLEO_F302R8",     "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
     { "target": "NUCLEO_F303K8",     "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
     { "target": "NUCLEO_F303RE",     "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
--- a/config.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/config.py	Mon Aug 01 09:10:17 2016 +0100
@@ -137,6 +137,42 @@
             self.macro_name = name
             self.macro_value = None
 
+# Representation of overrides for cumulative attributes
+class ConfigCumulativeOverride:
+    def __init__(self, name, additions=set(), removals=set(), strict=False):
+        self.name = name
+        self.additions = set(additions)
+        self.removals = set(removals)
+        self.strict = strict
+
+    # Add attr to the cumulative override
+    def remove_cumulative_overrides(self, overrides):
+        for override in overrides:
+            if override in self.additions:
+                raise ConfigException("Configuration conflict. The %s %s both added and removed." % (self.name[:-1], override))
+
+        self.removals |= set(overrides)
+
+    # Remove attr from the cumulative overrides
+    def add_cumulative_overrides(self, overrides):
+        for override in overrides:
+            if (override in self.removals or (self.strict and override not in self.additions)):
+                raise ConfigException("Configuration conflict. The %s %s both added and removed." % (self.name[:-1], override))
+
+        self.additions |= set(overrides)
+
+    # Enable strict set of cumulative overrides for the specified attr
+    def strict_cumulative_overrides(self, overrides):
+        self.remove_cumulative_overrides(self.additions - set(overrides))
+        self.add_cumulative_overrides(overrides)
+        self.strict = True
+
+    def update_target(self, target):
+        setattr(target, self.name, list(
+                (set(getattr(target, self.name, [])) | self.additions) - self.removals))
+
+
+
 # 'Config' implements the mbed configuration mechanism
 class Config:
     # Libraries and applications have different names for their configuration files
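
The conflict detection that remove_features()/add_features() used to do for features alone now applies to every cumulative target attribute. A minimal illustration of the semantics, assuming both names are importable from tools.config:

    from tools.config import ConfigCumulativeOverride, ConfigException

    ov = ConfigCumulativeOverride('features')
    ov.add_cumulative_overrides(['IPV4'])
    try:
        ov.remove_cumulative_overrides(['IPV4'])
    except ConfigException as e:
        # "Configuration conflict. The feature IPV4 both added and removed."
        print(e)
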
@@ -152,7 +188,7 @@
 
     # Allowed features in configurations
     __allowed_features = [
-        "UVISOR", "BLE", "CLIENT", "IPV4", "IPV6"
+        "UVISOR", "BLE", "CLIENT", "IPV4", "IPV6", "COMMON_PAL", "STORAGE"
     ]
 
     # The initialization arguments for Config are:
@@ -184,9 +220,12 @@
         self.processed_configs = {}
         self.target = target if isinstance(target, basestring) else target.name
         self.target_labels = Target.get_target(self.target).get_labels()
-        self.added_features = set()
-        self.removed_features = set()
-        self.removed_unecessary_features = False
+
+        self.cumulative_overrides = { key: ConfigCumulativeOverride(key) 
+                                      for key in Target._Target__cumulative_attributes }
+
+        self._process_config_and_overrides(self.app_config_data, {}, "app", "application")
+        self.target_labels = Target.get_target(self.target).get_labels()
 
     # Add one or more configuration files
     def add_config_files(self, flist):
@@ -222,23 +261,6 @@
             params[full_name] = ConfigParameter(name, v if isinstance(v, dict) else {"value": v}, unit_name, unit_kind)
         return params
 
-    # Add features to the available features
-    def remove_features(self, features):
-        for feature in features:
-            if feature in self.added_features:
-                raise ConfigException("Configuration conflict. Feature %s both added and removed." % feature)
-
-        self.removed_features |= set(features)
-
-    # Remove features from the available features
-    def add_features(self, features):
-        for feature in features:
-            if (feature in self.removed_features
-                or (self.removed_unecessary_features and feature not in self.added_features)):
-                raise ConfigException("Configuration conflict. Feature %s both added and removed." % feature)
-
-        self.added_features |= set(features)
-
     # Helper function: process "config_parameters" and "target_config_overrides" in a given dictionary
 # data: the configuration data of the library/application
     # params: storage for the discovered configuration parameters
@@ -250,21 +272,25 @@
         for label, overrides in data.get("target_overrides", {}).items():
             # If the label is defined by the target or it has the special value "*", process the overrides
             if (label == '*') or (label in self.target_labels):
-                # Parse out features
-                if 'target.features' in overrides:
-                    features = overrides['target.features']
-                    self.remove_features(self.added_features - set(features))
-                    self.add_features(features)
-                    self.removed_unecessary_features = True
-                    del overrides['target.features']
+                # Check for invalid cumulative overrides in libraries
+                if (unit_kind == 'library' and 
+                    any(attr.startswith('target.extra_labels') for attr in overrides.iterkeys())):
+                    raise ConfigException("Target override '%s' in '%s' is only allowed at the application level"
+                        % ("target.extra_labels", ConfigParameter.get_display_name(unit_name, unit_kind, label)))
 
-                if 'target.features_add' in overrides:
-                    self.add_features(overrides['target.features_add'])
-                    del overrides['target.features_add']
+                # Parse out cumulative overrides
+                for attr, cumulatives in self.cumulative_overrides.iteritems():
+                    if 'target.'+attr in overrides:
+                        cumulatives.strict_cumulative_overrides(overrides['target.'+attr])
+                        del overrides['target.'+attr]
 
-                if 'target.features_remove' in overrides:
-                    self.remove_features(overrides['target.features_remove'])
-                    del overrides['target.features_remove']
+                    if 'target.'+attr+'_add' in overrides:
+                        cumulatives.add_cumulative_overrides(overrides['target.'+attr+'_add'])
+                        del overrides['target.'+attr+'_add']
+
+                    if 'target.'+attr+'_remove' in overrides:
+                        cumulatives.remove_cumulative_overrides(overrides['target.'+attr+'_remove'])
+                        del overrides['target.'+attr+'_remove']
 
                 # Consider the others as overrides
                 for name, v in overrides.items():
@@ -275,6 +301,10 @@
                     else:
                         self.config_errors.append(ConfigException("Attempt to override undefined parameter '%s' in '%s'"
                             % (full_name, ConfigParameter.get_display_name(unit_name, unit_kind, label))))
+
+        for cumulatives in self.cumulative_overrides.itervalues():
+            cumulatives.update_target(Target.get_target(self.target))
+
         return params
 
     # Read and interpret configuration data defined by targets
@@ -389,8 +419,8 @@
     def get_features(self):
         params, _ = self.get_config_data()
         self._check_required_parameters(params)
-        features = ((set(Target.get_target(self.target).features)
-            | self.added_features) - self.removed_features)
+        self.cumulative_overrides['features'].update_target(Target.get_target(self.target))
+        features = Target.get_target(self.target).features
 
         for feature in features:
             if feature not in self.__allowed_features:
--- a/export/coide.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/coide.py	Mon Aug 01 09:10:17 2016 +0100
@@ -38,6 +38,7 @@
         'NUCLEO_F072RB',
         'NUCLEO_F091RC',
         'NUCLEO_F103RB',
+        'NUCLEO_F207ZG',
         'NUCLEO_F302R8',
         'NUCLEO_F303K8',
         'NUCLEO_F303RE',
--- a/export/exporters.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/exporters.py	Mon Aug 01 09:10:17 2016 +0100
@@ -23,6 +23,14 @@
 
 class FailedBuildException(Exception) : pass
 
+# Exporter descriptor for TARGETS
+# TARGETS as class attribute for backward compatibility (allows: if in Exporter.TARGETS)
+class ExporterTargetsProperty(object):
+    def __init__(self, func):
+        self.func = func
+    def __get__(self, inst, cls):
+        return self.func(cls)
+
 class Exporter(object):
     TEMPLATE_DIR = dirname(__file__)
     DOT_IN_RELATIVE_PATH = False
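
The descriptor makes TARGETS readable on the class itself (a plain @property only works on instances), so existing checks like "if target in Exporter.TARGETS" keep working while the expensive ProGenDef scan runs once per class. A self-contained demonstration:

    class ExporterTargetsProperty(object):
        # Class-level lazy property: __get__ receives the class, not an instance
        def __init__(self, func):
            self.func = func
        def __get__(self, inst, cls):
            return self.func(cls)

    class Demo(object):
        @ExporterTargetsProperty
        def TARGETS(cls):
            return ['K64F', 'LPC1768']

    print('K64F' in Demo.TARGETS)  # True, without instantiating Demo
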
--- a/export/gccarm.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/gccarm.py	Mon Aug 01 09:10:17 2016 +0100
@@ -81,6 +81,7 @@
         'NUCLEO_F072RB',
         'NUCLEO_F091RC',
         'NUCLEO_F103RB',
+        'NUCLEO_F207ZG',
         'NUCLEO_F302R8',
         'NUCLEO_F303K8',
         'NUCLEO_F303RE',
--- a/export/iar.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/iar.py	Mon Aug 01 09:10:17 2016 +0100
@@ -18,7 +18,7 @@
 import os
 from project_generator_definitions.definitions import ProGenDef
 
-from tools.export.exporters import Exporter
+from tools.export.exporters import Exporter, ExporterTargetsProperty
 from tools.targets import TARGET_MAP, TARGET_NAMES
 
 # If you wish to add a new target, add it to project_generator_definitions, and then
@@ -35,19 +35,20 @@
 
     MBED_CONFIG_HEADER_SUPPORTED = True
 
-    @property
-    def TARGETS(self):
-        if not hasattr(self, "_targets_supported"):
-            self._targets_supported = []
+    @ExporterTargetsProperty
+    def TARGETS(cls):
+        if not hasattr(cls, "_targets_supported"):
+            cls._targets_supported = []
+            progendef = ProGenDef('iar')
             for target in TARGET_NAMES:
                 try:
-                    if (ProGenDef('iar').is_supported(str(TARGET_MAP[target])) or
-                        ProGenDef('iar').is_supported(TARGET_MAP[target].progen['target'])):
-                        self._targets_supported.append(target)
+                    if (progendef.is_supported(str(TARGET_MAP[target])) or
+                        progendef.is_supported(TARGET_MAP[target].progen['target'])):
+                        cls._targets_supported.append(target)
                 except AttributeError:
                     # target is not supported yet
                     continue
-        return self._targets_supported
+        return cls._targets_supported
 
     def generate(self, progen_build=False):
         """ Generates the project files """
--- a/export/sw4stm32.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/sw4stm32.py	Mon Aug 01 09:10:17 2016 +0100
@@ -40,6 +40,7 @@
         'NUCLEO_F072RB':    {'name': 'NUCLEO-F072RB',           'mcuId': 'STM32F072RBTx'},
         'NUCLEO_F091RC':    {'name': 'NUCLEO-F091RC',           'mcuId': 'STM32F091RCTx'},
         'NUCLEO_F103RB':    {'name': 'NUCLEO-F103RB',           'mcuId': 'STM32F103RBTx'},
+        'NUCLEO_F207ZG':    {'name': 'NUCLEO-F207ZG',           'mcuId': 'STM32F207ZGTx'},
         'NUCLEO_F302R8':    {'name': 'NUCLEO-F302R8',           'mcuId': 'STM32F302R8Tx'},
         'NUCLEO_F303RE':    {'name': 'NUCLEO-F303RE',           'mcuId': 'STM32F303RETx'},
         'NUCLEO_F334R8':    {'name': 'NUCLEO-F334R8',           'mcuId': 'STM32F334R8Tx'},
--- a/export/uvision4.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/uvision4.py	Mon Aug 01 09:10:17 2016 +0100
@@ -17,7 +17,7 @@
 from os.path import basename, join, dirname
 from project_generator_definitions.definitions import ProGenDef
 
-from tools.export.exporters import Exporter
+from tools.export.exporters import Exporter, ExporterTargetsProperty
 from tools.targets import TARGET_MAP, TARGET_NAMES
 
 # If you wish to add a new target, add it to project_generator_definitions, and then
@@ -35,19 +35,20 @@
 
     MBED_CONFIG_HEADER_SUPPORTED = True
 
-    @property
-    def TARGETS(self):
-        if not hasattr(self, "_targets_supported"):
-            self._targets_supported = []
+    @ExporterTargetsProperty
+    def TARGETS(cls):
+        if not hasattr(cls, "_targets_supported"):
+            cls._targets_supported = []
+            progendef = ProGenDef('uvision')
             for target in TARGET_NAMES:
                 try:
-                    if (ProGenDef('uvision').is_supported(str(TARGET_MAP[target])) or
-                        ProGenDef('uvision').is_supported(TARGET_MAP[target].progen['target'])):
-                        self._targets_supported.append(target)
+                    if (progendef.is_supported(str(TARGET_MAP[target])) or
+                        progendef.is_supported(TARGET_MAP[target].progen['target'])):
+                        cls._targets_supported.append(target)
                 except AttributeError:
                     # target is not supported yet
                     continue
-        return self._targets_supported
+        return cls._targets_supported
 
     def get_toolchain(self):
         return TARGET_MAP[self.target].default_toolchain
--- a/export/uvision5.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/export/uvision5.py	Mon Aug 01 09:10:17 2016 +0100
@@ -17,7 +17,7 @@
 from os.path import basename, join, dirname
 from project_generator_definitions.definitions import ProGenDef
 
-from tools.export.exporters import Exporter
+from tools.export.exporters import Exporter, ExporterTargetsProperty
 from tools.targets import TARGET_MAP, TARGET_NAMES
 
 # If you wish to add a new target, add it to project_generator_definitions, and then
@@ -35,23 +35,20 @@
 
     MBED_CONFIG_HEADER_SUPPORTED = True
 
-    # backward compatibility with our scripts
-    def __init__(self):
-        self._targets = []
-
-    @property
-    def TARGETS(self):
-        if not hasattr(self, "_targets_supported"):
-            self._targets_supported = []
+    @ExporterTargetsProperty
+    def TARGETS(cls):
+        if not hasattr(cls, "_targets_supported"):
+            cls._targets_supported = []
+            progendef = ProGenDef('uvision5')
             for target in TARGET_NAMES:
                 try:
-                    if (ProGenDef('uvision5').is_supported(str(TARGET_MAP[target])) or
-                        ProGenDef('uvision5').is_supported(TARGET_MAP[target].progen['target'])):
-                        self._targets_supported.append(target)
+                    if (progendef.is_supported(str(TARGET_MAP[target])) or
+                        progendef.is_supported(TARGET_MAP[target].progen['target'])):
+                        cls._targets_supported.append(target)
                 except AttributeError:
                     # target is not supported yet
                     continue
-        return self._targets_supported
+        return cls._targets_supported
 
     def get_toolchain(self):
         return TARGET_MAP[self.target].default_toolchain
--- a/make.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/make.py	Mon Aug 01 09:10:17 2016 +0100
@@ -45,6 +45,7 @@
 from tools.build_api import mcu_toolchain_matrix
 from utils import argparse_filestring_type
 from utils import argparse_many
+from utils import argparse_dir_not_parent
 from argparse import ArgumentTypeError
 from tools.toolchains import mbedToolchain
 from tools.settings import CLI_COLOR_MAP
@@ -112,7 +113,7 @@
                        default=None, help="The source (input) directory", action="append")
     parser.add_argument("--duration", type=int, dest="duration",
                       default=None, help="Duration of the test")
-    parser.add_argument("--build", dest="build_dir",
+    parser.add_argument("--build", dest="build_dir", type=argparse_dir_not_parent(ROOT),
                       default=None, help="The build (output) directory")
     parser.add_argument("-N", "--artifact-name", dest="artifact_name",
                       default=None, help="The built project's name")
--- a/memap.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/memap.py	Mon Aug 01 09:10:17 2016 +0100
@@ -436,16 +436,16 @@
                     csv_module_section += [i+k]
                     csv_sizes += [self.modules[i][k]]
 
-            csv_module_section += ['total_static_ram']
+            csv_module_section += ['static_ram']
             csv_sizes += [subtotal['.data']+subtotal['.bss']]
 
-            csv_module_section += ['allocated_heap']
+            csv_module_section += ['heap']
             if subtotal['.heap'] == 0:
                 csv_sizes += ['unknown']
             else:
                 csv_sizes += [subtotal['.heap']]
 
-            csv_module_section += ['allocated_stack']
+            csv_module_section += ['stack']
             if subtotal['.stack'] == 0:
                 csv_sizes += ['unknown']
             else:
--- a/project.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/project.py	Mon Aug 01 09:10:17 2016 +0100
@@ -12,8 +12,9 @@
 from tools.tests import TESTS, TEST_MAP
 from tools.tests import test_known, test_name_known
 from tools.targets import TARGET_NAMES
+from tools.libraries import LIBRARIES
 from utils import argparse_filestring_type, argparse_many
-from utils import argparse_force_lowercase_type, argparse_force_uppercase_type
+from utils import argparse_force_lowercase_type, argparse_force_uppercase_type, argparse_dir_not_parent
 from project_api import setup_project, perform_export, print_results, get_lib_symbols
 
 
@@ -57,8 +58,8 @@
 
     parser.add_argument("-b",
                       dest="build",
-                      action="store_true",
                       default=False,
+                      type=argparse_dir_not_parent(ROOT),
                       help="use the mbed library build, instead of the sources")
 
     group.add_argument("-L", "--list-tests",
@@ -134,14 +135,19 @@
     for mcu in options.mcu:
         # Program Number or name
         p, src, ide = options.program, options.source_dir, options.ide
-        project_dir, project_name, project_temp = setup_project(mcu, ide, p, src, options.build)
+        try:
+            project_dir, project_name, project_temp = setup_project(mcu, ide, p, src, options.build)
+            zip = not bool(src) # create zip when no src_dir provided
+            clean = not bool(src) # don't clean when source is provided, use actual source tree for IDE files
 
-        zip = src is []  # create zip when no src_dir provided
-        clean = src is []  # don't clean when source is provided, use acrual source tree for IDE files
-
-        # Export to selected toolchain
-        lib_symbols = get_lib_symbols(options.macros, src, p)
-        tmp_path, report = export(project_dir, project_name, ide, mcu, project_dir[0], project_temp, clean=clean, make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative)
+            # Export to selected toolchain
+            lib_symbols = get_lib_symbols(options.macros, src, p)
+            tmp_path, report = export(project_dir, project_name, ide, mcu, project_dir[0], project_temp, clean=clean, make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative)
+        except OSError as e:
+            if e.errno == 2:
+                report = dict(success=False, errormsg="Library path '%s' does not exist. Ensure that the library is built." % (e.filename))
+            else:
+                report = dict(success=False, errormsg="An OS error occurred: errno #{}".format(e.errno))
         if report['success']:
             if not zip:
                 zip_path = join(project_temp, project_name)
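
The bare errno 2 in the new except clause is ENOENT; the symbolic constant is equivalent and reads better (illustrative only, not part of this revision):

    import errno
    import os

    try:
        os.stat('/no/such/path')
    except OSError as e:
        assert e.errno == errno.ENOENT  # same check as "e.errno == 2"
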
--- a/test.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/test.py	Mon Aug 01 09:10:17 2016 +0100
@@ -31,9 +31,10 @@
 from tools.build_api import build_project, build_library
 from tools.build_api import print_build_memory_usage_results
 from tools.targets import TARGET_MAP
-from tools.utils import mkdir, ToolException, NotSupportedException
+from tools.utils import mkdir, ToolException, NotSupportedException, args_error
 from tools.test_exporters import ReportExporter, ResultExporterType
 from utils import argparse_filestring_type, argparse_lowercase_type, argparse_many
+from utils import argparse_dir_not_parent
 from tools.toolchains import mbedToolchain
 from tools.settings import CLI_COLOR_MAP
 
@@ -57,7 +58,7 @@
                           type=argparse_filestring_type,
                             default=None, help="The source (input) directory (for sources other than tests). Defaults to current directory.", action="append")
 
-        parser.add_argument("--build", dest="build_dir",
+        parser.add_argument("--build", dest="build_dir", type=argparse_dir_not_parent(ROOT),
                           default=None, help="The build (output) directory")
 
         parser.add_argument("-l", "--list", action="store_true", dest="list",
@@ -104,9 +105,19 @@
         all_tests = {}
         tests = {}
 
+        # Target
+        if options.mcu is None:
+            args_error(parser, "[ERROR] You should specify an MCU")
+        mcu = options.mcu[0]
+
+        # Toolchain
+        if options.tool is None:
+            args_error(parser, "[ERROR] You should specify a TOOLCHAIN")
+        toolchain = options.tool[0]
+
         # Find all tests in the relevant paths
         for path in all_paths:
-            all_tests.update(find_tests(path))
+            all_tests.update(find_tests(path, mcu, toolchain, options.options))
 
         # Filter tests by name if specified
         if options.names:
@@ -150,16 +161,13 @@
             if not base_source_paths:
                 base_source_paths = ['.']
             
-            
-            target = options.mcu[0]
-            
             build_report = {}
             build_properties = {}
 
             library_build_success = False
             try:
                 # Build sources
-                build_library(base_source_paths, options.build_dir, target, options.tool[0],
+                build_library(base_source_paths, options.build_dir, mcu, toolchain,
                                                 options=options.options,
                                                 jobs=options.jobs,
                                                 clean=options.clean,
@@ -186,7 +194,7 @@
                 print "Failed to build library"
             else:
                 # Build all the tests
-                test_build_success, test_build = build_tests(tests, [options.build_dir], options.build_dir, target, options.tool[0],
+                test_build_success, test_build = build_tests(tests, [options.build_dir], options.build_dir, mcu, toolchain,
                         options=options.options,
                         clean=options.clean,
                         report=build_report,
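
Requiring the target and toolchain before any scanning means find_tests() can
run with real target/toolchain context, and a missing option fails fast
instead of surfacing mid-build. A minimal sketch of that guard (args_error
below is a stand-in for the tools.utils helper; first_required is
hypothetical):

import sys

def args_error(parser, message):
    # Stand-in for tools.utils.args_error: print the message and usage, exit
    print("\n\n%s\n\n" % message)
    parser.print_help()
    sys.exit(1)

def first_required(parser, values, message):
    # options.mcu / options.tool arrive as lists (argparse_many); take the
    # first entry once we know the option was actually supplied
    if values is None:
        args_error(parser, message)
    return values[0]

# mcu = first_required(parser, options.mcu, "[ERROR] You should specify an MCU")
# toolchain = first_required(parser, options.tool, "[ERROR] You should specify a TOOLCHAIN")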
--- a/test/config_test/test23/test_data.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/test/config_test/test23/test_data.py	Mon Aug 01 09:10:17 2016 +0100
@@ -3,6 +3,6 @@
 expected_results = {
     "K64F": {
         "desc": "test feature collisions",
-        "exception_msg": "Configuration conflict. Feature IPV4 both added and removed." 
+        "exception_msg": "Configuration conflict. The feature IPV4 both added and removed." 
     }
 }
--- a/test/config_test/test25/test_data.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/test/config_test/test25/test_data.py	Mon Aug 01 09:10:17 2016 +0100
@@ -3,6 +3,6 @@
 expected_results = {
     "K64F": {
         "desc": "test recursive feature collisions",
-        "exception_msg": "Configuration conflict. Feature UVISOR both added and removed." 
+        "exception_msg": "Configuration conflict. The feature UVISOR both added and removed." 
     }
 }
--- a/test_api.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/test_api.py	Mon Aug 01 09:10:17 2016 +0100
@@ -31,10 +31,11 @@
 from types import ListType
 from colorama import Fore, Back, Style
 from prettytable import PrettyTable
+from copy import copy
 
 from time import sleep, time
 from Queue import Queue, Empty
-from os.path import join, exists, basename
+from os.path import join, exists, basename, relpath
 from threading import Thread, Lock
 from subprocess import Popen, PIPE
 
@@ -56,7 +57,8 @@
 from tools.build_api import prep_properties
 from tools.build_api import create_result
 from tools.build_api import add_result_to_report
-from tools.build_api import scan_for_source_paths
+from tools.build_api import prepare_toolchain
+from tools.build_api import scan_resources
 from tools.libraries import LIBRARIES, LIBRARY_MAP
 from tools.toolchains import TOOLCHAIN_PATHS
 from tools.toolchains import TOOLCHAINS
@@ -65,6 +67,7 @@
 from tools.utils import argparse_uppercase_type
 from tools.utils import argparse_lowercase_type
 from tools.utils import argparse_many
+from tools.utils import get_path_depth
 
 import tools.host_tests.host_tests_plugins as host_tests_plugins
 
@@ -1987,33 +1990,46 @@
 
     return "-".join(name_parts).lower()
 
-def find_tests(base_dir):
-    """Given any directory, walk through the subdirectories and find all tests"""
+def find_tests(base_dir, target_name, toolchain_name, options=None):
+    """ Finds all tests in a directory recursively
+    base_dir: path to the directory to scan for tests (ex. 'path/to/project')
+    target_name: name of the target to use for scanning (ex. 'K64F')
+    toolchain_name: name of the toolchain to use for scanning (ex. 'GCC_ARM')
+    options: Compile options to pass to the toolchain (ex. ['debug-info'])
+    """
+
+    tests = {}
+
+    # Prepare the toolchain
+    toolchain = prepare_toolchain(base_dir, target_name, toolchain_name, options=options, silent=True)
+
+    # Scan the directory for paths to probe for 'TESTS' folders
+    base_resources = scan_resources(base_dir, toolchain)
+
+    dirs = base_resources.inc_dirs
+    for directory in dirs:
+        subdirs = os.listdir(directory)
 
-    def find_test_in_directory(directory, tests_path):
-        """Given a 'TESTS' directory, return a dictionary of test names and test paths.
-        The formate of the dictionary is {"test-name": "./path/to/test"}"""
-        test = None
-        if tests_path in directory:
-            head, test_case_directory = os.path.split(directory)
-            if test_case_directory != tests_path and test_case_directory != "host_tests":
-                head, test_group_directory = os.path.split(head)
-                if test_group_directory != tests_path and test_case_directory != "host_tests":
-                    test = {
-                        "name": test_path_to_name(directory),
-                        "path": directory
-                    }
+        # If the directory contains a subdirectory called 'TESTS', scan it for test cases
+        if 'TESTS' in subdirs:
+            walk_base_dir = join(directory, 'TESTS')
+            test_resources = toolchain.scan_resources(walk_base_dir, base_path=base_dir)
+
+            # Loop through all subdirectories
+            for d in test_resources.inc_dirs:
 
-        return test
-
-    tests_path = 'TESTS'
-    tests = {}
-    dirs = scan_for_source_paths(base_dir)
-
-    for directory in dirs:
-        test = find_test_in_directory(directory, tests_path)
-        if test:
-            tests[test['name']] = test['path']
+                # If the test case folder is not called 'host_tests' and it is
+                # located two folders down from the main 'TESTS' folder (ex. TESTS/testgroup/testcase)
+                # then add it to the tests
+                path_depth = get_path_depth(relpath(d, walk_base_dir))
+                if path_depth == 2:
+                    test_group_directory_path, test_case_directory = os.path.split(d)
+                    test_group_directory = os.path.basename(test_group_directory_path)
+                    
+                    # Check to make sure the discovered folder is not in a host test directory
+                    if test_case_directory != 'host_tests' and test_group_directory != 'host_tests':
+                        test_name = test_path_to_name(d)
+                        tests[test_name] = d
 
     return tests
 
@@ -2072,13 +2088,17 @@
         test_build_path = os.path.join(build_path, test_path)
         src_path = base_source_paths + [test_path]
         bin_file = None
+        test_case_folder_name = os.path.basename(test_path)
+        
+        
         try:
             bin_file = build_project(src_path, test_build_path, target, toolchain_name,
                                      options=options,
                                      jobs=jobs,
                                      clean=clean,
                                      macros=macros,
-                                     name=test_name,
+                                     name=test_case_folder_name,
+                                     project_id=test_name,
                                      report=report,
                                      properties=properties,
                                      verbose=verbose)
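
The new find_tests() keeps only directories exactly two levels below a TESTS
folder (TESTS/<group>/<case>) and excludes host_tests at either level. A
minimal standalone sketch of that filter using a plain os.walk instead of the
toolchain resource scan (the name joining is simplified; the real code uses
test_path_to_name):

import os
from os.path import relpath

def find_test_dirs(tests_root):
    """Return {'<group>-<case>': path} for TESTS/<group>/<case> layouts."""
    tests = {}
    for dirpath, dirnames, _ in os.walk(tests_root):
        rel = relpath(dirpath, tests_root)
        if rel == '.':
            continue
        depth = rel.count(os.sep) + 1        # 1 for groups, 2 for cases
        if depth == 2:
            group, case = os.path.split(rel)
            # Mirror the hunk's host_tests exclusion at both levels
            if case != 'host_tests' and group != 'host_tests':
                tests['-'.join(rel.lower().split(os.sep))] = dirpath
            dirnames[:] = []                 # do not descend below a case
    return tests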
--- a/tests.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/tests.py	Mon Aug 01 09:10:17 2016 +0100
@@ -181,7 +181,7 @@
         "automated": True,
         "peripherals": ["analog_loop"],
         "mcu": ["LPC1768", "LPC2368", "LPC2460", "KL25Z", "K64F", "K22F", "LPC4088", "LPC1549",
-                "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303K8", "NUCLEO_F303RE",
+                "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303K8", "NUCLEO_F303RE", "NUCLEO_F207ZG",
                 "NUCLEO_F334R8", "NUCLEO_L053R8", "NUCLEO_L073RZ", "NUCLEO_L152RE",
                 "NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "NUCLEO_F446ZE",
                 "DISCO_F407VG", "DISCO_F746NG", "NUCLEO_F746ZG",
@@ -322,7 +322,7 @@
         "duration": 20,
         "peripherals": ["can_transceiver"],
         "mcu": ["LPC1549", "LPC1768","B96B_F446VE", "VK_RZ_A1H",
-		"NUCLEO_F091RC", "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8",
+                "NUCLEO_F091RC", "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F207ZG",
         "NUCLEO_F303RE", "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE","NUCLEO_F446ZE",
         "DISCO_F469NI", "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG",
         "DISCO_F746NG", "DISCO_L476VG", "NUCLEO_L476RG", "NUCLEO_L432KC"]
@@ -598,7 +598,7 @@
         "dependencies": [MBED_LIBRARIES],
         "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
                 "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F303RE",
-                "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE","NUCLEO_F446ZE", "DISCO_F469NI",
+                "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE","NUCLEO_F446ZE", "DISCO_F469NI", "NUCLEO_F207ZG",
                 "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG", "DISCO_F746NG",
                 "NUCLEO_L476RG", "NUCLEO_L432KC"]
     },
@@ -606,7 +606,7 @@
         "id": "MBED_30", "description": "CAN network test using interrupts",
         "source_dir": join(TEST_DIR, "mbed", "can_interrupt"),
         "dependencies": [MBED_LIBRARIES],
-        "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
+        "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC", "NUCLEO_F207ZG",
                 "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F303RE",
                 "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE", "NUCLEO_F446ZE", "DISCO_F469NI",
                 "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG", "DISCO_F746NG",
@@ -727,7 +727,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
                 "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
@@ -742,7 +742,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
                 "NUCLEO_F446ZE", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
@@ -758,7 +758,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE",  "NUCLEO_F446ZE",
                 "NUCLEO_F103RB", "DISCO_F746NG",
@@ -774,7 +774,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F446ZE",
                 "NUCLEO_F103RB", "DISCO_F746NG",
@@ -790,7 +790,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
                 "NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
@@ -805,7 +805,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
                 "NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
@@ -822,7 +822,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
                 "NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
@@ -837,7 +837,7 @@
         "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
                 "KL25Z", "KL05Z", "K64F", "KL46Z",
                 "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
-                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+                "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
                 "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
                 "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
                 "NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
@@ -852,7 +852,7 @@
         "peripherals": ["SD"],
         "mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z",
                 "KL05Z", "K64F", "KL46Z", "RZ_A1H",
-                "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE", "NUCLEO_F410RB", "DISCO_F469NI"],
+                "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE", "NUCLEO_F410RB", "DISCO_F469NI", "NUCLEO_F207ZG"],
     },
 
     # Networking Tests
--- a/toolchains/__init__.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/toolchains/__init__.py	Mon Aug 01 09:10:17 2016 +0100
@@ -17,7 +17,7 @@
 
 import re
 import sys
-from os import stat, walk, getcwd, sep
+from os import stat, walk, getcwd, sep, remove
 from copy import copy
 from time import time, sleep
 from types import ListType
@@ -26,6 +26,7 @@
 from inspect import getmro
 from copy import deepcopy
 from tools.config import Config
+from abc import ABCMeta, abstractmethod
 
 from multiprocessing import Pool, cpu_count
 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker
@@ -188,8 +189,14 @@
 
 
 class mbedToolchain:
+    # Verbose logging
     VERBOSE = True
+
+    # Compile C files as CPP
     COMPILE_C_AS_CPP = False
+
+    # Response files for compiling, includes, linking and archiving.
+    # Not needed on POSIX systems, where the typical argument-length limit is 2 megabytes
     RESPONSE_FILES = True
 
     CORTEX_SYMBOLS = {
@@ -207,6 +214,8 @@
 
     MBED_CONFIG_FILE_NAME="mbed_config.h"
 
+    __metaclass__ = ABCMeta
+
     def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
         self.target = target
         self.name = self.__class__.__name__
@@ -226,9 +235,18 @@
         # Labels generated from toolchain and target rules/features (used for selective build)
         self.labels = None
 
+        # This will hold the initialized config object
+        self.config = None
+
         # This will hold the configuration data (as returned by Config.get_config_data())
         self.config_data = None
 
+        # This will hold the location of the configuration file or None if there's no configuration available
+        self.config_file = None
+
+        # Call guard for "get_config_data" (see the comments of get_config_data for details)
+        self.config_processed = False
+
         # Non-incremental compile
         self.build_all = False
 
@@ -242,8 +260,6 @@
         # Number of concurrent build jobs. 0 means auto (based on host system cores)
         self.jobs = 0
 
-        self.CHROOT = None
-
         # Ignore patterns from .mbedignore files
         self.ignore_patterns = []
 
@@ -285,12 +301,20 @@
         # uVisor specific rules
         if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
             self.target.core = re.sub(r"F$", '', self.target.core)
-            
+
+        # The stat cache reduces the number of IO requests made when stat-ing
+        # header files during dependency checks. See need_update()
         self.stat_cache = {}
 
+        # Used by the mbed Online Build System to build in chrooted environment
+        self.CHROOT = None
+
+        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
         self.init()
 
-    # This allows post __init__() hooks. Do not use
+    # Used for post __init__() hooks
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def init(self):
         return True
 
@@ -340,6 +364,8 @@
         elif event['type'] == 'progress':
             self.print_notify(event) # standard handle
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def notify(self, event):
         """ Little closure for notify functions
         """
@@ -392,6 +418,8 @@
             }
         return self.labels
 
+
+    # Determine whether a source file needs updating/compiling
     def need_update(self, target, dependencies):
         if self.build_all:
             return True
@@ -601,6 +629,8 @@
                 mkdir(dirname(target))
                 copyfile(source, target)
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def relative_object_path(self, build_path, base_dir, source):
         source_dir, name, _ = split_path(source)
 
@@ -610,6 +640,8 @@
             mkdir(obj_dir)
         return join(obj_dir, name + '.o')
 
+    # Generate response file for all includes.
+    # ARM, GCC, IAR cross compatible
     def get_inc_file(self, includes):
         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
         if not exists(include_file):
@@ -625,6 +657,8 @@
                 f.write(string)
         return include_file
 
+    # Generate response file for all objects when linking.
+    # ARM, GCC, IAR cross compatible
     def get_link_file(self, cmd):
         link_file = join(self.build_dir, ".link_files.txt")
         with open(link_file, "wb") as f:
@@ -639,6 +673,8 @@
             f.write(string)
         return link_file
  
+    # Generate response file for all objects when archiving.
+    # ARM, GCC, IAR cross compatible
     def get_arch_file(self, objects):
         archive_file = join(self.build_dir, ".archive_files.txt")
         with open(archive_file, "wb") as f:
@@ -649,6 +685,8 @@
             f.write(string)
         return archive_file
 
+    # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def compile_sources(self, resources, build_path, inc_dirs=None):
         # Web IDE progress bar for project build
         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
@@ -674,6 +712,9 @@
         work_dir = getcwd()
         self.prev_dir = None
 
+        # Generate configuration header (this will update self.build_all if needed)
+        self.get_config_header()
+
         # Sort compile queue for consistency
         files_to_compile.sort()
         for source in files_to_compile:
@@ -699,6 +740,7 @@
         else:
             return self.compile_seq(queue, objects)
 
+    # Compile the source file queue in sequential order
     def compile_seq(self, queue, objects):
         for item in queue:
             result = compile_worker(item)
@@ -715,6 +757,7 @@
             objects.append(result['object'])
         return objects
 
+    # Compile the source file queue in parallel using a pool of worker processes
     def compile_queue(self, queue, objects):
         jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
         p = Pool(processes=jobs_count)
@@ -764,6 +807,7 @@
 
         return objects
 
+    # Determine the compile command based on the source file type
     def compile_command(self, source, object, includes):
         # Check dependencies
         _, ext = splitext(source)
@@ -787,9 +831,39 @@
 
         return None
 
+    @abstractmethod
+    def parse_dependencies(self, dep_path):
+        """Parse the dependency information generated by the compiler.
+
+        Positional arguments:
+        dep_path -- the path to a file generated by a previous run of the compiler
+
+        Return value:
+        A list of all source files that the dependency file indicated were dependencies
+
+        Side effects:
+        None
+        """
+        raise NotImplementedError
+
     def is_not_supported_error(self, output):
         return "#error directive: [NOT_SUPPORTED]" in output
 
+    @abstractmethod
+    def parse_output(self, output):
+        """Take in compiler output and extract sinlge line warnings and errors from it.
+
+        Positional arguments:
+        output -- a string of all the messages emitted by a run of the compiler
+
+        Return value:
+        None
+
+        Side effects:
+        call self.cc_info or self.notify with a description of the event generated by the compiler
+        """
+        raise NotImplementedError
+
     def compile_output(self, output=[]):
         _rc = output[0]
         _stderr = output[1]
@@ -858,6 +932,8 @@
 
         return bin, needed_update
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def default_cmd(self, command):
         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
         self.debug("Return: %s"% _rc)
@@ -876,6 +952,8 @@
     def info(self, message):
         self.notify({'type': 'info', 'message': message})
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def debug(self, message):
         if self.VERBOSE:
             if type(message) is ListType:
@@ -883,11 +961,15 @@
             message = "[DEBUG] " + message
             self.notify({'type': 'debug', 'message': message})
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def cc_info(self, info=None):
         if info is not None:
             info['type'] = 'cc'
             self.notify(info)
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def cc_verbose(self, message, file=""):
         self.debug(message)
 
@@ -903,6 +985,8 @@
     def var(self, key, value):
         self.notify({'type': 'var', 'key': key, 'val': value})
 
+    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
+    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def mem_stats(self, map):
         """! Creates parser object
         @param map Path to linker map file to parse and decode
@@ -939,25 +1023,201 @@
     def set_config_data(self, config_data):
         self.config_data = config_data
 
-    # Return the location of the config header. This function will create the config
-    # header first if needed. The header will be written in a file called "mbed_conf.h"
-    # located in the project's build directory.
-    # If config headers are not used (self.config_header_content is None), the function
-    # returns None
+    # Creates the configuration header if needed:
+    # - if there is no configuration data, "mbed_config.h" is not created (or is deleted if it exists).
+    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
+    # - if the configuration data is identical to that of the previous build,
+    #   "mbed_config.h" is left untouched.
+    # - if there is new configuration data, "mbed_config.h" is overwritten.
+    # The file is only manipulated on the first call for the lifetime of this toolchain
+    # instance; the "config_processed" flag (below) turns later calls into simple lookups.
+    # The function returns the location of the configuration file, or None if there is no
+    # configuration data available (and thus no configuration file).
     def get_config_header(self):
-        if self.config_data is None:
-            return None
-        config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
-        if not exists(config_file):
-            with open(config_file, "wt") as f:
-                f.write(Config.config_to_header(self.config_data))
-        return config_file
+        if self.config_processed: # this function was already called, return its result
+            return self.config_file
+        # The config file is located in the build directory
+        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
+        # If the file exists, read its current content in prev_data
+        if exists(self.config_file):
+            with open(self.config_file, "rt") as f:
+                prev_data = f.read()
+        else:
+            prev_data = None
+        # Get the current configuration data
+        crt_data = Config.config_to_header(self.config_data) if self.config_data else None
+        # "changed" indicates if a configuration change was detected
+        changed = False
+        if prev_data is not None: # a previous mbed_config.h exists
+            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
+                remove(self.config_file)
+                self.config_file = None # this means "config file not present"
+                changed = True
+            elif crt_data != prev_data: # different content of config file
+                with open(self.config_file, "wt") as f:
+                    f.write(crt_data)
+                changed = True
+        else: # a previous mbed_config.h does not exist
+            if crt_data is not None: # there's configuration data available
+                with open(self.config_file, "wt") as f:
+                    f.write(crt_data)
+                changed = True
+            else:
+                self.config_file = None # this means "config file not present"
+        # If there was a change in configuration, rebuild everything
+        self.build_all = changed
+        # Make sure that this function will only return the location of the configuration
+        # file for subsequent calls, without trying to manipulate its content in any way.
+        self.config_processed = True
+        return self.config_file
+
+    @abstractmethod
+    def get_config_option(self, config_header):
+        """Generate the compiler option that forces the inclusion of the configuration
+        header file.
+
+        Positional arguments:
+        config_header -- The configuration header that will be included within all source files
+
+        Return value:
+        A list of the command line arguments that will force the inclusion of the specified header
+
+        Side effects:
+        None
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def assemble(self, source, object, includes):
+        """Generate the command line that assembles.
+
+        Positional arguments:
+        source -- a file path that is the file to assemble
+        object -- a file path that is the destination object
+        includes -- a list of all directories where header files may be found
+
+        Return value:
+        The complete command line, as a list, that would invoke the assembler
+        on the source file, include all the include paths, and generate
+        the specified object file.
+
+        Side effects:
+        None
+
+        Note:
+        This method should be decorated with @hook_tool.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def compile_c(self, source, object, includes):
+        """Generate the command line that compiles a C source file.
+
+        Positional arguments:
+        source -- the C source file to compile
+        object -- the destination object file
+        includes -- a list of all the directories where header files may be found
+
+        Return value:
+        The complete command line, as a list, that would invoke the C compiler
+        on the source file, include all the include paths, and generate the
+        specified object file.
+
+        Side effects:
+        None
+
+        Note:
+        This method should be decorated with @hook_tool.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def compile_cpp(self, source, object, includes):
+        """Generate the command line that compiles a C++ source file.
+
+        Positional arguments:
+        source -- the C++ source file to compile
+        object -- the destination object file
+        includes -- a list of all the directories where header files may be found
+
+        Return value:
+        The complete command line, as a list, that would invoke the C++ compiler
+        on the source file, include all the include paths, and generate the
+        specified object file.
+
+        Side effects:
+        None
+
+        Note:
+        This method should be decorated with @hook_tool.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def link(self, output, objects, libraries, lib_dirs, mem_map):
+        """Run the linker to create an executable and memory map.
+
+        Positional arguments:
+        output -- the file name to place the executable in
+        objects -- all of the object files to link
+        libraries -- all of the required libraries
+        lib_dirs -- where the required libraries are located
+        mem_map -- the location where the memory map file should be stored
+
+        Return value:
+        None
+
+        Side effect:
+        Runs the linker to produce the executable.
+
+        Note:
+        This method should be decorated with @hook_tool.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def archive(self, objects, lib_path):
+        """Run the command line that creates an archive.
+
+        Positional arguments:
+        objects -- a list of all the object files that should be archived
+        lib_path -- the file name of the resulting library file
+
+        Return value:
+        None
+
+        Side effect:
+        Runs the archiving tool to produce the library file.
+
+        Note:
+        This method should be decorated with @hook_tool.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def binary(self, resources, elf, bin):
+        """Run the command line that will Extract a simplified binary file.
+
+        Positional arguments:
+        resources -- A resources object (Is not used in any of the toolchains)
+        elf -- the executable file that is to be converted
+        bin -- the file name of the to be created simplified binary file
+
+        Return value:
+        None
+
+        Side effect:
+        Runs the elf2bin tool to produce the simplified binary file.
+
+        Note:
+        This method should be decorated with @hook_tool.
+        """
+        raise NotImplementedError
 
     # Return the list of macros generated by the build system
     def get_config_macros(self):
         return Config.config_to_macros(self.config_data) if self.config_data else []
 
-
 from tools.settings import ARM_PATH
 from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
 from tools.settings import IAR_PATH
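
The additions above turn mbedToolchain into an abstract base class: ABCMeta
plus @abstractmethod means a concrete toolchain (ARM, GCC_ARM, IAR) cannot be
instantiated until it implements compile_c, link, archive and the rest. A
minimal sketch of the same pattern with illustrative names (not the real
class):

from abc import ABCMeta, abstractmethod

class Toolchain(object):
    __metaclass__ = ABCMeta              # Python 2 spelling, as in the hunk

    @abstractmethod
    def compile_c(self, source, object, includes):
        # Subclasses must override; raising NotImplementedError (rather than
        # the non-exception NotImplemented) keeps a direct call well-defined
        raise NotImplementedError

class GCC(Toolchain):
    def compile_c(self, source, object, includes):
        return ["arm-none-eabi-gcc", "-c", source, "-o", object]

gcc = GCC()       # fine: the abstract method is implemented
# Toolchain()    -> TypeError: can't instantiate abstract class Toolchain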
--- a/toolchains/arm.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/toolchains/arm.py	Mon Aug 01 09:10:17 2016 +0100
@@ -108,6 +108,7 @@
                     'toolchain_name': self.name
                 }
             elif msg is not None:
+                # Determine the warning/error column by calculating the ^ position
                 match = ARM.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))
--- a/toolchains/gcc.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/toolchains/gcc.py	Mon Aug 01 09:10:17 2016 +0100
@@ -93,7 +93,7 @@
             self.flags["common"].append("-g")
             self.flags["common"].append("-O0")
         else:
-            self.flags["common"].append("-O2")
+            self.flags["common"].append("-Os")
 
         main_cc = join(tool_path, "arm-none-eabi-gcc")
         main_cppc = join(tool_path, "arm-none-eabi-g++")
@@ -153,6 +153,7 @@
                     'toolchain_name': self.name
                 }
             elif msg is not None:
+                # Determine the warning/error column by calculating the ^ position
                 match = GCC.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))
@@ -288,14 +289,6 @@
             self.cc += ["-DMBED_RTOS_SINGLE_THREAD"]
             self.cppc += ["-DMBED_RTOS_SINGLE_THREAD"]
             self.macros.extend(["MBED_RTOS_SINGLE_THREAD"])
-
-        if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368", "ARM_BEETLE_SOC"]:
-            self.ld.extend(["-u _printf_float", "-u _scanf_float"])
-            self.flags['ld'].extend(["-u _printf_float", "-u _scanf_float"])
-        elif target.name in ["RZ_A1H", "VK_RZ_A1H", "ARCH_MAX", "DISCO_F407VG", "DISCO_F429ZI", "DISCO_F469NI", "NUCLEO_F401RE", "NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F429ZI", "NUCLEO_F446RE", "NUCLEO_F446ZE", "ELMO_F411RE", "MTS_MDOT_F411RE", "MTS_DRAGONFLY_F411RE", "DISCO_F746NG"]:
-            self.ld.extend(["-u_printf_float", "-u_scanf_float"])
-            self.flags['ld'].extend(["-u_printf_float", "-u_scanf_float"])
-
         self.sys_libs.append("nosys")
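
Two behavioural changes land in gcc.py: release builds now optimize for size
("-Os" replaces "-O2"), and the per-target "-u _printf_float"/"-u _scanf_float"
linker flags are dropped, so float printf/scanf support is no longer forced
into those images. A minimal sketch of the debug/release flag split, assuming
the "debug-info" option string used elsewhere in these tools:

def gcc_common_flags(options):
    # Debug builds keep symbols and disable optimization; release builds
    # optimize for size, since flash is usually the scarce resource
    if "debug-info" in (options or []):
        return ["-g", "-O0"]
    return ["-Os"]

assert gcc_common_flags(["debug-info"]) == ["-g", "-O0"]
assert gcc_common_flags(None) == ["-Os"]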
 
 
--- a/toolchains/iar.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/toolchains/iar.py	Mon Aug 01 09:10:17 2016 +0100
@@ -123,6 +123,7 @@
                     'toolchain_name': self.name
                 }
             elif msg is not None:
+                # Determine the warning/error column by calculating the ^ position
                 match = IAR.INDEX_PATTERN.match(line)
                 if match is not None:
                     msg['col'] = len(match.group('col'))
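
The same comment now appears in arm.py, gcc.py and iar.py: all three
compilers print a caret line under the offending source line, and the column
is recovered as the length of the whitespace run preceding '^'. A minimal
sketch of that technique (this INDEX_PATTERN is a simplified stand-in for
each toolchain's own pattern):

import re

INDEX_PATTERN = re.compile(r'(?P<col>\s*)\^')

def caret_column(line):
    # "     ^" -> 5: the caret sits under the offending character
    match = INDEX_PATTERN.match(line)
    return len(match.group('col')) if match else None

assert caret_column("     ^") == 5
assert caret_column("no caret here") is None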
--- a/utils.py	Sat Jul 16 22:51:17 2016 +0100
+++ b/utils.py	Mon Aug 01 09:10:17 2016 +0100
@@ -21,7 +21,7 @@
 import math
 from os import listdir, remove, makedirs
 from shutil import copyfile
-from os.path import isdir, join, exists, split, relpath, splitext
+from os.path import isdir, join, exists, split, relpath, splitext, abspath, commonprefix, normpath
 from subprocess import Popen, PIPE, STDOUT, call
 import json
 from collections import OrderedDict
@@ -167,12 +167,32 @@
 class NotSupportedException(Exception):
     pass
 
+class InvalidReleaseTargetException(Exception):
+    pass
+
 def split_path(path):
     base, file = split(path)
     name, ext = splitext(file)
     return base, name, ext
 
 
+def get_path_depth(path):
+    """ Given a path, return the number of directory levels present.
+        This roughly translates to the number of path separators (os.sep) + 1.
+        Ex. Given "path/to/dir", this would return 3
+        Special cases: "." and "/" return 0
+    """
+    normalized_path = normpath(path)
+    path_depth = 0
+    head, tail = split(normalized_path)
+
+    while(tail and tail != '.'):
+        path_depth += 1
+        head, tail = split(head)
+
+    return path_depth
+
+
 def args_error(parser, message):
     print "\n\n%s\n\n" % message
     parser.print_help()
@@ -307,3 +327,14 @@
             append = append.ljust(total_width)
         output += append
     return output
+
+# Fail if the provided argument is a parent of the specified directory
+def argparse_dir_not_parent(other):
+    def parse_type(not_parent):
+        abs_other = abspath(other)
+        abs_not_parent = abspath(not_parent)
+        if abs_not_parent == commonprefix([abs_not_parent, abs_other]):
+            raise argparse.ArgumentTypeError("{0} may not be a parent directory of {1}".format(not_parent, other))
+        else:
+            return not_parent
+    return parse_type
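
Both new helpers are exercised by the changes above: get_path_depth() drives
the TESTS/<group>/<case> depth filter in find_tests(), and
argparse_dir_not_parent() guards --build so the output directory cannot be a
parent of (and thus swallow) the source tree. A minimal usage sketch (the
parser below is hypothetical; import paths follow the tools package layout):

import argparse
from tools.utils import get_path_depth, argparse_dir_not_parent

assert get_path_depth("TESTS/network/tcp") == 3
assert get_path_depth(".") == 0

parser = argparse.ArgumentParser()
# Reject a --build directory that is a parent of ./tools, e.g. "--build ."
parser.add_argument("--build", type=argparse_dir_not_parent("tools"))
args = parser.parse_args(["--build", "BUILD/out"])   # accepted
# parser.parse_args(["--build", "."]) exits: "." is a parent of ./tools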