Clone of official tools

Revision: 9:2d27d77ada5c
Parent:   8:a8ac6ed29081
Child:    13:ab47a20b66f0
--- a/build_api.py	Tue Jun 07 11:35:02 2016 +0100
+++ b/build_api.py	Tue Jun 14 11:07:30 2016 +0100
@@ -19,13 +19,13 @@
 import tempfile
 import colorama
 
-
+from copy import copy
 from types import ListType
 from shutil import rmtree
-from os.path import join, exists, basename, abspath, normpath
+from os.path import join, exists, basename, abspath, normpath, relpath
 from os import getcwd, walk
 from time import time
-from copy import copy
+import fnmatch
 
 from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException
 from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
@@ -128,7 +128,7 @@
 
     for path in src_paths:
         profile = get_build_profile(path)
 
     # If the configuration object was not yet created, create it now
     config = config or Config(target, src_paths)
 
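The `config = config or Config(target, src_paths)` context line uses Python's short-circuiting `or` to reuse a caller-supplied configuration object and only build a fresh one when none was passed; note also that the `for path in src_paths` loop above keeps only the profile of the last scanned path. A minimal sketch of the defaulting idiom, with a stand-in `Config` rather than the real `tools.config.Config`:

    # Stand-in Config for illustration; the real class lives in tools.config.
    class Config(object):
        def __init__(self, target, src_paths=None):
            self.target = target
            self.src_paths = src_paths or []

    def prepare(target, src_paths, config=None):
        # Reuse the caller's Config if one was given, otherwise create it now
        config = config or Config(target, src_paths)
        return config

    fresh = prepare("LPC1768", ["./source"])          # builds a new Config
    reused = prepare("LPC1768", ["./source"], fresh)  # hands back the same object
    assert reused is fresh
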
@@ -210,9 +210,9 @@
             resources.objects.extend(objects)
 
         # Link Program
-        res, needed_update = toolchain.link_program(resources, build_path, name)
+        res, _ = toolchain.link_program(resources, build_path, name)
 
-        if report != None and needed_update:
+        if report != None:
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["output"] = toolchain.get_output()
@@ -237,8 +237,6 @@
             if toolchain_output:
                 cur_result["output"] += toolchain_output
 
-            cur_result["output"] += str(e)
-
             add_result_to_report(report, cur_result)
 
         # Let Exception propagate
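Both report-handling hunks above follow the same pattern: a result record is created up front, filled in as the build proceeds, and filed into the report whether or not the link step rebuilt anything (the `needed_update` gate is gone). A hedged sketch of that plumbing, assuming a nested-dict report layout; the real `create_result`/`add_result_to_report` helpers may differ in detail:

    from time import time

    def create_result(target_name, toolchain_name, id_name, description):
        # Skeleton record; the build code later fills elapsed_time/output/result
        return {"target_name": target_name, "toolchain_name": toolchain_name,
                "id": id_name, "description": description,
                "elapsed_time": 0, "output": "", "result": "OK"}

    def add_result_to_report(report, result):
        # Assumed layout: report[target][toolchain][id] -> list of result dicts
        by_toolchain = report.setdefault(result["target_name"], {})
        by_id = by_toolchain.setdefault(result["toolchain_name"], {})
        by_id.setdefault(result["id"], []).append(result)

    report = {}
    start = time()
    cur_result = create_result("LPC1768", "GCC_ARM", "MBED", "mbed library")
    cur_result["elapsed_time"] = time() - start
    add_result_to_report(report, cur_result)
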
@@ -307,26 +305,43 @@
         toolchain.info("Building library %s (%s, %s)" % (name, target.name, toolchain_name))
 
         # Scan Resources
-        resources = []
-        for src_path in src_paths:
-            resources.append(toolchain.scan_resources(src_path))
+        resources = None
+        for path in src_paths:
+            # Scan resources
+            resource = toolchain.scan_resources(path)
+
+            # Copy headers, objects and static libraries - all files needed for static lib
+            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
+            toolchain.copy_files(resource.objects, build_path, rel_path=resource.base_path)
+            toolchain.copy_files(resource.libraries, build_path, rel_path=resource.base_path)
+            if resource.linker_script:
+                toolchain.copy_files(resource.linker_script, build_path, rel_path=resource.base_path)
+
+            # Extend resources collection
+            if not resources:
+                resources = resource
+            else:
+                resources.add(resource)
+
+        # Add any additional include directories supplied by the caller
+        if inc_dirs:
+            if type(inc_dirs) == ListType:
+                resources.inc_dirs.extend(inc_dirs)
+            else:
+                resources.inc_dirs.append(inc_dirs)
 
         # Add extra include directories / files which are required by the library.
         # These files are usually not in the same directory as the source files,
         # so the previous scan will not include them.
         if inc_dirs_ext is not None:
             for inc_ext in inc_dirs_ext:
-                resources.append(toolchain.scan_resources(inc_ext))
+                resources.add(toolchain.scan_resources(inc_ext))
 
         # Dependencies Include Paths
-        dependencies_include_dir = []
         if dependencies_paths is not None:
             for path in dependencies_paths:
                 lib_resources = toolchain.scan_resources(path)
-                dependencies_include_dir.extend(lib_resources.inc_dirs)
-
-        if inc_dirs:
-            dependencies_include_dir.extend(inc_dirs)
+                resources.inc_dirs.extend(lib_resources.inc_dirs)
 
         if archive:
             # Use temp path when building archive
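The rewrite above replaces the old list of per-path scan results with a single collection that grows via `resources.add(resource)`, so later stages can consult one object for include dirs, objects and JSON config files. A pared-down, hypothetical `Resources` showing the merge pattern (the real class in the toolchains module tracks more file categories):

    class Resources(object):
        def __init__(self, base_path=None):
            self.base_path = base_path
            self.inc_dirs = []
            self.headers = []
            self.objects = []
            self.libraries = []
            self.json_files = []
            self.linker_script = None

        def add(self, other):
            # Merge another scan result into this one, category by category
            self.inc_dirs.extend(other.inc_dirs)
            self.headers.extend(other.headers)
            self.objects.extend(other.objects)
            self.libraries.extend(other.libraries)
            self.json_files.extend(other.json_files)
            self.linker_script = self.linker_script or other.linker_script

    merged = None
    for scanned in [Resources("lib/a"), Resources("lib/b")]:
        if not merged:
            merged = scanned
        else:
            merged.add(scanned)
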
@@ -337,29 +352,21 @@
 
         # Handle configuration
         config = Config(target)
-
-        # Copy headers, objects and static libraries
-        for resource in resources:
-            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
-            toolchain.copy_files(resource.objects, build_path, rel_path=resource.base_path)
-            toolchain.copy_files(resource.libraries, build_path, rel_path=resource.base_path)
-            if resource.linker_script:
-                toolchain.copy_files(resource.linker_script, build_path, rel_path=resource.base_path)
-            config.add_config_files(resource.json_files)
-
+        # Update the configuration with any .json files found while scanning
+        config.add_config_files(resources.json_files)
+        # And add the configuration macros to the toolchain
         toolchain.add_macros(config.get_config_data_macros())
 
         # Compile Sources
-        objects = []
-        for resource in resources:
-            objects.extend(toolchain.compile_sources(resource, abspath(tmp_path), dependencies_include_dir))
+        for path in src_paths:
+            src = toolchain.scan_resources(path)
+            objects = toolchain.compile_sources(src, abspath(tmp_path), resources.inc_dirs)
+            resources.objects.extend(objects)
 
         if archive:
-            needed_update = toolchain.build_library(objects, build_path, name)
-        else:
-            needed_update = True
+            toolchain.build_library(resources.objects, build_path, name)
 
-        if report != None and needed_update:
+        if report != None:
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["output"] = toolchain.get_output()
@@ -382,6 +389,148 @@
             if toolchain_output:
                 cur_result["output"] += toolchain_output
 
+            add_result_to_report(report, cur_result)
+
+        # Let Exception propagate
+        raise e
+
+######################
+### Legacy methods ###
+######################
+
+def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+    """ Legacy method for building mbed libraries
+        Function builds library in proper directory using all dependencies and macros defined by user.
+    """
+    lib = Library(lib_id)
+    if not lib.is_supported(target, toolchain_name):
+        print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain_name)
+        return False
+
+    # We need to combine macros from parameter list with macros from library definition
+    MACROS = lib.macros if lib.macros else []
+    if macros:
+        macros.extend(MACROS)
+    else:
+        macros = MACROS
+
+    src_paths = lib.source_dir
+    build_path = lib.build_dir
+    dependencies_paths = lib.dependencies
+    inc_dirs = lib.inc_dirs
+    inc_dirs_ext = lib.inc_dirs_ext
+
+    """ src_path: the path of the source directory
+    build_path: the path of the build directory
+    target: ['LPC1768', 'LPC11U24', 'LPC2368']
+    toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
+    library_paths: List of paths to additional libraries
+    clean: Rebuild everything if True
+    notify: Notify function for logs
+    verbose: Write the actual tools command lines if True
+    inc_dirs: additional include directories which should be included in build
+    inc_dirs_ext: additional include directories which should be copied to library directory
+    """
+    if type(src_paths) != ListType:
+        src_paths = [src_paths]
+
+    # The first path will give the name to the library
+    name = basename(src_paths[0])
+
+    if report != None:
+        start = time()
+        id_name = name.upper()
+        description = name
+        vendor_label = target.extra_labels[0]
+        cur_result = None
+        prep_report(report, target.name, toolchain_name, id_name)
+        cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+        if properties != None:
+            prep_properties(properties, target.name, toolchain_name, vendor_label)
+
+    for src_path in src_paths:
+        if not exists(src_path):
+            error_msg = "The library source folder does not exist: %s", src_path
+
+            if report != None:
+                cur_result["output"] = error_msg
+                cur_result["result"] = "FAIL"
+                add_result_to_report(report, cur_result)
+
+            raise Exception(error_msg)
+
+    # Determine the build profile from the library sources (mirrors the non-legacy builders)
+    for src_path in src_paths:
+        profile = get_build_profile(src_path)
+
+    try:
+        # Toolchain instance
+        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose, profile=profile)
+        toolchain.VERBOSE = verbose
+        toolchain.jobs = jobs
+        toolchain.build_all = clean
+
+        toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+
+        # Scan Resources
+        resources = []
+        for src_path in src_paths:
+            resources.append(toolchain.scan_resources(src_path))
+
+        # Add extra include directories / files which are required by the library.
+        # These files are usually not in the same directory as the source files,
+        # so the previous scan will not include them.
+        if inc_dirs_ext is not None:
+            for inc_ext in inc_dirs_ext:
+                resources.append(toolchain.scan_resources(inc_ext))
+
+        # Dependencies Include Paths
+        dependencies_include_dir = []
+        if dependencies_paths is not None:
+            for path in dependencies_paths:
+                lib_resources = toolchain.scan_resources(path)
+                dependencies_include_dir.extend(lib_resources.inc_dirs)
+
+        if inc_dirs:
+            dependencies_include_dir.extend(inc_dirs)
+
+        # Create the desired build directory structure
+        bin_path = join(build_path, toolchain.obj_path)
+        mkdir(bin_path)
+        tmp_path = join(build_path, '.temp', toolchain.obj_path)
+        mkdir(tmp_path)
+
+        # Copy Headers
+        for resource in resources:
+            toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
+        dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+
+        # Compile Sources
+        objects = []
+        for resource in resources:
+            objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+
+        needed_update = toolchain.build_library(objects, bin_path, name)
+
+        if report != None and needed_update:
+            end = time()
+            cur_result["elapsed_time"] = end - start
+            cur_result["output"] = toolchain.get_output()
+            cur_result["result"] = "OK"
+
+            add_result_to_report(report, cur_result)
+
+    except Exception, e:
+        if report != None:
+            end = time()
+            cur_result["result"] = "FAIL"
+            cur_result["elapsed_time"] = end - start
+
+            toolchain_output = toolchain.get_output()
+            if toolchain_output:
+                cur_result["output"] += toolchain_output
+
             cur_result["output"] += str(e)
 
             add_result_to_report(report, cur_result)
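A hedged example of driving the re-added legacy entry point. The library id, target and toolchain are illustrative; what actually steers the build is the `Library` metadata (source_dir, build_dir, dependencies, inc_dirs, macros) looked up from the id:

    from tools.targets import TARGET_MAP

    target = TARGET_MAP["K64F"]          # any target registered in TARGET_MAP
    ok = build_lib("rtos", target, "GCC_ARM", verbose=True, jobs=4)
    if ok is False:
        print "library/toolchain combination not supported"
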
@@ -389,34 +534,6 @@
         # Let Exception propagate
         raise e
 
-def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
-    """ Wrapper for build_library function.
-        Function builds library in proper directory using all dependencies and macros defined by user.
-    """
-    lib = Library(lib_id)
-    if lib.is_supported(target, toolchain):
-        # We need to combine macros from parameter list with macros from library definition
-        MACROS = lib.macros if lib.macros else []
-        if macros:
-            MACROS.extend(macros)
-
-        return build_library(lib.source_dir, lib.build_dir, target, toolchain, lib.dependencies, options,
-                      verbose=verbose,
-                      silent=silent,
-                      clean=clean,
-                      macros=MACROS,
-                      notify=notify,
-                      inc_dirs=lib.inc_dirs,
-                      inc_dirs_ext=lib.inc_dirs_ext,
-                      jobs=jobs,
-                      report=report,
-                      properties=properties,
-                      extra_verbose=extra_verbose)
-    else:
-        print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
-        return False
-
-
 # We do have unique legacy conventions about how we build and package the mbed library
 def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
     """ Function returns True is library was built and false if building was skipped """
@@ -445,6 +562,9 @@
 
         return False
 
+    for path in src_paths:
+        profile = get_build_profile(path)
+
     try:
         # Toolchain
         toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
@@ -506,12 +626,12 @@
         for o in separate_objects:
             objects.remove(o)
 
-        needed_update = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+        toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
 
         for o in separate_objects:
             toolchain.copy_files(o, BUILD_TOOLCHAIN)
 
-        if report != None and needed_update:
+        if report != None:
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["output"] = toolchain.get_output()
@@ -538,6 +658,7 @@
         # Let Exception propagate
         raise e
 
+
 def get_unique_supported_toolchains():
     """ Get list of all unique toolchains supported by targets """
     unique_supported_toolchains = []
@@ -554,12 +675,12 @@
     from prettytable import PrettyTable # Only use it in this function so building works without extra modules
 
     # All tests status table print
-    columns = ["Platform"] + unique_supported_toolchains
-    pt = PrettyTable(["Platform"] + unique_supported_toolchains)
+    columns = ["Target"] + unique_supported_toolchains
+    pt = PrettyTable(["Target"] + unique_supported_toolchains)
     # Align table
     for col in columns:
         pt.align[col] = "c"
-    pt.align["Platform"] = "l"
+    pt.align["Target"] = "l"
 
     perm_counter = 0
     target_counter = 0
@@ -571,25 +692,21 @@
         target_counter += 1
 
         row = [target]  # First column is the target name
-        default_toolchain = TARGET_MAP[target].default_toolchain
         for unique_toolchain in unique_supported_toolchains:
-            text = "-"
-            if default_toolchain == unique_toolchain:
-                text = "Default"
-                perm_counter += 1
-            elif unique_toolchain in TARGET_MAP[target].supported_toolchains:
+            if unique_toolchain in TARGET_MAP[target].supported_toolchains:
                 text = "Supported"
                 perm_counter += 1
+            else:
+                text = "-"
+
             row.append(text)
         pt.add_row(row)
 
     result = pt.get_html_string() if verbose_html else pt.get_string()
     result += "\n"
-    result += "*Default - default on-line compiler\n"
-    result += "*Supported - supported off-line compiler\n"
-    result += "\n"
-    result += "Total platforms: %d\n"% (target_counter)
-    result += "Total permutations: %d"% (perm_counter)
+    result += "Supported targets: %d\n"% (target_counter)
+    if target_counter == 1:
+        result += "Supported toolchains: %d"% (perm_counter)
     return result
 
 
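The matrix rendering above boils down to a little PrettyTable plumbing: every toolchain column centred, the target column left-aligned, one row per target. A self-contained sketch with made-up support data:

    from prettytable import PrettyTable

    columns = ["Target", "ARM", "uARM", "GCC_ARM"]
    pt = PrettyTable(columns)
    for col in columns:
        pt.align[col] = "c"   # centre every column...
    pt.align["Target"] = "l"  # ...except the target name

    pt.add_row(["LPC1768", "Supported", "Supported", "Supported"])
    pt.add_row(["LPC11U24", "Supported", "Supported", "-"])
    print pt.get_string()     # or pt.get_html_string() for verbose_html
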
@@ -824,6 +941,68 @@
     with open(filename, 'w+') as f:
         f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
 
+
+def scan_for_source_paths(path, exclude_paths=None):
+    ignorepatterns = []
+    paths = []
+
+    def is_ignored(file_path):
+        for pattern in ignorepatterns:
+            if fnmatch.fnmatch(file_path, pattern):
+                return True
+        return False
+
+
+    """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
+    When topdown is True, the caller can modify the dirnames list in-place
+    (perhaps using del or slice assignment), and walk() will only recurse into
+    the subdirectories whose names remain in dirnames; this can be used to prune
+    the search, impose a specific order of visiting, or even to inform walk()
+    about directories the caller creates or renames before it resumes walk()
+    again. Modifying dirnames when topdown is False is ineffective, because in
+    bottom-up mode the directories in dirnames are generated before dirpath
+    itself is generated.
+    """
+    for root, dirs, files in walk(path, followlinks=True):
+        # Remove ignored directories
+        # Check if folder contains .mbedignore
+        if ".mbedignore" in files :
+            with open (join(root,".mbedignore"), "r") as f:
+                lines=f.readlines()
+                lines = [l.strip() for l in lines] # Strip whitespaces
+                lines = [l for l in lines if l != ""] # Strip empty lines
+                lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
+                # Append root path to glob patterns
+                # and append patterns to ignorepatterns
+                ignorepatterns.extend([join(root,line.strip()) for line in lines])
+
+        for d in copy(dirs):
+            dir_path = join(root, d)
+
+            # Always ignore hidden directories
+            if d.startswith('.'):
+                dirs.remove(d)
+                continue
+
+            # Remove dirs that already match the ignorepatterns
+            # to avoid traversing into them and to prevent them
+            # from appearing in the include path.
+            if is_ignored(join(dir_path, "")):
+                dirs.remove(d)
+                continue
+
+            if exclude_paths:
+                for exclude_path in exclude_paths:
+                    rel_path = relpath(dir_path, exclude_path)
+                    if not rel_path.startswith('..'):
+                        dirs.remove(d)
+                        break
+
+        # Add the root to the discovered source paths
+        paths.append(root)
+
+    return paths
+
 def get_build_profile(path):
     profile = None
     builds = get_build_ids(path)
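One closing note on the new ignore machinery in scan_for_source_paths: patterns from a .mbedignore file are joined to the directory that contains the file, and candidate directories are tested as join(dir_path, "") so the trailing separator lets a pattern such as test/* prune the directory itself. A small demonstration of that matching, with made-up POSIX paths:

    import fnmatch
    from os.path import join

    root = "/src/lib"               # directory holding the .mbedignore
    pattern = join(root, "test/*")  # ignore entry "test/*" -> "/src/lib/test/*"

    # The trailing separator lets "*" match an empty remainder, pruning the dir
    print fnmatch.fnmatch("/src/lib/test/", pattern)    # True  -> pruned
    print fnmatch.fnmatch("/src/lib/testing", pattern)  # False -> kept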