Clone of official tools

Revision: 7:5af61d55adbe
Parent:   6:744106007ff3
Child:    9:2d27d77ada5c
--- a/toolchains/__init__.py	Sat May 21 20:17:44 2016 +0100
+++ b/toolchains/__init__.py	Tue Jun 07 11:21:44 2016 +0100
@@ -17,19 +17,21 @@
 
 import re
 import sys
-from os import stat, walk, getcwd
+from os import stat, walk, getcwd, sep
 from copy import copy
 from time import time, sleep
 from types import ListType
 from shutil import copyfile
-from os.path import join, splitext, exists, relpath, dirname, basename, split
+from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath
 from inspect import getmro
 
 from multiprocessing import Pool, cpu_count
 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
 from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
 import tools.hooks as hooks
+from tools.memap import MemmapParser
 from hashlib import md5
+import fnmatch
 
 
 #Disables multiprocessing if set to higher number than the host machine CPUs
@@ -52,6 +54,7 @@
         'results': results
     }
 
+
 class Resources:
     def __init__(self, base_path=None):
         self.base_path = base_path
@@ -79,6 +82,19 @@
         # Other files
         self.hex_files = []
         self.bin_files = []
+        self.json_files = []
+
+    def __add__(self, resources):
+        if resources is None:
+            return self
+        else:
+            return self.add(resources)
+
+    def __radd__(self, resources):
+        if resources is None:
+            return self
+        else:
+            return self.add(resources)
 
     def add(self, resources):
         self.inc_dirs += resources.inc_dirs
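
The None checks in __add__ and __radd__ are presumably there so a list of per-directory scan results can be folded together with sum(..., None), tolerating both the None start value and scans that produced nothing. A minimal standalone sketch of that pattern (a simplified stand-in, not the real Resources class):

    class Res(object):
        def __init__(self, inc_dirs=None):
            self.inc_dirs = list(inc_dirs or [])

        def add(self, other):
            # Mirrors Resources.add(): merge in place and return self so '+' can chain.
            self.inc_dirs += other.inc_dirs
            return self

        def __add__(self, other):
            return self if other is None else self.add(other)

        def __radd__(self, other):
            # Handles 'None + res', the first step taken by sum(scans, None).
            return self if other is None else self.add(other)

    scans = [Res(["lib_a/include"]), None, Res(["lib_b/include"])]
    merged = sum(scans, None)
    print(merged.inc_dirs)   # ['lib_a/include', 'lib_b/include']
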
@@ -103,11 +119,15 @@
 
         self.hex_files += resources.hex_files
         self.bin_files += resources.bin_files
+        self.json_files += resources.json_files
+
+        return self
 
     def relative_to(self, base, dot=False):
         for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                       'cpp_sources', 'lib_dirs', 'objects', 'libraries',
-                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
+                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
+                      'hex_files', 'bin_files', 'json_files']:
             v = [rel_path(f, base, dot) for f in getattr(self, field)]
             setattr(self, field, v)
         if self.linker_script is not None:
@@ -116,7 +136,8 @@
     def win_to_unix(self):
         for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                       'cpp_sources', 'lib_dirs', 'objects', 'libraries',
-                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
+                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
+                      'hex_files', 'bin_files', 'json_files']:
             v = [f.replace('\\', '/') for f in getattr(self, field)]
             setattr(self, field, v)
         if self.linker_script is not None:
@@ -148,7 +169,6 @@
 
         return '\n'.join(s)
 
-
 # Support legacy build conventions: the original mbed build system did not have
 # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
 # had the knowledge of a list of these directories to be ignored.
@@ -167,6 +187,7 @@
 class mbedToolchain:
     PROFILE = None
     VERBOSE = True
+    ignorepatterns = []
 
     CORTEX_SYMBOLS = {
         "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
@@ -218,6 +239,7 @@
         self.jobs = 1
 
         self.CHROOT = None
+
         self.mp_pool = None
 
         if profile and (TOOLCHAIN_PROFILES.get(self.name, None) and
@@ -309,6 +331,11 @@
 
             # Add target's symbols
             self.symbols += self.target.macros
+            # Add target's hardware
+            try :
+                self.symbols += ["DEVICE_" + feature + "=1" for feature in self.target.features]
+            except AttributeError :
+                pass
             # Add extra symbols passed via 'macros' parameter
             self.symbols += self.macros
 
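The added block turns every entry of the target's features list (when the target defines one) into a DEVICE_<feature>=1 symbol, and the AttributeError guard keeps targets without a features attribute building as before. A standalone illustration of the same comprehension, with made-up feature names:

    class FakeTarget(object):
        macros = ["CUSTOM_DEFINE"]
        features = ["ETHERNET", "LOWPOWERTIMER"]   # hypothetical feature names

    symbols = list(FakeTarget.macros)
    try:
        symbols += ["DEVICE_" + feature + "=1" for feature in FakeTarget.features]
    except AttributeError:
        pass   # a target with no 'features' attribute simply contributes nothing
    print(symbols)   # ['CUSTOM_DEFINE', 'DEVICE_ETHERNET=1', 'DEVICE_LOWPOWERTIMER=1']
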
@@ -318,6 +345,10 @@
 
         return list(set(self.symbols))  # Return only unique symbols
 
+    # Extend the internal list of macros
+    def add_macros(self, new_macros):
+        self.macros.extend(new_macros)
+
     def get_labels(self):
         if self.labels is None:
             toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
@@ -349,6 +380,12 @@
 
         return False
 
+    def is_ignored(self, file_path):
+        for pattern in self.ignorepatterns:
+            if fnmatch.fnmatch(file_path, pattern):
+                return True
+        return False
+
     def scan_resources(self, path, exclude_paths=None):
         labels = self.get_labels()
         resources = Resources(path)
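
is_ignored() runs a candidate path through fnmatch against every glob pattern collected so far, and any match makes scan_resources skip the entry. The same check in isolation, condensed with any() and using invented patterns:

    import fnmatch
    from os.path import join

    ignorepatterns = [join("mbed-src", "targets", "*"),   # hypothetical patterns
                      join("examples", "*.bin")]

    def is_ignored(file_path):
        # True as soon as any accumulated glob pattern matches the path.
        return any(fnmatch.fnmatch(file_path, pattern) for pattern in ignorepatterns)

    print(is_ignored(join("mbed-src", "targets", "TARGET_FOO", "startup.s")))   # True
    print(is_ignored(join("examples", "main.cpp")))                             # False
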
@@ -366,20 +403,36 @@
         """
         for root, dirs, files in walk(path, followlinks=True):
             # Remove ignored directories
+            # Check if folder contains .mbedignore
+            if ".mbedignore" in files :
+                with open (join(root,".mbedignore"), "r") as f:
+                    lines=f.readlines()
+                    lines = [l.strip() for l in lines] # Strip whitespaces
+                    lines = [l for l in lines if l != ""] # Strip empty lines
+                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
+                    # Append root path to glob patterns
+                    # and append patterns to ignorepatterns
+                    self.ignorepatterns.extend([join(root,line.strip()) for line in lines])
+
             for d in copy(dirs):
                 dir_path = join(root, d)
-                
                 if d == '.hg':
                     resources.repo_dirs.append(dir_path)
                     resources.repo_files.extend(self.scan_repository(dir_path))
 
                 if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
-                    ((d.upper().startswith('TARGET_') or d.upper().startswith('TARGET-')) and d[7:] not in labels['TARGET']) or
-                    ((d.upper().startswith('TOOLCHAIN_') or d.upper().startswith('TOOLCHAIN-')) and d[10:] not in labels['TOOLCHAIN']) or
-                    (d.upper() == 'TESTS') or
-                    exists(join(dir_path, '.buildignore'))):
+                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
+                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
+                    (d == 'TESTS')):
                     dirs.remove(d)
-                
+
+
+                # Remove dirs that already match the ignorepatterns
+                # to avoid travelling into them and to prevent them
+                # from appearing in the include path.
+                if self.is_ignored(join(dir_path,"")):
+                    dirs.remove(d)
+
                 if exclude_paths:
                     for exclude_path in exclude_paths:
                         rel_path = relpath(dir_path, exclude_path)
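
The .mbedignore handling above strips blank and '#' comment lines, then roots each remaining glob at the directory that contained the file, so a pattern only applies below that point in the tree. The same parsing on a sample file as a standalone sketch (contents are hypothetical, and startswith("#") stands in for the re.match("^#", ...) test):

    from os.path import join

    root = "some_lib"                      # directory where the .mbedignore was found
    mbedignore_text = """
    # generated sources we never want scanned
    docs/*
    *_test.cpp
    """

    lines = [l.strip() for l in mbedignore_text.splitlines()]
    lines = [l for l in lines if l and not l.startswith("#")]   # drop blanks and comments
    patterns = [join(root, l) for l in lines]
    print(patterns)   # ['some_lib/docs/*', 'some_lib/*_test.cpp'] with a POSIX separator
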
@@ -392,6 +445,10 @@
 
             for file in files:
                 file_path = join(root, file)
+
+                if self.is_ignored(file_path):
+                    continue
+
                 _, ext = splitext(file)
                 ext = ext.lower()
 
@@ -436,6 +493,9 @@
                 elif ext == '.bin':
                     resources.bin_files.append(file_path)
 
+                elif ext == '.json':
+                    resources.json_files.append(file_path)
+
         return resources
 
     def scan_repository(self, path):
@@ -695,6 +755,7 @@
         filename = name+'.'+ext
         elf = join(tmp_path, name + '.elf')
         bin = join(tmp_path, filename)
+        map = join(tmp_path, name + '.map')
 
         if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
             needed_update = True
@@ -707,6 +768,8 @@
 
             self.binary(r, elf, bin)
 
+        self.mem_stats(map)
+
         self.var("compile_succeded", True)
         self.var("binary", filename)
 
@@ -763,6 +826,35 @@
     def var(self, key, value):
         self.notify({'type': 'var', 'key': key, 'val': value})
 
+    def mem_stats(self, map):
+        # Creates parser object
+        toolchain = self.__class__.__name__
+        t = MemmapParser()
+
+        try:
+            with open(map, 'rt') as f:
+                # Decode map file depending on the toolchain
+                if toolchain == "ARM_STD" or toolchain == "ARM_MICRO":
+                    t.search_objects(abspath(map), "ARM")
+                    t.parse_map_file_armcc(f)
+                elif toolchain == "GCC_ARM":
+                    t.parse_map_file_gcc(f)
+                elif toolchain == "IAR":
+                    self.info("[WARNING] IAR Compiler not fully supported (yet)")
+                    t.search_objects(abspath(map), toolchain)
+                    t.parse_map_file_iar(f)
+                else:
+                    self.info("Unknown toolchain for memory statistics %s" % toolchain)
+                    return
+
+                t.generate_output(sys.stdout, False)
+                map_out = splitext(map)[0] + "_map.json"
+                with open(map_out, 'w') as fo:
+                    t.generate_output(fo, True)
+        except OSError:
+            return
+            
+    
 from tools.settings import ARM_BIN
 from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
 from tools.settings import IAR_PATH
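
mem_stats() is called after every link step, including runs where nothing needed relinking, so the .map file may simply not exist; the except OSError guard is presumably there to bail out quietly in that case. Note that under Python 2 (this module still imports types.ListType) a missing file raises IOError, which is a sibling of OSError rather than a subclass, so a defensive variant would catch both. A hedged sketch of the overall pattern, with a stand-in parser instead of tools.memap.MemmapParser:

    from os.path import splitext

    def report_map(map_path, parse):
        # 'parse' stands in for the toolchain-specific MemmapParser calls.
        try:
            with open(map_path, 'rt') as f:
                stats = parse(f)
        except (IOError, OSError):
            return None                     # no map file (link was skipped): nothing to report
        json_out = splitext(map_path)[0] + "_map.json"   # e.g. BUILD/app.map -> BUILD/app_map.json
        with open(json_out, 'w') as out:
            out.write(str(stats))           # placeholder for t.generate_output(out, True)
        return json_out

    print(report_map("BUILD/app.map", lambda f: {"map_lines": len(f.readlines())}))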