Denislam Valeev / Mbed OS Nucleo_rtos_basic
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import sys
00021 from os import stat, walk, getcwd, sep, remove
00022 from copy import copy
00023 from time import time, sleep
00024 from shutil import copyfile
00025 from os.path import (join, splitext, exists, relpath, dirname, basename, split,
00026                      abspath, isfile, isdir, normcase)
00027 from itertools import chain
00028 from inspect import getmro
00029 from copy import deepcopy
00030 from abc import ABCMeta, abstractmethod
00031 from distutils.spawn import find_executable
00032 from multiprocessing import Pool, cpu_count
00033 from hashlib import md5
00034 import fnmatch
00035 
00036 from ..utils import (run_cmd, mkdir, rel_path, ToolException,
00037                     NotSupportedException, split_path, compile_worker)
00038 from ..settings import MBED_ORG_USER
00039 from .. import hooks
00040 from ..memap import MemapParser
00041 
00042 
# Setting CPU_COUNT_MIN to a number higher than the host machine's CPU
# count effectively disables multiprocessing.
CPU_COUNT_MIN = 1
# Scaling coefficient for the number of parallel build jobs
# (presumably applied where the job pool is sized — usage is outside this view).
CPU_COEF = 1
00046 
class LazyDict(dict):
    """A dict-like container whose values may be deferred computations.

    Entries live in one of two internal dicts: ``self.eager`` (already
    computed values) or ``self.lazy`` (zero-argument thunks, evaluated and
    moved to ``eager`` on first access).

    NOTE: storage bypasses the ``dict`` base class entirely, so the
    inherited accessors (``values``, ``keys``, ``get``, ...) would look at
    the always-empty underlying dict; ``keys`` and ``values`` are therefore
    overridden below.
    """

    def __init__(self):
        self.eager = {}
        self.lazy = {}

    def add_lazy(self, key, thunk):
        """Register *thunk* (a zero-argument callable) as the deferred
        value for *key*, replacing any eager value."""
        if key in self.eager:
            del self.eager[key]
        self.lazy[key] = thunk

    def __getitem__(self, key):
        # Force evaluation on first access, then cache in eager.
        if  (key not in self.eager
             and key in self.lazy):
            self.eager[key] = self.lazy[key]()
            del self.lazy[key]
        return self.eager[key]

    def __setitem__(self, key, value):
        self.eager[key] = value

    def __delitem__(self, key):
        if key in self.eager:
            del self.eager[key]
        else:
            del self.lazy[key]

    def __contains__(self, key):
        return key in self.eager or key in self.lazy

    def __iter__(self):
        return chain(iter(self.eager), iter(self.lazy))

    def __len__(self):
        return len(self.eager) + len(self.lazy)

    def __str__(self):
        return "Lazy{%s}" % (
            ", ".join("%r: %r" % (k, v) for k, v in
                      chain(self.eager.items(), ((k, "not evaluated")
                                                     for k in self.lazy))))

    def update(self, other):
        """Merge *other* into this dict. A LazyDict keeps its entries
        lazy; a plain mapping is merged eagerly."""
        if isinstance(other, LazyDict):
            self.eager.update(other.eager)
            self.lazy.update(other.lazy)
        else:
            self.eager.update(other)

    def keys(self):
        """Return an iterator over all keys (no evaluation is forced)."""
        return iter(self)

    def values(self):
        """Warning: This forces the evaluation of all of the lazy values
        in this LazyDict that are iterated over."""
        for _, v in self.items():
            yield v

    def items(self):
        """Warning: This forces the evaluation all of the items in this LazyDict
        that are iterated over."""
        for k, v in self.eager.items():
            yield k, v
        # Iterate over a snapshot of the keys: self[k] evaluates the thunk
        # and deletes it from self.lazy, which would otherwise raise
        # "dictionary changed size during iteration" on Python 3.
        for k in list(self.lazy):
            yield k, self[k]

    def apply(self, fn):
        """Delay the application of a computation to all items of the lazy dict.
        Does no computation now. Instead the computation is performed when a
        consumer attempts to access a value in this LazyDict"""
        new_lazy = {}
        for k, f in self.lazy.items():
            # Bind f as a default argument to avoid the late-binding
            # closure pitfall (all closures sharing the last f).
            def closure(f=f):
                return fn(f())
            new_lazy[k] = closure
        for k, v in self.eager.items():
            def closure(v=v):
                return fn(v)
            new_lazy[k] = closure
        self.lazy = new_lazy
        self.eager = {}
00118 
class Resources:
    """Collects the files discovered while scanning a source tree.

    Files are bucketed by role (headers, C/C++/assembly sources, objects,
    libraries, linker script, ...). FEATURE_* subtrees are held in a
    LazyDict so they are only scanned when actually accessed.
    """

    def __init__(self, base_path=None, collect_ignores=False):
        self.base_path = base_path
        self.collect_ignores = collect_ignores

        # Maps each discovered path to the base path of the scan that found it
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features
        self.features = LazyDict()
        self.ignored_dirs = []

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def ignore_dir(self, directory):
        # Ignored directories are only recorded when requested at construction
        if self.collect_ignores:
            self.ignored_dirs.append(directory)

    def add(self, resources):
        """Merge *resources* into this object in place and return self."""
        for f,p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # The incoming linker script (if any) wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)
        self.ignored_dirs += resources.ignored_dirs

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate object-name and header-name collisions, recursing
        into feature subtrees. Returns the two accumulator dicts."""
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # BUGFIX: record the full path, not the basename. The basename
            # is the dict key itself, so adding it made every set a
            # singleton and duplicate headers were never reported.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of ambiguities found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count+=1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"\
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count+=1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %\
                    (headername, " ".join(locations)))
        return count


    def relative_to(self, base, dot=False):
        """Rewrite all stored paths relative to *base* (in place)."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        # Defer the rewrite of feature subtrees until they are accessed;
        # base/dot are bound as defaults to avoid late-binding surprises.
        def to_apply(feature, base=base, dot=dot):
            feature.relative_to(base, dot)
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert all stored paths to forward-slash form (in place)."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        def to_apply(feature):
            feature.win_to_unix()
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00298 
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
    'ARMC6'
])
# Maps toolchain class names to their legacy directory name above, so that
# mbedToolchain.__init__ can subtract the active toolchain's own directory
# from the ignore set.
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
    'ARMC6': 'ARMC6',
}
00314 
00315 
class mbedToolchain:
    """Abstract base class for the concrete toolchains (ARM, GCC_ARM, IAR, ...).

    Holds target/toolchain configuration, preprocessor symbol and label
    generation, resource scanning and incremental-build bookkeeping.
    """

    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols injected for each CPU core name (consumed by
    # get_symbols for both the assembler and C/C++ symbol sets).
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
        "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
    }

    # Name of the generated configuration header (written by the config
    # system; generation code is outside this view).
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Filename for stored build-profile data (usage is outside this view).
    PROFILE_FILE_NAME = ".profile"

    # NOTE(review): __metaclass__ is Python 2 syntax; it has no effect
    # under Python 3 (the class would need metaclass=ABCMeta there).
    __metaclass__ = ABCMeta

    # Default (empty) flag sets, deep-copied in __init__ when no
    # build_profile is supplied.
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00353 
    def __init__(self, target, notify=None, macros=None, silent=False,
                 extra_verbose=False, build_profile=None, build_dir=None):
        """Initialize toolchain state; attribute purposes are commented inline.

        Positional arguments:
        target - the target object this toolchain builds for

        Keyword arguments:
        notify - callback taking (event, silent); see comment block below
        macros - extra user-defined preprocessor macros
        silent - when True, suppress console output
        extra_verbose - use the verbose notification printer
        build_profile - flag sets deep-copied into self.flags
        build_dir - the build output directory
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags
        self.flags = deepcopy(build_profile or self.profile_template)

        # System libraries provided by the toolchain
        self.sys_libs = []

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = build_dir
        self.timestamp = time()

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files; "$^" can never match, so
        # nothing is ignored until add_ignore_patterns() is called
        self.ignore_patterns = []
        self._ignore_regex = re.compile("$^")

        # Pre-mbed 2.0 ignore dirs
        # NOTE(review): TOOLCHAINS is defined elsewhere in this module (not
        # in this view)
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()

        # uVisor specific rules
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00442 
00443     # Used for post __init__() hooks
00444     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00445     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00446     def init(self):
00447         return True
00448 
00449     def get_output(self):
00450         return self.output
00451 
00452     def print_notify(self, event, silent=False):
00453         """ Default command line notification
00454         """
00455         msg = None
00456 
00457         if not self.VERBOSE and event['type'] == 'tool_error':
00458             msg = event['message']
00459 
00460         elif event['type'] in ['info', 'debug']:
00461             msg = event['message']
00462 
00463         elif event['type'] == 'cc':
00464             event['severity'] = event['severity'].title()
00465             event['file'] = basename(event['file'])
00466             msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
00467 
00468         elif event['type'] == 'progress':
00469             if 'percent' in event:
00470                 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
00471                                                   event['percent'],
00472                                                   basename(event['file']))
00473             else:
00474                 msg = '{}: {}'.format(event['action'].title(),
00475                                       basename(event['file']))
00476 
00477         if msg:
00478             if not silent:
00479                 print(msg)
00480             self.output += msg + "\n"
00481 
00482     def print_notify_verbose(self, event, silent=False):
00483         """ Default command line notification with more verbose mode
00484         """
00485         if event['type'] in ['info', 'debug']:
00486             self.print_notify(event, silent=silent) # standard handle
00487 
00488         elif event['type'] == 'cc':
00489             event['severity'] = event['severity'].title()
00490             event['file'] = basename(event['file'])
00491             event['mcu_name'] = "None"
00492             event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
00493             event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
00494             msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
00495             if not silent:
00496                 print(msg)
00497             self.output += msg + "\n"
00498 
00499         elif event['type'] == 'progress':
00500             self.print_notify(event) # standard handle
00501 
00502     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00503     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00504     def notify(self, event):
00505         """ Little closure for notify functions
00506         """
00507         event['toolchain'] = self
00508         return self.notify_fun(event, self.silent)
00509 
    def get_symbols(self, for_asm=False):
        """Return the unique preprocessor symbols for this build.

        Keyword arguments:
        for_asm - when True return the (smaller) assembler symbol set,
                  otherwise the C/C++ symbol set.

        Results are cached in self.asm_symbols / self.cxx_symbols; the
        returned list is deduplicated via set() (order is unspecified).
        """
        if for_asm:
            if self.asm_symbols is None:
                self.asm_symbols = []

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Add target's symbols
                self.asm_symbols += self.target.macros
                # Add extra symbols passed via 'macros' parameter
                self.asm_symbols += self.macros
            return list(set(self.asm_symbols))  # Return only unique symbols
        else:
            if self.cxx_symbols is None:
                # Target and Toolchain symbols
                labels = self.get_labels()
                self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
                self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Symbols defined by the on-line build system
                self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
                if MBED_ORG_USER:
                    self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

                # Add target's symbols
                self.cxx_symbols += self.target.macros
                # Add target's hardware
                self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
                # Add target's features
                self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
                # Add extra symbols passed via 'macros' parameter
                self.cxx_symbols += self.macros

                # Form factor variables
                if hasattr(self.target, 'supported_form_factors'):
                    self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

            return list(set(self.cxx_symbols))  # Return only unique symbols
00554 
00555     # Extend the internal list of macros
00556     def add_macros(self, new_macros):
00557         self.macros.extend(new_macros)
00558 
    def get_labels(self):
        """Compute and cache the label sets used for selective builds.

        Returns a dict with 'TARGET', 'FEATURE' and 'TOOLCHAIN' keys; the
        TOOLCHAIN labels are the class's MRO names minus 'mbedToolchain'.
        Also appends DEBUG or RELEASE to the TARGET labels based on the
        common compile flags.
        """
        if self.labels is None:
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            self.labels = {
                'TARGET': self.target.labels,
                'FEATURE': self.target.features,
                'TOOLCHAIN': toolchain_labels
            }

            # This is a policy decision and it should /really/ be in the config system
            # ATM it's here for backward compatibility
            # DEBUG when -g/-g3 with -O0 (GCC-style flags) or -r with -On
            # (IAR-style flags) are present; RELEASE otherwise.
            if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
                 "-O0" in self.flags['common']) or
                ("-r" in self.flags['common'] and
                 "-On" in self.flags['common'])):
                self.labels['TARGET'].append("DEBUG")
            else:
                self.labels['TARGET'].append("RELEASE")
        return self.labels
00579 
00580 
00581     # Determine whether a source file needs updating/compiling
00582     def need_update(self, target, dependencies):
00583         if self.build_all:
00584             return True
00585 
00586         if not exists(target):
00587             return True
00588 
00589         target_mod_time = stat(target).st_mtime
00590 
00591         for d in dependencies:
00592             # Some objects are not provided with full path and here we do not have
00593             # information about the library paths. Safe option: assume an update
00594             if not d or not exists(d):
00595                 return True
00596 
00597             if d not in self.stat_cache:
00598                 self.stat_cache[d] = stat(d).st_mtime
00599 
00600             if self.stat_cache[d] >= target_mod_time:
00601                 return True
00602 
00603         return False
00604 
00605     def is_ignored(self, file_path):
00606         """Check if file path is ignored by any .mbedignore thus far"""
00607         return self._ignore_regex.match(normcase(file_path))
00608 
00609     def add_ignore_patterns(self, root, base_path, patterns):
00610         """Add a series of patterns to the ignored paths
00611 
00612         Positional arguments:
00613         root - the directory containing the ignore file
00614         base_path - the location that the scan started from
00615         patterns - the list of patterns we will ignore in the future
00616         """
00617         real_base = relpath(root, base_path)
00618         if real_base == ".":
00619             self.ignore_patterns.extend(normcase(p) for p in patterns)
00620         else:
00621             self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns)
00622         if self.ignore_patterns:
00623             self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns))
00624 
00625     # Create a Resources object from the path pointed to by *path* by either traversing a
00626     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00627     # when *path* is a file.
00628     # The parameter *base_path* is used to set the base_path attribute of the Resources
00629     # object and the parameter *exclude_paths* is used by the directory traversal to
00630     # exclude certain paths from the traversal.
    def scan_resources(self, path, exclude_paths=None, base_path=None,
                       collect_ignores=False):
        """Build a Resources object from *path*.

        A file is added directly; a directory is traversed (honoring
        .mbedignore files along the way). *base_path* defaults to the
        file's directory, or to the directory itself.
        """
        # NOTE(review): self.progress() is defined outside this view
        self.progress("scan", path)

        resources = Resources(path, collect_ignores=collect_ignores)
        if not base_path:
            if isfile(path):
                base_path = dirname(path)
            else:
                base_path = path
        resources.base_path = base_path

        if isfile(path):
            self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
        else:
            self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
        return resources
00648 
00649     # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
00650     # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
00651     # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.add_ignore_patterns(root, base_path, lines)

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            root_path =join(relpath(root, base_path))
            if  (self.is_ignored(join(root_path,"")) or
                 self.build_dir == root_path):
                resources.ignore_dir(root_path)
                # Clearing dirs in place prunes the walk (see docstring)
                dirs[:] = []
                continue

            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(relpath(root, base_path), d,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        resources.ignore_dir(dir_path)
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    # dir_path/base_path are bound as defaults so each
                    # closure captures this iteration's values
                    def closure (dir_path=dir_path, base_path=base_path):
                        return self.scan_resources(dir_path, base_path=base_path,
                                                   collect_ignores=resources.collect_ignores)
                    resources.features.add_lazy(d[8:], closure)
                    resources.ignore_dir(dir_path)
                    dirs.remove(d)
                elif exclude_paths:
                    # Prune any directory that lies inside one of exclude_paths
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            resources.ignore_dir(dir_path)
                            dirs.remove(d)
                            break

            # Add root to include paths
            root = root.rstrip("/")
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00725 
00726     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00727     # (*file_path*) to the resources object based on the file type.
00728     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00729         resources.file_basepath[file_path] = base_path
00730 
00731         if self.is_ignored(relpath(file_path, base_path)):
00732             resources.ignore_dir(relpath(file_path, base_path))
00733             return
00734 
00735         _, ext = splitext(file_path)
00736         ext = ext.lower()
00737 
00738         if   ext == '.s':
00739             resources.s_sources.append(file_path)
00740 
00741         elif ext == '.c':
00742             resources.c_sources.append(file_path)
00743 
00744         elif ext == '.cpp':
00745             resources.cpp_sources.append(file_path)
00746 
00747         elif ext == '.h' or ext == '.hpp':
00748             resources.headers.append(file_path)
00749 
00750         elif ext == '.o':
00751             resources.objects.append(file_path)
00752 
00753         elif ext == self.LIBRARY_EXT:
00754             resources.libraries.append(file_path)
00755             resources.lib_dirs.add(dirname(file_path))
00756 
00757         elif ext == self.LINKER_EXT:
00758             if resources.linker_script is not None:
00759                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00760             resources.linker_script = file_path
00761 
00762         elif ext == '.lib':
00763             resources.lib_refs.append(file_path)
00764 
00765         elif ext == '.bld':
00766             resources.lib_builds.append(file_path)
00767 
00768         elif basename(file_path) == '.hgignore':
00769             resources.repo_files.append(file_path)
00770 
00771         elif basename(file_path) == '.gitignore':
00772             resources.repo_files.append(file_path)
00773 
00774         elif ext == '.hex':
00775             resources.hex_files.append(file_path)
00776 
00777         elif ext == '.bin':
00778             resources.bin_files.append(file_path)
00779 
00780         elif ext == '.json':
00781             resources.json_files.append(file_path)
00782 
00783 
00784     def scan_repository(self, path):
00785         resources = []
00786 
00787         for root, dirs, files in walk(path):
00788             # Remove ignored directories
00789             for d in copy(dirs):
00790                 if d == '.' or d == '..':
00791                     dirs.remove(d)
00792 
00793             for file in files:
00794                 file_path = join(root, file)
00795                 resources.append(file_path)
00796 
00797         return resources
00798 
00799     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00800         # Handle a single file
00801         if not isinstance(files_paths, list):
00802             files_paths = [files_paths]
00803 
00804         for source in files_paths:
00805             if source is None:
00806                 files_paths.remove(source)
00807 
00808         for source in files_paths:
00809             if resources is not None and source in resources.file_basepath:
00810                 relative_path = relpath(source, resources.file_basepath[source])
00811             elif rel_path is not None:
00812                 relative_path = relpath(source, rel_path)
00813             else:
00814                 _, relative_path = split(source)
00815 
00816             target = join(trg_path, relative_path)
00817 
00818             if (target != source) and (self.need_update(target, [source])):
00819                 self.progress("copy", relative_path)
00820                 mkdir(dirname(target))
00821                 copyfile(source, target)
00822 
00823     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00824     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00825     def relative_object_path(self, build_path, base_dir, source):
00826         source_dir, name, _ = split_path(source)
00827 
00828         obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
00829         if obj_dir is not self.prev_dir:
00830             self.prev_dir = obj_dir
00831             mkdir(obj_dir)
00832         return join(obj_dir, name + '.o')
00833 
00834     # Generate response file for all includes.
00835     # ARM, GCC, IAR cross compatible
00836     def get_inc_file(self, includes):
00837         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00838         if not exists(include_file):
00839             with open(include_file, "w") as f:
00840                 cmd_list = []
00841                 for c in includes:
00842                     if c:
00843                         c = c.replace("\\", "/")
00844                         if self.CHROOT:
00845                             c = c.replace(self.CHROOT, '')
00846                         cmd_list.append('"-I%s"' % c)
00847                 string = " ".join(cmd_list)
00848                 f.write(string)
00849         return include_file
00850 
00851     # Generate response file for all objects when linking.
00852     # ARM, GCC, IAR cross compatible
00853     def get_link_file(self, cmd):
00854         link_file = join(self.build_dir, ".link_files.txt")
00855         with open(link_file, "w") as f:
00856             cmd_list = []
00857             for c in cmd:
00858                 if c:
00859                     c = c.replace("\\", "/")
00860                     if self.CHROOT:
00861                         c = c.replace(self.CHROOT, '')
00862                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00863             string = " ".join(cmd_list)
00864             f.write(string)
00865         return link_file
00866 
00867     # Generate response file for all objects when archiving.
00868     # ARM, GCC, IAR cross compatible
00869     def get_arch_file(self, objects):
00870         archive_file = join(self.build_dir, ".archive_files.txt")
00871         with open(archive_file, "w") as f:
00872             o_list = []
00873             for o in objects:
00874                 o_list.append('"%s"' % o)
00875             string = " ".join(o_list).replace("\\", "/")
00876             f.write(string)
00877         return archive_file
00878 
00879     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00880     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00881     def compile_sources(self, resources, inc_dirs=None):
00882         # Web IDE progress bar for project build
00883         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00884         self.to_be_compiled = len(files_to_compile)
00885         self.compiled = 0
00886 
00887         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00888 
00889         inc_paths = resources.inc_dirs
00890         if inc_dirs is not None:
00891             if isinstance(inc_dirs, list):
00892                 inc_paths.extend(inc_dirs)
00893             else:
00894                 inc_paths.append(inc_dirs)
00895         # De-duplicate include paths
00896         inc_paths = set(inc_paths)
00897         # Sort include paths for consistency
00898         inc_paths = sorted(set(inc_paths))
00899         # Unique id of all include paths
00900         self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest()
00901 
00902         objects = []
00903         queue = []
00904         work_dir = getcwd()
00905         self.prev_dir = None
00906 
00907         # Generate configuration header (this will update self.build_all if needed)
00908         self.get_config_header()
00909         self.dump_build_profile()
00910 
00911         # Sort compile queue for consistency
00912         files_to_compile.sort()
00913         for source in files_to_compile:
00914             object = self.relative_object_path(
00915                 self.build_dir, resources.file_basepath[source], source)
00916 
00917             # Queue mode (multiprocessing)
00918             commands = self.compile_command(source, object, inc_paths)
00919             if commands is not None:
00920                 queue.append({
00921                     'source': source,
00922                     'object': object,
00923                     'commands': commands,
00924                     'work_dir': work_dir,
00925                     'chroot': self.CHROOT
00926                 })
00927             else:
00928                 self.compiled += 1
00929                 objects.append(object)
00930 
00931         # Use queues/multiprocessing if cpu count is higher than setting
00932         jobs = self.jobs if self.jobs else cpu_count()
00933         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00934             return self.compile_queue(queue, objects)
00935         else:
00936             return self.compile_seq(queue, objects)
00937 
00938     # Compile source files queue in sequential order
00939     def compile_seq(self, queue, objects):
00940         for item in queue:
00941             result = compile_worker(item)
00942 
00943             self.compiled += 1
00944             self.progress("compile", item['source'], build_update=True)
00945             for res in result['results']:
00946                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00947                 self.compile_output([
00948                     res['code'],
00949                     res['output'],
00950                     res['command']
00951                 ])
00952             objects.append(result['object'])
00953         return objects
00954 
00955     # Compile source files queue in parallel by creating pool of worker threads
    def compile_queue(self, queue, objects):
        """Compile the queued jobs in parallel with a multiprocessing Pool.

        Positional arguments:
        queue -- list of job dicts (source/object/commands/work_dir/chroot)
                 consumed by compile_worker
        objects -- list that each produced object path is appended to

        Return value:
        The *objects* list, extended with one entry per compiled job.

        Raises ToolException when a worker fails or the watchdog trips.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
        p = Pool(processes=jobs_count)

        # Submit every job up front, then close the pool to new work
        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))
        p.close()

        # Poll for finished jobs until all results are collected
        itr = 0
        while len(results):
            itr += 1
            # NOTE(review): 180000 iterations of the 0.01s sleep below is
            # ~30 minutes of pure sleeping, not the 5 minutes the message
            # claims -- confirm the intended watchdog budget.
            if itr > 180000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            sleep(0.01)
            pending = 0
            for r in results:
                if r.ready():
                    try:
                        result = r.get()
                        # Removing while iterating skips the next entry, but
                        # the enclosing while re-scans so nothing is lost.
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException as err:
                        # Drain not-yet-started tasks (private Pool API) so
                        # workers stop picking up new jobs before terminate
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    # Stop scanning once every worker slot is busy
                    pending += 1
                    if pending >= jobs_count:
                        break

        results = None
        p.join()

        return objects
01007 
01008     # Determine the compile command based on type of source file
01009     def compile_command(self, source, object, includes):
01010         # Check dependencies
01011         _, ext = splitext(source)
01012         ext = ext.lower()
01013 
01014         if ext == '.c' or  ext == '.cpp':
01015             base, _ = splitext(object)
01016             dep_path = base + '.d'
01017             try:
01018                 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
01019             except (IOError, IndexError):
01020                 deps = []
01021             config_file = ([self.config.app_config_location]
01022                            if self.config.app_config_location else [])
01023             deps.extend(config_file)
01024             if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01025                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx"))
01026             else:
01027                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c"))
01028             if len(deps) == 0 or self.need_update(object, deps):
01029                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01030                     return self.compile_cpp(source, object, includes)
01031                 else:
01032                     return self.compile_c(source, object, includes)
01033         elif ext == '.s':
01034             deps = [source]
01035             deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm"))
01036             if self.need_update(object, deps):
01037                 return self.assemble(source, object, includes)
01038         else:
01039             return False
01040 
01041         return None
01042 
01043     def parse_dependencies(self, dep_path):
01044         """Parse the dependency information generated by the compiler.
01045 
01046         Positional arguments:
01047         dep_path -- the path to a file generated by a previous run of the compiler
01048 
01049         Return value:
01050         A list of all source files that the dependency file indicated were dependencies
01051 
01052         Side effects:
01053         None
01054 
01055         Note: A default implementation is provided for make-like file formats
01056         """
01057         dependencies = []
01058         buff = open(dep_path).readlines()
01059         if buff:
01060             buff[0] = re.sub('^(.*?)\: ', '', buff[0])
01061             for line in buff:
01062                 filename = line.replace('\\\n', '').strip()
01063                 if filename:
01064                     filename = filename.replace('\\ ', '\a')
01065                     dependencies.extend(((self.CHROOT if self.CHROOT else '') +
01066                                          f.replace('\a', ' '))
01067                                         for f in filename.split(" "))
01068         return list(filter(None, dependencies))
01069 
01070     def is_not_supported_error(self, output):
01071         return "#error directive: [NOT_SUPPORTED]" in output
01072 
01073     @abstractmethod
01074     def parse_output(self, output):
01075         """Take in compiler output and extract sinlge line warnings and errors from it.
01076 
01077         Positional arguments:
01078         output -- a string of all the messages emitted by a run of the compiler
01079 
01080         Return value:
01081         None
01082 
01083         Side effects:
01084         call self.cc_info or self.notify with a description of the event generated by the compiler
01085         """
01086         raise NotImplemented
01087 
01088     def compile_output(self, output=[]):
01089         _rc = output[0]
01090         _stderr = output[1].decode("utf-8")
01091         command = output[2]
01092 
01093         # Parse output for Warnings and Errors
01094         self.parse_output(_stderr)
01095         self.debug("Return: %s"% _rc)
01096         for error_line in _stderr.splitlines():
01097             self.debug("Output: %s"% error_line)
01098 
01099         # Check return code
01100         if _rc != 0:
01101             if self.is_not_supported_error(_stderr):
01102                 raise NotSupportedException(_stderr)
01103             else:
01104                 raise ToolException(_stderr)
01105 
01106     def build_library(self, objects, dir, name):
01107         needed_update = False
01108         lib = self.STD_LIB_NAME % name
01109         fout = join(dir, lib)
01110         if self.need_update(fout, objects):
01111             self.info("Library: %s" % lib)
01112             self.archive(objects, fout)
01113             needed_update = True
01114 
01115         return needed_update
01116 
01117     def link_program(self, r, tmp_path, name):
01118         needed_update = False
01119         ext = 'bin'
01120         if hasattr(self.target, 'OUTPUT_EXT'):
01121             ext = self.target.OUTPUT_EXT
01122 
01123         if hasattr(self.target, 'OUTPUT_NAMING'):
01124             self.var("binary_naming", self.target.OUTPUT_NAMING)
01125             if self.target.OUTPUT_NAMING == "8.3":
01126                 name = name[0:8]
01127                 ext = ext[0:3]
01128 
01129         # Create destination directory
01130         head, tail =  split(name)
01131         new_path = join(tmp_path, head)
01132         mkdir(new_path)
01133 
01134         filename = name+'.'+ext
01135         # Absolute path of the final linked file
01136         full_path = join(tmp_path, filename)
01137         elf = join(tmp_path, name + '.elf')
01138         bin = None if ext == 'elf' else full_path
01139         map = join(tmp_path, name + '.map')
01140 
01141         r.objects = sorted(set(r.objects))
01142         config_file = ([self.config.app_config_location]
01143                        if self.config.app_config_location else [])
01144         dependencies = r.objects + r.libraries + [r.linker_script] + config_file
01145         dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
01146         if self.need_update(elf, dependencies):
01147             needed_update = True
01148             self.progress("link", name)
01149             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
01150 
01151         if bin and self.need_update(bin, [elf]):
01152             needed_update = True
01153             self.progress("elf2bin", name)
01154             self.binary(r, elf, bin)
01155 
01156         # Initialize memap and process map file. This doesn't generate output.
01157         self.mem_stats(map)
01158 
01159         self.var("compile_succeded", True)
01160         self.var("binary", filename)
01161 
01162         return full_path, needed_update
01163 
01164     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01165     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01166     def default_cmd(self, command):
01167         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01168         self.debug("Return: %s"% _rc)
01169 
01170         for output_line in _stdout.splitlines():
01171             self.debug("Output: %s"% output_line)
01172         for error_line in _stderr.splitlines():
01173             self.debug("Errors: %s"% error_line)
01174 
01175         if _rc != 0:
01176             for line in _stderr.splitlines():
01177                 self.tool_error(line)
01178             raise ToolException(_stderr)
01179 
01180     ### NOTIFICATIONS ###
01181     def info(self, message):
01182         self.notify({'type': 'info', 'message': message})
01183 
01184     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01185     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01186     def debug(self, message):
01187         if self.VERBOSE:
01188             if isinstance(message, list):
01189                 message = ' '.join(message)
01190             message = "[DEBUG] " + message
01191             self.notify({'type': 'debug', 'message': message})
01192 
01193     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01194     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01195     def cc_info(self, info=None):
01196         if info is not None:
01197             info['type'] = 'cc'
01198             self.notify(info)
01199 
01200     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01201     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def cc_verbose(self, message, file=""):
        """Verbose compiler tracing; delegates to debug().  *file* is
        accepted for the online build system's override but unused here."""
        self.debug(message)
01204 
01205     def progress(self, action, file, build_update=False):
01206         msg = {'type': 'progress', 'action': action, 'file': file}
01207         if build_update:
01208             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01209         self.notify(msg)
01210 
01211     def tool_error(self, message):
01212         self.notify({'type': 'tool_error', 'message': message})
01213 
01214     def var(self, key, value):
01215         self.notify({'type': 'var', 'key': key, 'val': value})
01216 
01217     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01218     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01219     def mem_stats(self, map):
01220         """! Creates parser object
01221         @param map Path to linker map file to parse and decode
01222         @return None
01223         """
01224         toolchain = self.__class__.__name__
01225 
01226         # Create memap object
01227         memap = MemapParser()
01228 
01229         # Parse and decode a map file
01230         if memap.parse(abspath(map), toolchain) is False:
01231             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01232             return None
01233 
01234         # Store the memap instance for later use
01235         self.memap_instance = memap
01236 
01237         # Note: memory statistics are not returned.
01238         # Need call to generate_output later (depends on depth & output format)
01239 
01240         return None
01241 
01242     def add_regions(self):
01243         """Add regions to the build profile, if there are any.
01244         """
01245         print("Using regions in this build:")
01246         for region in self.config.regions:
01247             for define in [(region.name.upper() + "_ADDR", region.start),
01248                            (region.name.upper() + "_SIZE", region.size)]:
01249                 define_string = "-D%s=0x%x" %  define
01250                 self.cc.append(define_string)
01251                 self.cppc.append(define_string)
01252                 self.flags["common"].append(define_string)
01253             if region.active:
01254                 for define in [("MBED_APP_START", region.start),
01255                                ("MBED_APP_SIZE", region.size)]:
01256                     define_string = self.make_ld_define(*define)
01257                     self.ld.append(define_string)
01258                     self.flags["ld"].append(define_string)
01259             print("  Region %s size 0x%x, offset 0x%x"
01260                     % (region.name, region.size, region.start))
01261 
01262     # Set the configuration data
    def set_config_data(self, config_data):
        """Store *config_data* on the toolchain and, when the config
        declares memory regions, push the matching defines into the
        build flags via add_regions()."""
        self.config_data = config_data
        if self.config.has_regions:
            self.add_regions()
01267 
01268     # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not created (or is deleted if it exists).
    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
    # - if there is configuration data similar to the previous configuration data,
    #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overridden.
01274     # The function needs to be called exactly once for the lifetime of this toolchain instance.
01275     # The "config_processed" variable (below) ensures this behaviour.
01276     # The function returns the location of the configuration file, or None if there is no
01277     # configuration data available (and thus no configuration file)
    def get_config_header(self):
        """Create, update or remove mbed_config.h in the build directory.

        Returns the path of the configuration header, or None when there is
        no configuration data (and thus no header).  Sets self.build_all
        when a configuration change is detected.  The file is manipulated
        only on the first call; afterwards the cached location is returned.
        """
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "r") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = self.config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file
01315 
01316     def dump_build_profile(self):
01317         """Dump the current build profile and macros into the `.profile` file
01318         in the build directory"""
01319         for key in ["cxx", "c", "asm", "ld"]:
01320             to_dump = (str(self.flags[key]) + str(sorted(self.macros)))
01321             if key in ["cxx", "c"]:
01322                 to_dump += str(self.flags['common'])
01323             where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key)
01324             self._overwrite_when_not_equal(where, to_dump)
01325 
01326     @staticmethod
01327     def _overwrite_when_not_equal(filename, content):
01328         if not exists(filename) or content != open(filename).read():
01329             with open(filename, "w") as out:
01330                 out.write(content)
01331 
01332     @staticmethod
01333     def generic_check_executable(tool_key, executable_name, levels_up,
01334                                  nested_dir=None):
01335         """
01336         Positional args:
01337         tool_key: the key to index TOOLCHAIN_PATHS
01338         executable_name: the toolchain's named executable (ex. armcc)
01339         levels_up: each toolchain joins the toolchain_path, some
01340         variable directories (bin, include), and the executable name,
01341         so the TOOLCHAIN_PATH value must be appropriately distanced
01342 
01343         Keyword args:
01344         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01345           is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
01346           that will be used by toolchain's compile)
01347 
01348         Returns True if the executable location specified by the user
01349         exists and is valid OR the executable can be found on the PATH.
01350         Returns False otherwise.
01351         """
01352         # Search PATH if user did not specify a path or specified path doesn't
01353         # exist.
01354         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01355             exe = find_executable(executable_name)
01356             if not exe:
01357                 return False
01358             for level in range(levels_up):
01359                 # move up the specified number of directories
01360                 exe = dirname(exe)
01361             TOOLCHAIN_PATHS[tool_key] = exe
01362         if nested_dir:
01363             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01364                           executable_name)
01365         else:
01366             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01367         # User could have specified a path that exists but does not contain exe
01368         return exists(subdir) or exists(subdir +'.exe')
01369 
01370     @abstractmethod
01371     def check_executable(self):
01372         """Returns True if the executable (armcc) location specified by the
01373          user exists OR the executable can be found on the PATH.
01374          Returns False otherwise."""
01375         raise NotImplemented
01376 
01377     @abstractmethod
01378     def get_config_option(self, config_header):
01379         """Generate the compiler option that forces the inclusion of the configuration
01380         header file.
01381 
01382         Positional arguments:
01383         config_header -- The configuration header that will be included within all source files
01384 
01385         Return value:
01386         A list of the command line arguments that will force the inclusion the specified header
01387 
01388         Side effects:
01389         None
01390         """
01391         raise NotImplemented
01392 
01393     @abstractmethod
01394     def get_compile_options(self, defines, includes, for_asm=False):
01395         """Generate the compiler options from the defines and includes
01396 
01397         Positional arguments:
01398         defines -- The preprocessor macros defined on the command line
01399         includes -- The include file search paths
01400 
01401         Keyword arguments:
01402         for_asm -- generate the assembler options instead of the compiler options
01403 
01404         Return value:
01405         A list of the command line arguments that will force the inclusion the specified header
01406 
01407         Side effects:
01408         None
01409         """
01410         raise NotImplemented
01411 
01412     @abstractmethod
01413     def assemble(self, source, object, includes):
01414         """Generate the command line that assembles.
01415 
01416         Positional arguments:
01417         source -- a file path that is the file to assemble
01418         object -- a file path that is the destination object
01419         includes -- a list of all directories where header files may be found
01420 
01421         Return value:
01422         The complete command line, as a list, that would invoke the assembler
01423         on the source file, include all the include paths, and generate
01424         the specified object file.
01425 
01426         Side effects:
01427         None
01428 
01429         Note:
01430         This method should be decorated with @hook_tool.
01431         """
01432         raise NotImplemented
01433 
01434     @abstractmethod
01435     def compile_c(self, source, object, includes):
01436         """Generate the command line that compiles a C source file.
01437 
01438         Positional arguments:
01439         source -- the C source file to compile
01440         object -- the destination object file
01441         includes -- a list of all the directories where header files may be found
01442 
01443         Return value:
01444         The complete command line, as a list, that would invoke the C compiler
01445         on the source file, include all the include paths, and generate the
01446         specified object file.
01447 
01448         Side effects:
01449         None
01450 
01451         Note:
01452         This method should be decorated with @hook_tool.
01453         """
01454         raise NotImplemented
01455 
01456     @abstractmethod
01457     def compile_cpp(self, source, object, includes):
01458         """Generate the command line that compiles a C++ source file.
01459 
01460         Positional arguments:
01461         source -- the C++ source file to compile
01462         object -- the destination object file
01463         includes -- a list of all the directories where header files may be found
01464 
01465         Return value:
01466         The complete command line, as a list, that would invoke the C++ compiler
01467         on the source file, include all the include paths, and generate the
01468         specified object file.
01469 
01470         Side effects:
01471         None
01472 
01473         Note:
01474         This method should be decorated with @hook_tool.
01475         """
01476         raise NotImplemented
01477 
01478     @abstractmethod
01479     def link(self, output, objects, libraries, lib_dirs, mem_map):
01480         """Run the linker to create an executable and memory map.
01481 
01482         Positional arguments:
01483         output -- the file name to place the executable in
01484         objects -- all of the object files to link
01485         libraries -- all of the required libraries
01486         lib_dirs -- where the required libraries are located
01487         mem_map -- the location where the memory map file should be stored
01488 
01489         Return value:
01490         None
01491 
01492         Side effect:
01493         Runs the linker to produce the executable.
01494 
01495         Note:
01496         This method should be decorated with @hook_tool.
01497         """
01498         raise NotImplemented
01499 
01500     @abstractmethod
01501     def archive(self, objects, lib_path):
01502         """Run the command line that creates an archive.
01503 
01504         Positional arguhments:
01505         objects -- a list of all the object files that should be archived
01506         lib_path -- the file name of the resulting library file
01507 
01508         Return value:
01509         None
01510 
01511         Side effect:
01512         Runs the archiving tool to produce the library file.
01513 
01514         Note:
01515         This method should be decorated with @hook_tool.
01516         """
01517         raise NotImplemented
01518 
01519     @abstractmethod
01520     def binary(self, resources, elf, bin):
01521         """Run the command line that will Extract a simplified binary file.
01522 
01523         Positional arguments:
01524         resources -- A resources object (Is not used in any of the toolchains)
01525         elf -- the executable file that is to be converted
01526         bin -- the file name of the to be created simplified binary file
01527 
01528         Return value:
01529         None
01530 
01531         Side effect:
01532         Runs the elf2bin tool to produce the simplified binary file.
01533 
01534         Note:
01535         This method should be decorated with @hook_tool.
01536         """
01537         raise NotImplemented
01538 
01539     @staticmethod
01540     @abstractmethod
01541     def name_mangle(name):
01542         """Mangle a name based on the conventional name mangling of this toolchain
01543 
01544         Positional arguments:
01545         name -- the name to mangle
01546 
01547         Return:
01548         the mangled name as a string
01549         """
01550         raise NotImplemented
01551 
01552     @staticmethod
01553     @abstractmethod
01554     def make_ld_define(name, value):
01555         """Create an argument to the linker that would define a symbol
01556 
01557         Positional arguments:
01558         name -- the symbol to define
01559         value -- the value to give the symbol
01560 
01561         Return:
01562         The linker flag as a string
01563         """
01564         raise NotImplemented
01565 
01566     @staticmethod
01567     @abstractmethod
01568     def redirect_symbol(source, sync, build_dir):
01569         """Redirect a symbol at link time to point at somewhere else
01570 
01571         Positional arguments:
01572         source -- the symbol doing the pointing
01573         sync -- the symbol being pointed to
01574         build_dir -- the directory to put "response files" if needed by the toolchain
01575 
01576         Side Effects:
01577         Possibly create a file in the build directory
01578 
01579         Return:
01580         The linker flag to redirect the symbol, as a string
01581         """
01582         raise NotImplemented
01583 
01584     # Return the list of macros geenrated by the build system
01585     def get_config_macros(self):
01586         return self.config.config_to_macros(self.config_data) if self.config_data else []
01587 
01588     @property
01589     def report(self):
01590         to_ret = {}
01591         to_ret['c_compiler'] = {'flags': copy(self.flags['c']),
01592                                 'symbols': self.get_symbols()}
01593         to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']),
01594                                   'symbols': self.get_symbols()}
01595         to_ret['assembler'] = {'flags': copy(self.flags['asm']),
01596                                'symbols': self.get_symbols(True)}
01597         to_ret['linker'] = {'flags': copy(self.flags['ld'])}
01598         to_ret.update(self.config.report)
01599         return to_ret
01600 
# NOTE(review): these imports sit at the bottom of the module, presumably
# because tools.settings / tools.toolchains.* depend on names defined above
# (circular-import avoidance) -- confirm before moving them to the top.
from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH

# Toolchain name -> configured install path.  'ARM' and 'uARM' share the
# same ARM Compiler 5 installation.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,
    'ARMC6': ARMC6_PATH,
    'GCC_ARM': GCC_ARM_PATH,
    'IAR': IAR_PATH
}

from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6
from tools.toolchains.gcc import GCC_ARM
from tools.toolchains.iar import IAR

# Toolchain name -> class implementing it; keys mirror TOOLCHAIN_PATHS.
TOOLCHAIN_CLASSES = {
    u'ARM': ARM_STD,
    u'uARM': ARM_MICRO,
    u'ARMC6': ARMC6,
    u'GCC_ARM': GCC_ARM,
    u'IAR': IAR
}

# The set of all supported toolchain names.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())