Nathan Yonkee / Mbed 2 deprecated Nucleo_sinewave_output_copy

Dependencies:   mbed

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import sys
00021 from os import stat, walk, getcwd, sep, remove
00022 from copy import copy
00023 from time import time, sleep
00024 from shutil import copyfile
00025 from os.path import (join, splitext, exists, relpath, dirname, basename, split,
00026                      abspath, isfile, isdir, normcase)
00027 from itertools import chain
00028 from inspect import getmro
00029 from copy import deepcopy
00030 from abc import ABCMeta, abstractmethod
00031 from distutils.spawn import find_executable
00032 from multiprocessing import Pool, cpu_count
00033 from hashlib import md5
00034 import fnmatch
00035 
00036 from ..utils import (run_cmd, mkdir, rel_path, ToolException,
00037                     NotSupportedException, split_path, compile_worker)
00038 from ..settings import MBED_ORG_USER
00039 from .. import hooks
00040 from ..memap import MemapParser
00041 
00042 
# Disables multiprocessing if set to a higher number than the host machine's
# CPU count. CPU_COEF presumably scales the detected CPU count when sizing
# the build job pool -- its usage is outside this chunk, TODO confirm.
CPU_COUNT_MIN = 1
CPU_COEF = 1
00046 
class LazyDict(dict):
    """A dict-like container whose values may be deferred computations.

    Entries live in two internal maps: ``eager`` (already-computed values)
    and ``lazy`` (zero-argument thunks, evaluated and cached on first
    access).

    NOTE(review): although this subclasses ``dict``, the underlying dict
    storage is never used; inherited methods that are not overridden here
    (e.g. ``get``, ``keys``) see an empty mapping.
    """

    def __init__(self):
        self.eager = {}  # key -> computed value
        self.lazy = {}   # key -> thunk (callable of no arguments)

    def add_lazy(self, key, thunk):
        """Register *thunk* to compute the value for *key* on demand."""
        if key in self.eager:
            del self.eager[key]
        self.lazy[key] = thunk

    def __getitem__(self, key):
        # Force evaluation of a lazy entry on first access, then cache it
        # in eager and drop the thunk.
        if  (key not in self.eager
             and key in self.lazy):
            self.eager[key] = self.lazy[key]()
            del self.lazy[key]
        return self.eager[key]

    def __setitem__(self, key, value):
        self.eager[key] = value

    def __delitem__(self, key):
        if key in self.eager:
            del self.eager[key]
        else:
            del self.lazy[key]

    def __contains__(self, key):
        return key in self.eager or key in self.lazy

    def __iter__(self):
        return chain(iter(self.eager), iter(self.lazy))

    def __len__(self):
        return len(self.eager) + len(self.lazy)

    def __str__(self):
        return "Lazy{%s}" % (
            ", ".join("%r: %r" % (k, v) for k, v in
                      chain(self.eager.items(), ((k, "not evaluated")
                                                     for k in self.lazy))))

    def update(self, other):
        if isinstance(other, LazyDict):
            self.eager.update(other.eager)
            self.lazy.update(other.lazy)
        else:
            self.eager.update(other)

    def items(self):
        """Warning: This forces the evaluation all of the items in this LazyDict
        that are iterated over."""
        for k, v in self.eager.items():
            yield k, v
        # Iterate over a snapshot of the keys: accessing self[k] evaluates
        # the thunk and deletes it from self.lazy, and mutating a dict while
        # iterating its live key view raises RuntimeError on Python 3.
        for k in list(self.lazy):
            yield k, self[k]

    def values(self):
        """Force evaluation of every entry and yield the values.

        Without this override the inherited dict.values() is used, which is
        always empty because this class never stores anything in the
        underlying dict (e.g. Resources._collect_duplicates relied on it).
        """
        for _, v in self.items():
            yield v

    def apply(self, fn):
        """Delay the application of a computation to all items of the lazy dict.
        Does no computation now. Instead the computation is performed when a
        consumer attempts to access a value in this LazyDict"""
        new_lazy = {}
        for k, f in self.lazy.items():
            # Bind f as a default argument to avoid late-binding closures
            def closure(f=f):
                return fn(f())
            new_lazy[k] = closure
        for k, v in self.eager.items():
            def closure(v=v):
                return fn(v)
            new_lazy[k] = closure
        self.lazy = new_lazy
        self.eager = {}
00118 
class Resources:
    """Container for the files discovered while scanning a source tree.

    Collects include directories, headers, sources by language, libraries,
    linker script, repository metadata and auxiliary (hex/bin/json) files.
    Per-feature sub-resources are stored lazily in a LazyDict so that
    FEATURE_* directories are only scanned when actually needed.
    """

    # Attributes that hold collections of paths; relative_to() and
    # win_to_unix() rewrite every path stored in these fields.
    _PATH_FIELDS = ['inc_dirs', 'headers', 's_sources', 'c_sources',
                    'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                    'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                    'hex_files', 'bin_files', 'json_files']

    def __init__(self, base_path=None, collect_ignores=False):
        self.base_path = base_path
        self.collect_ignores = collect_ignores

        # Maps each discovered file/dir to the base path of the scan
        # that found it
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features: feature name -> lazily scanned Resources
        self.features = LazyDict()
        self.ignored_dirs = []

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def ignore_dir(self, directory):
        """Record *directory* as ignored (only when collect_ignores is set)."""
        if self.collect_ignores:
            self.ignored_dirs.append(directory)

    def add(self, resources):
        """Merge another Resources object into this one and return self."""
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last non-None linker script wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)
        self.ignored_dirs += resources.ignored_dirs

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate {object name -> source paths} into *dupe_dict* and
        {header basename -> header paths} into *dupe_headers*, recursing
        into feature sub-resources. Returns both mappings."""
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # Store the full path, not the basename: collecting the basename
            # made every set a singleton, so duplicate headers living in
            # different directories were never reported.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of colliding names found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"\
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %\
                    (headername, " ".join(locations)))
        return count


    def relative_to(self, base, dot=False):
        """Rewrite every stored path relative to *base* (prefixed with '.'
        when *dot* is set)."""
        for field in self._PATH_FIELDS:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        # Defer the conversion of feature sub-resources; bind base/dot as
        # defaults so the closure is not affected by later rebinding
        def to_apply(feature, base=base, dot=dot):
            feature.relative_to(base, dot)
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path from Windows to Unix separators."""
        for field in self._PATH_FIELDS:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        def to_apply(feature):
            feature.win_to_unix()
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00298 
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = {
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
    'ARMC6',
}

# Map each toolchain class name onto the directory suffix used by the
# legacy build system.
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD': 'ARM',
    'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM',
    'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
    'ARMC6': 'ARMC6',
}
00314 
00315 
class mbedToolchain:
    """Base class for mbed build toolchains.

    Holds target/toolchain preprocessor symbol generation, resource
    scanning with .mbedignore support, and incremental-build bookkeeping.
    Concrete toolchains (ARM/GCC_ARM/IAR, per comments below) subclass it.
    """

    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols defined for each supported Cortex core
    # (CMSIS core macro, DSP-library macro, RTOS/FPU/security markers)
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
        "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
    }

    # Name of the generated configuration header
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Name of the per-build profile dump file
    PROFILE_FILE_NAME = ".profile"

    # NOTE(review): Python-2 style metaclass declaration; it has no effect
    # on Python 3 (which requires `class mbedToolchain(metaclass=ABCMeta)`),
    # so the class is not actually abstract there.
    __metaclass__ = ABCMeta

    # Empty flag set used when no build profile is supplied to __init__
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00351 
    def __init__(self, target, notify=None, macros=None, silent=False,
                 extra_verbose=False, build_profile=None, build_dir=None):
        """Set up common toolchain state.

        Positional arguments:
        target - the target (board) object this toolchain builds for

        Keyword arguments:
        notify        - notification callback; see the comment block below
                        for the expected signature
        macros        - extra user-defined macros (list of strings)
        silent        - when True, notifications are not printed
        extra_verbose - select the verbose notification printer
        build_profile - per-language flag lists (same shape as
                        profile_template)
        build_dir     - build output directory
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep-copied so later mutation does not leak into
        # the caller's profile or the shared profile_template)
        self.flags = deepcopy(build_profile or self.profile_template)

        # System libraries provided by the toolchain
        self.sys_libs = []

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features;
        # None means "not computed yet" (filled lazily by get_symbols())
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = build_dir
        self.timestamp = time()

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files; the initial regex "$^"
        # can never match, i.e. nothing is ignored yet
        self.ignore_patterns = []
        self._ignore_regex = re.compile("$^")

        # Pre-mbed 2.0 ignore dirs
        # (TOOLCHAINS is defined elsewhere in this module)
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()

        # uVisor specific rules: strip a trailing FPU suffix from the core
        # name (e.g. "Cortex-M4F" -> "Cortex-M4") when uVisor is enabled
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00440 
00441     # Used for post __init__() hooks
00442     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00443     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00444     def init(self):
00445         return True
00446 
00447     def get_output(self):
00448         return self.output
00449 
00450     def print_notify(self, event, silent=False):
00451         """ Default command line notification
00452         """
00453         msg = None
00454 
00455         if not self.VERBOSE and event['type'] == 'tool_error':
00456             msg = event['message']
00457 
00458         elif event['type'] in ['info', 'debug']:
00459             msg = event['message']
00460 
00461         elif event['type'] == 'cc':
00462             event['severity'] = event['severity'].title()
00463             event['file'] = basename(event['file'])
00464             msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
00465 
00466         elif event['type'] == 'progress':
00467             if 'percent' in event:
00468                 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
00469                                                   event['percent'],
00470                                                   basename(event['file']))
00471             else:
00472                 msg = '{}: {}'.format(event['action'].title(),
00473                                       basename(event['file']))
00474 
00475         if msg:
00476             if not silent:
00477                 print(msg)
00478             self.output += msg + "\n"
00479 
00480     def print_notify_verbose(self, event, silent=False):
00481         """ Default command line notification with more verbose mode
00482         """
00483         if event['type'] in ['info', 'debug']:
00484             self.print_notify(event, silent=silent) # standard handle
00485 
00486         elif event['type'] == 'cc':
00487             event['severity'] = event['severity'].title()
00488             event['file'] = basename(event['file'])
00489             event['mcu_name'] = "None"
00490             event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
00491             event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
00492             msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
00493             if not silent:
00494                 print(msg)
00495             self.output += msg + "\n"
00496 
00497         elif event['type'] == 'progress':
00498             self.print_notify(event) # standard handle
00499 
00500     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00501     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00502     def notify(self, event):
00503         """ Little closure for notify functions
00504         """
00505         event['toolchain'] = self
00506         return self.notify_fun(event, self.silent)
00507 
    def get_symbols(self, for_asm=False):
        """Return the preprocessor symbols for the assembler (*for_asm*
        True) or the C/C++ compiler (default).

        The symbol list is computed once per toolchain instance and cached
        in self.asm_symbols / self.cxx_symbols. The returned list is
        deduplicated via set(), so its ordering is unspecified.
        """
        if for_asm:
            if self.asm_symbols is None:
                self.asm_symbols = []

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Add target's symbols
                self.asm_symbols += self.target.macros
                # Add extra symbols passed via 'macros' parameter
                self.asm_symbols += self.macros
            return list(set(self.asm_symbols))  # Return only unique symbols
        else:
            if self.cxx_symbols is None:
                # Target and Toolchain symbols
                labels = self.get_labels()
                self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
                self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Symbols defined by the on-line build system
                self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
                if MBED_ORG_USER:
                    self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

                # Add target's symbols
                self.cxx_symbols += self.target.macros
                # Add target's hardware
                self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
                # Add target's features
                self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
                # Add extra symbols passed via 'macros' parameter
                self.cxx_symbols += self.macros

                # Form factor variables
                if hasattr(self.target, 'supported_form_factors'):
                    self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

            return list(set(self.cxx_symbols))  # Return only unique symbols
00552 
00553     # Extend the internal list of macros
00554     def add_macros(self, new_macros):
00555         self.macros.extend(new_macros)
00556 
    def get_labels(self):
        """Compute (once) and cache the TARGET/FEATURE/TOOLCHAIN label sets
        used for selective builds, appending a DEBUG or RELEASE target
        label based on the common compile flags."""
        if self.labels is None:
            # Every class on the MRO except this base class contributes a
            # toolchain label (e.g. GCC_ARM and its parents)
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            self.labels = {
                'TARGET': self.target.labels,
                'FEATURE': self.target.features,
                'TOOLCHAIN': toolchain_labels
            }

            # This is a policy decision and it should /really/ be in the config system
            # ATM it's here for backward compatibility
            # NOTE(review): 'TARGET' aliases self.target.labels, so the
            # append below also mutates the target object -- confirm this
            # is intended before sharing a target between toolchains.
            if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
                 "-O0" in self.flags['common']) or
                ("-r" in self.flags['common'] and
                 "-On" in self.flags['common'])):
                self.labels['TARGET'].append("DEBUG")
            else:
                self.labels['TARGET'].append("RELEASE")
        return self.labels
00577 
00578 
00579     # Determine whether a source file needs updating/compiling
00580     def need_update(self, target, dependencies):
00581         if self.build_all:
00582             return True
00583 
00584         if not exists(target):
00585             return True
00586 
00587         target_mod_time = stat(target).st_mtime
00588 
00589         for d in dependencies:
00590             # Some objects are not provided with full path and here we do not have
00591             # information about the library paths. Safe option: assume an update
00592             if not d or not exists(d):
00593                 return True
00594 
00595             if d not in self.stat_cache:
00596                 self.stat_cache[d] = stat(d).st_mtime
00597 
00598             if self.stat_cache[d] >= target_mod_time:
00599                 return True
00600 
00601         return False
00602 
00603     def is_ignored(self, file_path):
00604         """Check if file path is ignored by any .mbedignore thus far"""
00605         return self._ignore_regex.match(normcase(file_path))
00606 
00607     def add_ignore_patterns(self, root, base_path, patterns):
00608         """Add a series of patterns to the ignored paths
00609 
00610         Positional arguments:
00611         root - the directory containing the ignore file
00612         base_path - the location that the scan started from
00613         patterns - the list of patterns we will ignore in the future
00614         """
00615         real_base = relpath(root, base_path)
00616         if real_base == ".":
00617             self.ignore_patterns.extend(normcase(p) for p in patterns)
00618         else:
00619             self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns)
00620         if self.ignore_patterns:
00621             self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns))
00622 
00623     # Create a Resources object from the path pointed to by *path* by either traversing a
00624     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00625     # when *path* is a file.
00626     # The parameter *base_path* is used to set the base_path attribute of the Resources
00627     # object and the parameter *exclude_paths* is used by the directory traversal to
00628     # exclude certain paths from the traversal.
00629     def scan_resources(self, path, exclude_paths=None, base_path=None,
00630                        collect_ignores=False):
00631         self.progress("scan", path)
00632 
00633         resources = Resources(path, collect_ignores=collect_ignores)
00634         if not base_path:
00635             if isfile(path):
00636                 base_path = dirname(path)
00637             else:
00638                 base_path = path
00639         resources.base_path = base_path
00640 
00641         if isfile(path):
00642             self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
00643         else:
00644             self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
00645         return resources
00646 
    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
    # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.add_ignore_patterns(root, base_path, lines)

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            # (join(root_path, "") appends a trailing separator so directory
            # patterns match the directory itself)
            root_path =join(relpath(root, base_path))
            if  (self.is_ignored(join(root_path,"")) or
                 self.build_dir == root_path):
                resources.ignore_dir(root_path)
                dirs[:] = []  # prune the walk below this directory
                continue

            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                # Slice offsets 7 and 10 strip the 'TARGET_'/'TOOLCHAIN_' prefixes
                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(relpath(root, base_path), d,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        resources.ignore_dir(dir_path)
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    # (d[8:] strips the 'FEATURE_' prefix; the default arguments
                    # bind the loop variables so each closure scans its own dir)
                    def closure (dir_path=dir_path, base_path=base_path):
                        return self.scan_resources(dir_path, base_path=base_path,
                                                   collect_ignores=resources.collect_ignores)
                    resources.features.add_lazy(d[8:], closure)
                    resources.ignore_dir(dir_path)
                    dirs.remove(d)
                elif exclude_paths:
                    # Prune any directory that falls under an excluded path
                    # (relpath not starting with '..' means it is inside it)
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            resources.ignore_dir(dir_path)
                            dirs.remove(d)
                            break

            # Add root to include paths
            root = root.rstrip("/")
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00723 
00724     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00725     # (*file_path*) to the resources object based on the file type.
00726     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00727         resources.file_basepath[file_path] = base_path
00728 
00729         if self.is_ignored(relpath(file_path, base_path)):
00730             return
00731 
00732         _, ext = splitext(file_path)
00733         ext = ext.lower()
00734 
00735         if   ext == '.s':
00736             resources.s_sources.append(file_path)
00737 
00738         elif ext == '.c':
00739             resources.c_sources.append(file_path)
00740 
00741         elif ext == '.cpp':
00742             resources.cpp_sources.append(file_path)
00743 
00744         elif ext == '.h' or ext == '.hpp':
00745             resources.headers.append(file_path)
00746 
00747         elif ext == '.o':
00748             resources.objects.append(file_path)
00749 
00750         elif ext == self.LIBRARY_EXT:
00751             resources.libraries.append(file_path)
00752             resources.lib_dirs.add(dirname(file_path))
00753 
00754         elif ext == self.LINKER_EXT:
00755             if resources.linker_script is not None:
00756                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00757             resources.linker_script = file_path
00758 
00759         elif ext == '.lib':
00760             resources.lib_refs.append(file_path)
00761 
00762         elif ext == '.bld':
00763             resources.lib_builds.append(file_path)
00764 
00765         elif basename(file_path) == '.hgignore':
00766             resources.repo_files.append(file_path)
00767 
00768         elif basename(file_path) == '.gitignore':
00769             resources.repo_files.append(file_path)
00770 
00771         elif ext == '.hex':
00772             resources.hex_files.append(file_path)
00773 
00774         elif ext == '.bin':
00775             resources.bin_files.append(file_path)
00776 
00777         elif ext == '.json':
00778             resources.json_files.append(file_path)
00779 
00780 
00781     def scan_repository(self, path):
00782         resources = []
00783 
00784         for root, dirs, files in walk(path):
00785             # Remove ignored directories
00786             for d in copy(dirs):
00787                 if d == '.' or d == '..':
00788                     dirs.remove(d)
00789 
00790             for file in files:
00791                 file_path = join(root, file)
00792                 resources.append(file_path)
00793 
00794         return resources
00795 
00796     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00797         # Handle a single file
00798         if not isinstance(files_paths, list):
00799             files_paths = [files_paths]
00800 
00801         for source in files_paths:
00802             if source is None:
00803                 files_paths.remove(source)
00804 
00805         for source in files_paths:
00806             if resources is not None and source in resources.file_basepath:
00807                 relative_path = relpath(source, resources.file_basepath[source])
00808             elif rel_path is not None:
00809                 relative_path = relpath(source, rel_path)
00810             else:
00811                 _, relative_path = split(source)
00812 
00813             target = join(trg_path, relative_path)
00814 
00815             if (target != source) and (self.need_update(target, [source])):
00816                 self.progress("copy", relative_path)
00817                 mkdir(dirname(target))
00818                 copyfile(source, target)
00819 
00820     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00821     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00822     def relative_object_path(self, build_path, base_dir, source):
00823         source_dir, name, _ = split_path(source)
00824 
00825         obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
00826         if obj_dir is not self.prev_dir:
00827             self.prev_dir = obj_dir
00828             mkdir(obj_dir)
00829         return join(obj_dir, name + '.o')
00830 
00831     # Generate response file for all includes.
00832     # ARM, GCC, IAR cross compatible
00833     def get_inc_file(self, includes):
00834         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00835         if not exists(include_file):
00836             with open(include_file, "w") as f:
00837                 cmd_list = []
00838                 for c in includes:
00839                     if c:
00840                         c = c.replace("\\", "/")
00841                         if self.CHROOT:
00842                             c = c.replace(self.CHROOT, '')
00843                         cmd_list.append('"-I%s"' % c)
00844                 string = " ".join(cmd_list)
00845                 f.write(string)
00846         return include_file
00847 
00848     # Generate response file for all objects when linking.
00849     # ARM, GCC, IAR cross compatible
00850     def get_link_file(self, cmd):
00851         link_file = join(self.build_dir, ".link_files.txt")
00852         with open(link_file, "w") as f:
00853             cmd_list = []
00854             for c in cmd:
00855                 if c:
00856                     c = c.replace("\\", "/")
00857                     if self.CHROOT:
00858                         c = c.replace(self.CHROOT, '')
00859                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00860             string = " ".join(cmd_list)
00861             f.write(string)
00862         return link_file
00863 
00864     # Generate response file for all objects when archiving.
00865     # ARM, GCC, IAR cross compatible
00866     def get_arch_file(self, objects):
00867         archive_file = join(self.build_dir, ".archive_files.txt")
00868         with open(archive_file, "w") as f:
00869             o_list = []
00870             for o in objects:
00871                 o_list.append('"%s"' % o)
00872             string = " ".join(o_list).replace("\\", "/")
00873             f.write(string)
00874         return archive_file
00875 
00876     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00877     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00878     def compile_sources(self, resources, inc_dirs=None):
00879         # Web IDE progress bar for project build
00880         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00881         self.to_be_compiled = len(files_to_compile)
00882         self.compiled = 0
00883 
00884         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00885 
00886         inc_paths = resources.inc_dirs
00887         if inc_dirs is not None:
00888             if isinstance(inc_dirs, list):
00889                 inc_paths.extend(inc_dirs)
00890             else:
00891                 inc_paths.append(inc_dirs)
00892         # De-duplicate include paths
00893         inc_paths = set(inc_paths)
00894         # Sort include paths for consistency
00895         inc_paths = sorted(set(inc_paths))
00896         # Unique id of all include paths
00897         self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest()
00898 
00899         objects = []
00900         queue = []
00901         work_dir = getcwd()
00902         self.prev_dir = None
00903 
00904         # Generate configuration header (this will update self.build_all if needed)
00905         self.get_config_header()
00906         self.dump_build_profile()
00907 
00908         # Sort compile queue for consistency
00909         files_to_compile.sort()
00910         for source in files_to_compile:
00911             object = self.relative_object_path(
00912                 self.build_dir, resources.file_basepath[source], source)
00913 
00914             # Queue mode (multiprocessing)
00915             commands = self.compile_command(source, object, inc_paths)
00916             if commands is not None:
00917                 queue.append({
00918                     'source': source,
00919                     'object': object,
00920                     'commands': commands,
00921                     'work_dir': work_dir,
00922                     'chroot': self.CHROOT
00923                 })
00924             else:
00925                 self.compiled += 1
00926                 objects.append(object)
00927 
00928         # Use queues/multiprocessing if cpu count is higher than setting
00929         jobs = self.jobs if self.jobs else cpu_count()
00930         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00931             return self.compile_queue(queue, objects)
00932         else:
00933             return self.compile_seq(queue, objects)
00934 
00935     # Compile source files queue in sequential order
00936     def compile_seq(self, queue, objects):
00937         for item in queue:
00938             result = compile_worker(item)
00939 
00940             self.compiled += 1
00941             self.progress("compile", item['source'], build_update=True)
00942             for res in result['results']:
00943                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00944                 self.compile_output([
00945                     res['code'],
00946                     res['output'],
00947                     res['command']
00948                 ])
00949             objects.append(result['object'])
00950         return objects
00951 
    # Compile source files queue in parallel by creating pool of worker threads
    def compile_queue(self, queue, objects):
        """Compile queued jobs on a multiprocessing Pool.

        Polls the async results, streaming diagnostics through
        compile_output as each job completes; returns the accumulated
        *objects* list. Raises ToolException on a compiler failure or
        when the poll loop times out.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
        p = Pool(processes=jobs_count)

        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))
        p.close()

        itr = 0
        while len(results):
            itr += 1
            if itr > 180000:
                p.terminate()
                p.join()
                # NOTE(review): 180000 iterations at a 0.01s sleep is ~30
                # minutes, not the 5 the message claims — confirm intent.
                raise ToolException("Compile did not finish in 5 minutes")

            sleep(0.01)
            pending = 0
            for r in results:
                if r.ready():
                    try:
                        result = r.get()
                        # NOTE(review): removing from `results` while
                        # iterating skips the next element; the enclosing
                        # while loop re-scans, so nothing is lost overall.
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException as err:
                        # Drain queued tasks (private Pool API) before
                        # terminate() so the pool shuts down promptly.
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    pending += 1
                    # Stop scanning once every worker slot is busy
                    if pending >= jobs_count:
                        break

        results = None
        p.join()

        return objects
01004 
01005     # Determine the compile command based on type of source file
01006     def compile_command(self, source, object, includes):
01007         # Check dependencies
01008         _, ext = splitext(source)
01009         ext = ext.lower()
01010 
01011         if ext == '.c' or  ext == '.cpp':
01012             base, _ = splitext(object)
01013             dep_path = base + '.d'
01014             try:
01015                 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
01016             except (IOError, IndexError):
01017                 deps = []
01018             config_file = ([self.config.app_config_location]
01019                            if self.config.app_config_location else [])
01020             deps.extend(config_file)
01021             if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01022                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx"))
01023             else:
01024                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c"))
01025             if len(deps) == 0 or self.need_update(object, deps):
01026                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01027                     return self.compile_cpp(source, object, includes)
01028                 else:
01029                     return self.compile_c(source, object, includes)
01030         elif ext == '.s':
01031             deps = [source]
01032             deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm"))
01033             if self.need_update(object, deps):
01034                 return self.assemble(source, object, includes)
01035         else:
01036             return False
01037 
01038         return None
01039 
01040     def parse_dependencies(self, dep_path):
01041         """Parse the dependency information generated by the compiler.
01042 
01043         Positional arguments:
01044         dep_path -- the path to a file generated by a previous run of the compiler
01045 
01046         Return value:
01047         A list of all source files that the dependency file indicated were dependencies
01048 
01049         Side effects:
01050         None
01051 
01052         Note: A default implementation is provided for make-like file formats
01053         """
01054         dependencies = []
01055         buff = open(dep_path).readlines()
01056         if buff:
01057             buff[0] = re.sub('^(.*?)\: ', '', buff[0])
01058             for line in buff:
01059                 filename = line.replace('\\\n', '').strip()
01060                 if filename:
01061                     filename = filename.replace('\\ ', '\a')
01062                     dependencies.extend(((self.CHROOT if self.CHROOT else '') +
01063                                          f.replace('\a', ' '))
01064                                         for f in filename.split(" "))
01065         return list(filter(None, dependencies))
01066 
01067     def is_not_supported_error(self, output):
01068         return "#error directive: [NOT_SUPPORTED]" in output
01069 
01070     @abstractmethod
01071     def parse_output(self, output):
01072         """Take in compiler output and extract sinlge line warnings and errors from it.
01073 
01074         Positional arguments:
01075         output -- a string of all the messages emitted by a run of the compiler
01076 
01077         Return value:
01078         None
01079 
01080         Side effects:
01081         call self.cc_info or self.notify with a description of the event generated by the compiler
01082         """
01083         raise NotImplemented
01084 
01085     def compile_output(self, output=[]):
01086         _rc = output[0]
01087         _stderr = output[1].decode("utf-8")
01088         command = output[2]
01089 
01090         # Parse output for Warnings and Errors
01091         self.parse_output(_stderr)
01092         self.debug("Return: %s"% _rc)
01093         for error_line in _stderr.splitlines():
01094             self.debug("Output: %s"% error_line)
01095 
01096         # Check return code
01097         if _rc != 0:
01098             if self.is_not_supported_error(_stderr):
01099                 raise NotSupportedException(_stderr)
01100             else:
01101                 raise ToolException(_stderr)
01102 
01103     def build_library(self, objects, dir, name):
01104         needed_update = False
01105         lib = self.STD_LIB_NAME % name
01106         fout = join(dir, lib)
01107         if self.need_update(fout, objects):
01108             self.info("Library: %s" % lib)
01109             self.archive(objects, fout)
01110             needed_update = True
01111 
01112         return needed_update
01113 
01114     def link_program(self, r, tmp_path, name):
01115         needed_update = False
01116         ext = 'bin'
01117         if hasattr(self.target, 'OUTPUT_EXT'):
01118             ext = self.target.OUTPUT_EXT
01119 
01120         if hasattr(self.target, 'OUTPUT_NAMING'):
01121             self.var("binary_naming", self.target.OUTPUT_NAMING)
01122             if self.target.OUTPUT_NAMING == "8.3":
01123                 name = name[0:8]
01124                 ext = ext[0:3]
01125 
01126         # Create destination directory
01127         head, tail =  split(name)
01128         new_path = join(tmp_path, head)
01129         mkdir(new_path)
01130 
01131         filename = name+'.'+ext
01132         elf = join(tmp_path, name + '.elf')
01133         bin = None if ext is 'elf' else join(tmp_path, filename)
01134         map = join(tmp_path, name + '.map')
01135 
01136         r.objects = sorted(set(r.objects))
01137         config_file = ([self.config.app_config_location]
01138                        if self.config.app_config_location else [])
01139         dependencies = r.objects + r.libraries + [r.linker_script] + config_file
01140         dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
01141         if self.need_update(elf, dependencies):
01142             needed_update = True
01143             self.progress("link", name)
01144             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
01145 
01146         if bin and self.need_update(bin, [elf]):
01147             needed_update = True
01148             self.progress("elf2bin", name)
01149             self.binary(r, elf, bin)
01150 
01151         # Initialize memap and process map file. This doesn't generate output.
01152         self.mem_stats(map)
01153 
01154         self.var("compile_succeded", True)
01155         self.var("binary", filename)
01156 
01157         return bin, needed_update
01158 
01159     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01160     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01161     def default_cmd(self, command):
01162         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01163         self.debug("Return: %s"% _rc)
01164 
01165         for output_line in _stdout.splitlines():
01166             self.debug("Output: %s"% output_line)
01167         for error_line in _stderr.splitlines():
01168             self.debug("Errors: %s"% error_line)
01169 
01170         if _rc != 0:
01171             for line in _stderr.splitlines():
01172                 self.tool_error(line)
01173             raise ToolException(_stderr)
01174 
01175     ### NOTIFICATIONS ###
01176     def info(self, message):
01177         self.notify({'type': 'info', 'message': message})
01178 
01179     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01180     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01181     def debug(self, message):
01182         if self.VERBOSE:
01183             if isinstance(message, list):
01184                 message = ' '.join(message)
01185             message = "[DEBUG] " + message
01186             self.notify({'type': 'debug', 'message': message})
01187 
01188     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01189     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01190     def cc_info(self, info=None):
01191         if info is not None:
01192             info['type'] = 'cc'
01193             self.notify(info)
01194 
01195     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01196     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01197     def cc_verbose(self, message, file=""):
01198         self.debug(message)
01199 
01200     def progress(self, action, file, build_update=False):
01201         msg = {'type': 'progress', 'action': action, 'file': file}
01202         if build_update:
01203             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01204         self.notify(msg)
01205 
01206     def tool_error(self, message):
01207         self.notify({'type': 'tool_error', 'message': message})
01208 
01209     def var(self, key, value):
01210         self.notify({'type': 'var', 'key': key, 'val': value})
01211 
01212     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01213     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01214     def mem_stats(self, map):
01215         """! Creates parser object
01216         @param map Path to linker map file to parse and decode
01217         @return None
01218         """
01219         toolchain = self.__class__.__name__
01220 
01221         # Create memap object
01222         memap = MemapParser()
01223 
01224         # Parse and decode a map file
01225         if memap.parse(abspath(map), toolchain) is False:
01226             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01227             return None
01228 
01229         # Store the memap instance for later use
01230         self.memap_instance = memap
01231 
01232         # Note: memory statistics are not returned.
01233         # Need call to generate_output later (depends on depth & output format)
01234 
01235         return None
01236 
01237     def add_regions(self):
01238         """Add regions to the build profile, if there are any.
01239         """
01240         print("Using regions in this build:")
01241         for region in self.config.regions:
01242             for define in [(region.name.upper() + "_ADDR", region.start),
01243                            (region.name.upper() + "_SIZE", region.size)]:
01244                 define_string = "-D%s=0x%x" %  define
01245                 self.cc.append(define_string)
01246                 self.cppc.append(define_string)
01247                 self.flags["common"].append(define_string)
01248             if region.active:
01249                 for define in [("MBED_APP_START", region.start),
01250                                ("MBED_APP_SIZE", region.size)]:
01251                     define_string = self.make_ld_define(*define)
01252                     self.ld.append(define_string)
01253                     self.flags["ld"].append(define_string)
01254             print("  Region %s size 0x%x, offset 0x%x"
01255                     % (region.name, region.size, region.start))
01256 
01257     # Set the configuration data
01258     def set_config_data(self, config_data):
01259         self.config_data = config_data
01260         if self.config.has_regions:
01261             self.add_regions()
01262 
    # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not create (or deleted if it exists).
    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
    # - if there is configuration data similar to the previous configuration data,
    #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overriden.
    # The function needs to be called exactly once for the lifetime of this toolchain instance.
    # The "config_processed" variable (below) ensures this behaviour.
    # The function returns the location of the configuration file, or None if there is no
    # configuration data available (and thus no configuration file)
    def get_config_header(self):
        """Create/refresh mbed_config.h in the build dir and return its path.

        Returns None when there is no configuration data. Sets
        self.build_all when the configuration changed, so dependents get
        rebuilt. Idempotent: subsequent calls only return the cached path.
        """
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "r") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = self.config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file
01310 
01311     def dump_build_profile(self):
01312         """Dump the current build profile and macros into the `.profile` file
01313         in the build directory"""
01314         for key in ["cxx", "c", "asm", "ld"]:
01315             to_dump = (str(self.flags[key]) + str(sorted(self.macros)))
01316             if key in ["cxx", "c"]:
01317                 to_dump += str(self.flags['common'])
01318             where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key)
01319             self._overwrite_when_not_equal(where, to_dump)
01320 
01321     @staticmethod
01322     def _overwrite_when_not_equal(filename, content):
01323         if not exists(filename) or content != open(filename).read():
01324             with open(filename, "w") as out:
01325                 out.write(content)
01326 
    @staticmethod
    def generic_check_executable(tool_key, executable_name, levels_up,
                                 nested_dir=None):
        """
        Positional args:
        tool_key: the key to index TOOLCHAIN_PATHS
        executable_name: the toolchain's named executable (ex. armcc)
        levels_up: each toolchain joins the toolchain_path, some
        variable directories (bin, include), and the executable name,
        so the TOOLCHAIN_PATH value must be appropriately distanced

        Keyword args:
        nested_dir: the directory within TOOLCHAIN_PATHS where the executable
          is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
          that will be used by toolchain's compile)

        Returns True if the executable location specified by the user
        exists and is valid OR the executable can be found on the PATH.
        Returns False otherwise.
        """
        # Search PATH if user did not specify a path or specified path doesn't
        # exist.
        # NOTE(review): TOOLCHAIN_PATHS is a module-level mapping defined
        # elsewhere in this package; this method mutates it as a side effect.
        if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
            exe = find_executable(executable_name)
            if not exe:
                return False
            for level in range(levels_up):
                # move up the specified number of directories
                exe = dirname(exe)
            TOOLCHAIN_PATHS[tool_key] = exe
        if nested_dir:
            subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
                          executable_name)
        else:
            subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
        # User could have specified a path that exists but does not contain exe
        # (Windows installs may only carry the '.exe' variant on disk)
        return exists(subdir) or exists(subdir +'.exe')
01364 
01365     @abstractmethod
01366     def check_executable(self):
01367         """Returns True if the executable (armcc) location specified by the
01368          user exists OR the executable can be found on the PATH.
01369          Returns False otherwise."""
01370         raise NotImplemented
01371 
01372     @abstractmethod
01373     def get_config_option(self, config_header):
01374         """Generate the compiler option that forces the inclusion of the configuration
01375         header file.
01376 
01377         Positional arguments:
01378         config_header -- The configuration header that will be included within all source files
01379 
01380         Return value:
01381         A list of the command line arguments that will force the inclusion the specified header
01382 
01383         Side effects:
01384         None
01385         """
01386         raise NotImplemented
01387 
01388     @abstractmethod
01389     def get_compile_options(self, defines, includes, for_asm=False):
01390         """Generate the compiler options from the defines and includes
01391 
01392         Positional arguments:
01393         defines -- The preprocessor macros defined on the command line
01394         includes -- The include file search paths
01395 
01396         Keyword arguments:
01397         for_asm -- generate the assembler options instead of the compiler options
01398 
01399         Return value:
01400         A list of the command line arguments that will force the inclusion the specified header
01401 
01402         Side effects:
01403         None
01404         """
01405         raise NotImplemented
01406 
01407     @abstractmethod
01408     def assemble(self, source, object, includes):
01409         """Generate the command line that assembles.
01410 
01411         Positional arguments:
01412         source -- a file path that is the file to assemble
01413         object -- a file path that is the destination object
01414         includes -- a list of all directories where header files may be found
01415 
01416         Return value:
01417         The complete command line, as a list, that would invoke the assembler
01418         on the source file, include all the include paths, and generate
01419         the specified object file.
01420 
01421         Side effects:
01422         None
01423 
01424         Note:
01425         This method should be decorated with @hook_tool.
01426         """
01427         raise NotImplemented
01428 
01429     @abstractmethod
01430     def compile_c(self, source, object, includes):
01431         """Generate the command line that compiles a C source file.
01432 
01433         Positional arguments:
01434         source -- the C source file to compile
01435         object -- the destination object file
01436         includes -- a list of all the directories where header files may be found
01437 
01438         Return value:
01439         The complete command line, as a list, that would invoke the C compiler
01440         on the source file, include all the include paths, and generate the
01441         specified object file.
01442 
01443         Side effects:
01444         None
01445 
01446         Note:
01447         This method should be decorated with @hook_tool.
01448         """
01449         raise NotImplemented
01450 
01451     @abstractmethod
01452     def compile_cpp(self, source, object, includes):
01453         """Generate the command line that compiles a C++ source file.
01454 
01455         Positional arguments:
01456         source -- the C++ source file to compile
01457         object -- the destination object file
01458         includes -- a list of all the directories where header files may be found
01459 
01460         Return value:
01461         The complete command line, as a list, that would invoke the C++ compiler
01462         on the source file, include all the include paths, and generate the
01463         specified object file.
01464 
01465         Side effects:
01466         None
01467 
01468         Note:
01469         This method should be decorated with @hook_tool.
01470         """
01471         raise NotImplemented
01472 
01473     @abstractmethod
01474     def link(self, output, objects, libraries, lib_dirs, mem_map):
01475         """Run the linker to create an executable and memory map.
01476 
01477         Positional arguments:
01478         output -- the file name to place the executable in
01479         objects -- all of the object files to link
01480         libraries -- all of the required libraries
01481         lib_dirs -- where the required libraries are located
01482         mem_map -- the location where the memory map file should be stored
01483 
01484         Return value:
01485         None
01486 
01487         Side effect:
01488         Runs the linker to produce the executable.
01489 
01490         Note:
01491         This method should be decorated with @hook_tool.
01492         """
01493         raise NotImplemented
01494 
01495     @abstractmethod
01496     def archive(self, objects, lib_path):
01497         """Run the command line that creates an archive.
01498 
01499         Positional arguhments:
01500         objects -- a list of all the object files that should be archived
01501         lib_path -- the file name of the resulting library file
01502 
01503         Return value:
01504         None
01505 
01506         Side effect:
01507         Runs the archiving tool to produce the library file.
01508 
01509         Note:
01510         This method should be decorated with @hook_tool.
01511         """
01512         raise NotImplemented
01513 
01514     @abstractmethod
01515     def binary(self, resources, elf, bin):
01516         """Run the command line that will Extract a simplified binary file.
01517 
01518         Positional arguments:
01519         resources -- A resources object (Is not used in any of the toolchains)
01520         elf -- the executable file that is to be converted
01521         bin -- the file name of the to be created simplified binary file
01522 
01523         Return value:
01524         None
01525 
01526         Side effect:
01527         Runs the elf2bin tool to produce the simplified binary file.
01528 
01529         Note:
01530         This method should be decorated with @hook_tool.
01531         """
01532         raise NotImplemented
01533 
01534     @staticmethod
01535     @abstractmethod
01536     def name_mangle(name):
01537         """Mangle a name based on the conventional name mangling of this toolchain
01538 
01539         Positional arguments:
01540         name -- the name to mangle
01541 
01542         Return:
01543         the mangled name as a string
01544         """
01545         raise NotImplemented
01546 
01547     @staticmethod
01548     @abstractmethod
01549     def make_ld_define(name, value):
01550         """Create an argument to the linker that would define a symbol
01551 
01552         Positional arguments:
01553         name -- the symbol to define
01554         value -- the value to give the symbol
01555 
01556         Return:
01557         The linker flag as a string
01558         """
01559         raise NotImplemented
01560 
01561     @staticmethod
01562     @abstractmethod
01563     def redirect_symbol(source, sync, build_dir):
01564         """Redirect a symbol at link time to point at somewhere else
01565 
01566         Positional arguments:
01567         source -- the symbol doing the pointing
01568         sync -- the symbol being pointed to
01569         build_dir -- the directory to put "response files" if needed by the toolchain
01570 
01571         Side Effects:
01572         Possibly create a file in the build directory
01573 
01574         Return:
01575         The linker flag to redirect the symbol, as a string
01576         """
01577         raise NotImplemented
01578 
01579     # Return the list of macros geenrated by the build system
01580     def get_config_macros(self):
01581         return self.config.config_to_macros(self.config_data) if self.config_data else []
01582 
01583     @property
01584     def report(self):
01585         to_ret = {}
01586         to_ret['c_compiler'] = {'flags': copy(self.flags['c']),
01587                                 'symbols': self.get_symbols()}
01588         to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']),
01589                                   'symbols': self.get_symbols()}
01590         to_ret['assembler'] = {'flags': copy(self.flags['asm']),
01591                                'symbols': self.get_symbols(True)}
01592         to_ret['linker'] = {'flags': copy(self.flags['ld'])}
01593         to_ret.update(self.config.report)
01594         return to_ret
01595 
from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH

# Map from toolchain name to its user-configured installation path.
# 'ARM' and 'uARM' share the same ARM Compiler installation (ARM_PATH);
# the micro variant differs only in the toolchain class used, not location.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,
    'ARMC6': ARMC6_PATH,
    'GCC_ARM': GCC_ARM_PATH,
    'IAR': IAR_PATH
}
01605 
from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6
from tools.toolchains.gcc import GCC_ARM
from tools.toolchains.iar import IAR

# Map from toolchain name to the concrete toolchain class implementing it.
# Keys are unicode literals so lookups work when names arrive as unicode
# (e.g. from parsed JSON target definitions).
TOOLCHAIN_CLASSES = {
    u'ARM': ARM_STD,
    u'uARM': ARM_MICRO,
    u'ARMC6': ARMC6,
    u'GCC_ARM': GCC_ARM,
    u'IAR': IAR
}

# Set of all supported toolchain names, derived from the class map above.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())