Nicolas Borla / Mbed OS BBR_1Ebene
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import sys
00021 from os import stat, walk, getcwd, sep, remove
00022 from copy import copy
00023 from time import time, sleep
00024 from shutil import copyfile
00025 from os.path import (join, splitext, exists, relpath, dirname, basename, split,
00026                      abspath, isfile, isdir, normcase)
00027 from itertools import chain
00028 from inspect import getmro
00029 from copy import deepcopy
00030 from abc import ABCMeta, abstractmethod
00031 from distutils.spawn import find_executable
00032 from multiprocessing import Pool, cpu_count
00033 from hashlib import md5
00034 import fnmatch
00035 
00036 from ..utils import (run_cmd, mkdir, rel_path, ToolException,
00037                     NotSupportedException, split_path, compile_worker)
00038 from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK
00039 from .. import hooks
00040 from ..notifier.term import TerminalNotifier
00041 from ..memap import MemapParser
00042 
00043 
# Disables multiprocessing if set to a higher number than the host machine CPUs
CPU_COUNT_MIN = 1
# Multiplier applied to the detected CPU count when sizing the compile pool
CPU_COEF = 1
00047 
class LazyDict(dict):
    """Mapping whose values may be supplied eagerly or as zero-argument
    thunks that are evaluated at most once, on first access.

    NOTE(review): although this subclasses dict, all storage lives in the
    ``eager``/``lazy`` attributes, so inherited dict methods that are not
    overridden here (e.g. ``get``, ``keys``) see the empty underlying dict.
    Confirm callers only use the overridden API.
    """

    def __init__(self):
        # key -> already-computed value
        self.eager = {}
        # key -> thunk that produces the value on demand
        self.lazy = {}

    def add_lazy(self, key, thunk):
        """Register *thunk* to compute the value for *key* on first access."""
        if key in self.eager:
            del self.eager[key]
        self.lazy[key] = thunk

    def __getitem__(self, key):
        # Force a pending thunk exactly once, then serve from eager.
        if  (key not in self.eager
             and key in self.lazy):
            self.eager[key] = self.lazy[key]()
            del self.lazy[key]
        return self.eager[key]

    def __setitem__(self, key, value):
        self.eager[key] = value

    def __delitem__(self, key):
        if key in self.eager:
            del self.eager[key]
        else:
            del self.lazy[key]

    def __contains__(self, key):
        return key in self.eager or key in self.lazy

    def __iter__(self):
        return chain(iter(self.eager), iter(self.lazy))

    def __len__(self):
        return len(self.eager) + len(self.lazy)

    def __str__(self):
        return "Lazy{%s}" % (
            ", ".join("%r: %r" % (k, v) for k, v in
                      chain(self.eager.items(), ((k, "not evaluated")
                                                     for k in self.lazy))))

    def update(self, other):
        """Merge *other* into this dict; LazyDicts keep their thunks lazy."""
        if isinstance(other, LazyDict):
            self.eager.update(other.eager)
            self.lazy.update(other.lazy)
        else:
            self.eager.update(other)

    def items(self):
        """Warning: This forces the evaluation all of the items in this LazyDict
        that are iterated over."""
        for k, v in self.eager.items():
            yield k, v
        # Iterate over a snapshot of the keys: self[k] deletes the entry from
        # self.lazy, and mutating a dict while iterating its live keys() view
        # raises RuntimeError on Python 3.
        for k in list(self.lazy):
            yield k, self[k]

    def apply(self, fn):
        """Delay the application of a computation to all items of the lazy dict.
        Does no computation now. Instead the computation is performed when a
        consumer attempts to access a value in this LazyDict"""
        new_lazy = {}
        for k, f in self.lazy.items():
            def closure(f=f):  # bind f now to dodge late-binding capture
                return fn(f())
            new_lazy[k] = closure
        for k, v in self.eager.items():
            def closure(v=v):  # same trick for already-computed values
                return fn(v)
            new_lazy[k] = closure
        self.lazy = new_lazy
        self.eager = {}
00119 
class Resources:
    """Collection of files discovered while scanning a source tree.

    Files are bucketed by type (headers, C/C++/assembly sources, objects,
    libraries, linker script, ...) so a toolchain can compile and link them.
    FEATURE_* subtrees live in ``self.features`` (a LazyDict mapping feature
    name -> Resources) and are only evaluated when needed.
    """

    def __init__(self, base_path=None, collect_ignores=False):
        self.base_path = base_path
        self.collect_ignores = collect_ignores

        # file path -> base path it was discovered under; consumed by
        # rewrite_basepath()/subtract_basepath() during project export
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features
        self.features = LazyDict()
        self.ignored_dirs = []

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def ignore_dir(self, directory):
        """Record *directory* as ignored (only when collect_ignores is on)."""
        if self.collect_ignores:
            self.ignored_dirs.append(directory)

    def add(self, resources):
        """Merge another Resources object into this one in place.

        Returns self so that ``a + b`` chains naturally.
        """
        self.file_basepath.update(resources.file_basepath)

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last linker script wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)
        self.ignored_dirs += resources.ignored_dirs

        return self

    def rewrite_basepath(self, file_name, export_path, loc):
        """ Replace the basepath of filename with export_path

        Positional arguments:
        file_name - the absolute path to a file
        export_path - the final destination of the file after export
        loc - the prefix directory joined in front of the relative path
        """
        new_f = join(loc, relpath(file_name, self.file_basepath[file_name]))
        self.file_basepath[new_f] = export_path
        return new_f

    def subtract_basepath(self, export_path, loc=""):
        """ Rewrite all of the basepaths with the export_path

        Positional arguments:
        export_path - the final destination of the resources with respect to the
        generated project files

        Keyword arguments:
        loc - prefix passed through to rewrite_basepath
        """
        keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
                'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script',
                'lib_dirs']
        for key in keys:
            vals = getattr(self, key)
            if isinstance(vals, set):
                vals = list(vals)
            if isinstance(vals, list):
                new_vals = []
                for val in vals:
                    new_vals.append(self.rewrite_basepath(
                        val, export_path, loc))
                if isinstance(getattr(self, key), set):
                    setattr(self, key, set(new_vals))
                else:
                    setattr(self, key, new_vals)
            elif vals:
                # Scalar attribute (e.g. linker_script)
                setattr(self, key, self.rewrite_basepath(
                    vals, export_path, loc))
        # Defer the same rewrite to lazily-evaluated feature resources
        def closure(res, export_path=export_path, loc=loc):
            res.subtract_basepath(export_path, loc)
            return res
        self.features.apply(closure)

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate object-name and header-name collisions (recursing into
        feature resources) into the two supplied dicts and return them."""
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # Record the full path (not the basename): storing the basename
            # kept every set at size one, so duplicate headers in different
            # directories were never detected.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of colliding names found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count+=1
                toolchain.notify.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"\
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count+=1
                toolchain.notify.tool_error(
                    "Header file %s is not unique! It could be: %s" %\
                    (headername, " ".join(locations)))
        return count


    def relative_to(self, base, dot=False):
        """Rewrite every stored path relative to *base* (recursing into
        features); *dot* prefixes results with './' per rel_path."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        def to_apply(feature, base=base, dot=dot):
            feature.relative_to(base, dot)
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path from backslash to forward-slash form
        (recursing into features)."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        def to_apply(feature):
            feature.win_to_unix()
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00340 
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
    'ARMC6'
])
# Map toolchain class names onto the legacy on-disk directory suffix
# (e.g. sources for the ARM_MICRO toolchain live under TOOLCHAIN_uARM).
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
    'ARMC6': 'ARMC6',
}
00356 
00357 
class mbedToolchain:
    """Abstract base class for all supported toolchains (ARM, GCC_ARM, IAR,
    ...): resource scanning, symbol/label generation and build bookkeeping."""

    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols injected per CPU core (CMSIS/RTOS/FPU defines)
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
        "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
    }

    # Name of the generated configuration header
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Per-build-dir file recording the profile/flags used for the last build
    PROFILE_FILE_NAME = ".profile"

    # NOTE(review): Python-2-style metaclass declaration; on Python 3 this
    # assignment has no effect, so the ABC machinery is inert there -- confirm
    # whether that matters for this code base.
    __metaclass__ = ABCMeta

    # Default (empty) flag set, cloned per instance in __init__
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00395 
    def __init__(self, target, notify=None, macros=None, build_profile=None,
                 build_dir=None):
        """Set up toolchain state for *target*.

        Positional arguments:
        target - the Target object being built for

        Keyword arguments:
        notify - notifier object receiving build events (see comment below);
                 defaults to a TerminalNotifier
        macros - extra user-defined macros added to the symbol lists
        build_profile - flag dict in the shape of profile_template
        build_dir - build output directory
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep-copied so per-instance mutation is safe)
        self.flags = deepcopy(build_profile or self.profile_template)

        # System libraries provided by the toolchain
        self.sys_libs = []

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = build_dir
        self.timestamp = time()

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files ("$^" matches nothing)
        self.ignore_patterns = []
        self._ignore_regex = re.compile("$^")

        # Pre-mbed 2.0 ignore dirs
        # NOTE(review): TOOLCHAINS is not defined in this portion of the
        # module -- presumably provided elsewhere in the package; verify.
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify = notify
        else:
            self.notify = TerminalNotifier()

        # uVisor-specific rules: drop the FPU suffix from the core name
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00476 
    # Used for post __init__() hooks
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def init(self):
        """Post-__init__ extension point; deliberately a no-op here."""
        return True
00482 
00483     def get_output(self):
00484         return self.notifier.get_output()
00485 
00486     def get_symbols(self, for_asm=False):
00487         if for_asm:
00488             if self.asm_symbols is None:
00489                 self.asm_symbols = []
00490 
00491                 # Cortex CPU symbols
00492                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00493                     self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00494 
00495                 # Add target's symbols
00496                 self.asm_symbols += self.target.macros
00497                 # Add extra symbols passed via 'macros' parameter
00498                 self.asm_symbols += self.macros
00499             return list(set(self.asm_symbols))  # Return only unique symbols
00500         else:
00501             if self.cxx_symbols is None:
00502                 # Target and Toolchain symbols
00503                 labels = self.get_labels()
00504                 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
00505                 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
00506 
00507                 # Cortex CPU symbols
00508                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00509                     self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00510 
00511                 # Symbols defined by the on-line build.system
00512                 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
00513                 if MBED_ORG_USER:
00514                     self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
00515 
00516                 # Add target's symbols
00517                 self.cxx_symbols += self.target.macros
00518                 # Add target's hardware
00519                 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
00520                 # Add target's features
00521                 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
00522                 # Add extra symbols passed via 'macros' parameter
00523                 self.cxx_symbols += self.macros
00524 
00525                 # Form factor variables
00526                 if hasattr(self.target, 'supported_form_factors'):
00527                     self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
00528 
00529             return list(set(self.cxx_symbols))  # Return only unique symbols
00530 
00531     # Extend the internal list of macros
00532     def add_macros(self, new_macros):
00533         self.macros.extend(new_macros)
00534 
    def get_labels(self):
        """Return (and cache) the label sets used for selective builds.

        The result maps 'TARGET', 'FEATURE' and 'TOOLCHAIN' to lists of
        labels; a DEBUG or RELEASE label is appended to 'TARGET' based on
        the common compiler flags.
        """
        if self.labels is None:
            # Toolchain labels come from the class hierarchy (minus the base)
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            # NOTE(review): 'TARGET' aliases self.target.labels (the same list
            # object), so the append below also mutates the target's label
            # list -- confirm this side effect is intended.
            self.labels = {
                'TARGET': self.target.labels,
                'FEATURE': self.target.features,
                'TOOLCHAIN': toolchain_labels
            }

            # This is a policy decision and it should /really/ be in the config system
            # ATM it's here for backward compatibility
            if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
                 "-O0" in self.flags['common']) or
                ("-r" in self.flags['common'] and
                 "-On" in self.flags['common'])):
                self.labels['TARGET'].append("DEBUG")
            else:
                self.labels['TARGET'].append("RELEASE")
        return self.labels
00555 
00556 
00557     # Determine whether a source file needs updating/compiling
00558     def need_update(self, target, dependencies):
00559         if self.build_all:
00560             return True
00561 
00562         if not exists(target):
00563             return True
00564 
00565         target_mod_time = stat(target).st_mtime
00566 
00567         for d in dependencies:
00568             # Some objects are not provided with full path and here we do not have
00569             # information about the library paths. Safe option: assume an update
00570             if not d or not exists(d):
00571                 return True
00572 
00573             if d not in self.stat_cache:
00574                 self.stat_cache[d] = stat(d).st_mtime
00575 
00576             if self.stat_cache[d] >= target_mod_time:
00577                 return True
00578 
00579         return False
00580 
00581     def is_ignored(self, file_path):
00582         """Check if file path is ignored by any .mbedignore thus far"""
00583         return self._ignore_regex.match(normcase(file_path))
00584 
00585     def add_ignore_patterns(self, root, base_path, patterns):
00586         """Add a series of patterns to the ignored paths
00587 
00588         Positional arguments:
00589         root - the directory containing the ignore file
00590         base_path - the location that the scan started from
00591         patterns - the list of patterns we will ignore in the future
00592         """
00593         real_base = relpath(root, base_path)
00594         if real_base == ".":
00595             self.ignore_patterns.extend(normcase(p) for p in patterns)
00596         else:
00597             self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns)
00598         if self.ignore_patterns:
00599             self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns))
00600 
00601     # Create a Resources object from the path pointed to by *path* by either traversing a
00602     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00603     # when *path* is a file.
00604     # The parameter *base_path* is used to set the base_path attribute of the Resources
00605     # object and the parameter *exclude_paths* is used by the directory traversal to
00606     # exclude certain paths from the traversal.
    def scan_resources(self, path, exclude_paths=None, base_path=None,
                       collect_ignores=False):
        """Create a Resources object from *path*.

        If *path* is a file it is added directly; if it is a directory the
        tree is traversed (honoring .mbedignore files along the way).

        Positional arguments:
        path - file or directory to scan

        Keyword arguments:
        exclude_paths - directories excluded during directory traversal
        base_path - value for the Resources.base_path attribute; defaults to
                    *path* itself (or its parent directory when *path* is a
                    file)
        collect_ignores - when True, record ignored directories on the
                          returned Resources object

        NOTE(review): self.progress is not defined in this portion of the
        module -- presumably provided elsewhere; verify before relying on it.
        """
        self.progress("scan", path)

        resources = Resources(path, collect_ignores=collect_ignores)
        if not base_path:
            if isfile(path):
                base_path = dirname(path)
            else:
                base_path = path
        resources.base_path = base_path

        # Dispatch on path kind; both helpers mutate *resources* in place
        if isfile(path):
            self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
        else:
            self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
        return resources
00624 
00625     # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
00626     # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
00627     # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """Traverse *path* and add every eligible file to *resources*,
        honoring .mbedignore files, legacy TARGET_/TOOLCHAIN_ directory
        conventions and *exclude_paths* along the way.

        From the os.walk documentation:
        os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.add_ignore_patterns(root, base_path, lines)

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            root_path =join(relpath(root, base_path))
            if  (self.is_ignored(join(root_path,"")) or
                 self.build_dir == root_path):
                resources.ignore_dir(root_path)
                dirs[:] = []  # prune: os.walk will not descend further here
                continue

            # Iterate a copy so dirs can be pruned in place while looping
            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(relpath(root, base_path), d,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        resources.ignore_dir(dir_path)
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    # (default args bind dir_path/base_path at definition time)
                    def closure (dir_path=dir_path, base_path=base_path):
                        return self.scan_resources(dir_path, base_path=base_path,
                                                   collect_ignores=resources.collect_ignores)
                    resources.features.add_lazy(d[8:], closure)
                    resources.ignore_dir(dir_path)
                    dirs.remove(d)
                elif exclude_paths:
                    # Prune any directory that lies inside an excluded path
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            resources.ignore_dir(dir_path)
                            dirs.remove(d)
                            break

            # Add root to include paths
            root = root.rstrip("/")
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00701 
00702     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00703     # (*file_path*) to the resources object based on the file type.
00704     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00705 
00706         if  (self.is_ignored(relpath(file_path, base_path)) or
00707              basename(file_path).startswith(".")):
00708             resources.ignore_dir(relpath(file_path, base_path))
00709             return
00710 
00711         resources.file_basepath[file_path] = base_path
00712         _, ext = splitext(file_path)
00713         ext = ext.lower()
00714 
00715         if   ext == '.s':
00716             resources.s_sources.append(file_path)
00717 
00718         elif ext == '.c':
00719             resources.c_sources.append(file_path)
00720 
00721         elif ext == '.cpp':
00722             resources.cpp_sources.append(file_path)
00723 
00724         elif ext == '.h' or ext == '.hpp':
00725             resources.headers.append(file_path)
00726 
00727         elif ext == '.o':
00728             resources.objects.append(file_path)
00729 
00730         elif ext == self.LIBRARY_EXT:
00731             resources.libraries.append(file_path)
00732             resources.lib_dirs.add(dirname(file_path))
00733 
00734         elif ext == self.LINKER_EXT:
00735             if resources.linker_script is not None:
00736                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00737             resources.linker_script = file_path
00738 
00739         elif ext == '.lib':
00740             resources.lib_refs.append(file_path)
00741 
00742         elif ext == '.bld':
00743             resources.lib_builds.append(file_path)
00744 
00745         elif basename(file_path) == '.hgignore':
00746             resources.repo_files.append(file_path)
00747 
00748         elif basename(file_path) == '.gitignore':
00749             resources.repo_files.append(file_path)
00750 
00751         elif ext == '.hex':
00752             resources.hex_files.append(file_path)
00753 
00754         elif ext == '.bin':
00755             resources.bin_files.append(file_path)
00756 
00757         elif ext == '.json':
00758             resources.json_files.append(file_path)
00759 
00760 
00761     def scan_repository(self, path):
00762         resources = []
00763 
00764         for root, dirs, files in walk(path):
00765             # Remove ignored directories
00766             for d in copy(dirs):
00767                 if d == '.' or d == '..':
00768                     dirs.remove(d)
00769 
00770             for file in files:
00771                 file_path = join(root, file)
00772                 resources.append(file_path)
00773 
00774         return resources
00775 
00776     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00777         # Handle a single file
00778         if not isinstance(files_paths, list):
00779             files_paths = [files_paths]
00780 
00781         for source in files_paths:
00782             if source is None:
00783                 files_paths.remove(source)
00784 
00785         for source in files_paths:
00786             if resources is not None and source in resources.file_basepath:
00787                 relative_path = relpath(source, resources.file_basepath[source])
00788             elif rel_path is not None:
00789                 relative_path = relpath(source, rel_path)
00790             else:
00791                 _, relative_path = split(source)
00792 
00793             target = join(trg_path, relative_path)
00794 
00795             if (target != source) and (self.need_update(target, [source])):
00796                 self.progress("copy", relative_path)
00797                 mkdir(dirname(target))
00798                 copyfile(source, target)
00799 
00800     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00801     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00802     def relative_object_path(self, build_path, base_dir, source):
00803         source_dir, name, _ = split_path(source)
00804 
00805         obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
00806         if obj_dir is not self.prev_dir:
00807             self.prev_dir = obj_dir
00808             mkdir(obj_dir)
00809         return join(obj_dir, name + '.o')
00810 
00811     # Generate response file for all includes.
00812     # ARM, GCC, IAR cross compatible
00813     def get_inc_file(self, includes):
00814         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00815         if not exists(include_file):
00816             with open(include_file, "w") as f:
00817                 cmd_list = []
00818                 for c in includes:
00819                     if c:
00820                         c = c.replace("\\", "/")
00821                         if self.CHROOT:
00822                             c = c.replace(self.CHROOT, '')
00823                         cmd_list.append('"-I%s"' % c)
00824                 string = " ".join(cmd_list)
00825                 f.write(string)
00826         return include_file
00827 
00828     # Generate response file for all objects when linking.
00829     # ARM, GCC, IAR cross compatible
00830     def get_link_file(self, cmd):
00831         link_file = join(self.build_dir, ".link_files.txt")
00832         with open(link_file, "w") as f:
00833             cmd_list = []
00834             for c in cmd:
00835                 if c:
00836                     c = c.replace("\\", "/")
00837                     if self.CHROOT:
00838                         c = c.replace(self.CHROOT, '')
00839                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00840             string = " ".join(cmd_list)
00841             f.write(string)
00842         return link_file
00843 
00844     # Generate response file for all objects when archiving.
00845     # ARM, GCC, IAR cross compatible
00846     def get_arch_file(self, objects):
00847         archive_file = join(self.build_dir, ".archive_files.txt")
00848         with open(archive_file, "w") as f:
00849             o_list = []
00850             for o in objects:
00851                 o_list.append('"%s"' % o)
00852             string = " ".join(o_list).replace("\\", "/")
00853             f.write(string)
00854         return archive_file
00855 
00856     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00857     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00858     def compile_sources(self, resources, inc_dirs=None):
00859         # Web IDE progress bar for project build
00860         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00861         self.to_be_compiled = len(files_to_compile)
00862         self.compiled = 0
00863 
00864         self.notify.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00865 
00866         inc_paths = resources.inc_dirs
00867         if inc_dirs is not None:
00868             if isinstance(inc_dirs, list):
00869                 inc_paths.extend(inc_dirs)
00870             else:
00871                 inc_paths.append(inc_dirs)
00872         # De-duplicate include paths
00873         inc_paths = set(inc_paths)
00874         # Sort include paths for consistency
00875         inc_paths = sorted(set(inc_paths))
00876         # Unique id of all include paths
00877         self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest()
00878 
00879         objects = []
00880         queue = []
00881         work_dir = getcwd()
00882         self.prev_dir = None
00883 
00884         # Generate configuration header (this will update self.build_all if needed)
00885         self.get_config_header()
00886         self.dump_build_profile()
00887 
00888         # Sort compile queue for consistency
00889         files_to_compile.sort()
00890         for source in files_to_compile:
00891             object = self.relative_object_path(
00892                 self.build_dir, resources.file_basepath[source], source)
00893 
00894             # Queue mode (multiprocessing)
00895             commands = self.compile_command(source, object, inc_paths)
00896             if commands is not None:
00897                 queue.append({
00898                     'source': source,
00899                     'object': object,
00900                     'commands': commands,
00901                     'work_dir': work_dir,
00902                     'chroot': self.CHROOT
00903                 })
00904             else:
00905                 self.compiled += 1
00906                 objects.append(object)
00907 
00908         # Use queues/multiprocessing if cpu count is higher than setting
00909         jobs = self.jobs if self.jobs else cpu_count()
00910         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00911             return self.compile_queue(queue, objects)
00912         else:
00913             return self.compile_seq(queue, objects)
00914 
00915     # Compile source files queue in sequential order
00916     def compile_seq(self, queue, objects):
00917         for item in queue:
00918             result = compile_worker(item)
00919 
00920             self.compiled += 1
00921             self.progress("compile", item['source'], build_update=True)
00922             for res in result['results']:
00923                 self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00924                 self.compile_output([
00925                     res['code'],
00926                     res['output'],
00927                     res['command']
00928                 ])
00929             objects.append(result['object'])
00930         return objects
00931 
00932     # Compile source files queue in parallel by creating pool of worker threads
    def compile_queue(self, queue, objects):
        """Compile queued jobs in parallel via a multiprocessing Pool.

        Polls the async results, streaming diagnostics through the notifier
        as each job completes, and appends each produced object file to
        *objects*, which is returned.  Raises ToolException on a compile
        failure or when the polling cap is exceeded.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
        p = Pool(processes=jobs_count)

        # Submit every job asynchronously, then close the pool to new work
        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))
        p.close()

        itr = 0
        while len(results):
            itr += 1
            if itr > 180000:
                # NOTE(review): 180000 polls * 0.01s sleep is ~30 minutes of
                # sleep time, but the message says 5 minutes -- confirm the
                # intended cap.
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            sleep(0.01)
            pending = 0
            for r in results:
                if r.ready():
                    try:
                        result = r.get()
                        # NOTE: removing from 'results' while iterating it;
                        # the outer while re-scans, so no result is lost
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException as err:
                        # Drain queued-but-unstarted tasks before terminating.
                        # NOTE: _taskqueue is a private Pool attribute.
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    # Stop scanning once a full batch of jobs is still running
                    pending += 1
                    if pending >= jobs_count:
                        break

        results = None
        p.join()

        return objects
00984 
00985     # Determine the compile command based on type of source file
00986     def compile_command(self, source, object, includes):
00987         # Check dependencies
00988         _, ext = splitext(source)
00989         ext = ext.lower()
00990 
00991         if ext == '.c' or  ext == '.cpp':
00992             base, _ = splitext(object)
00993             dep_path = base + '.d'
00994             try:
00995                 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
00996             except (IOError, IndexError):
00997                 deps = []
00998             config_file = ([self.config.app_config_location]
00999                            if self.config.app_config_location else [])
01000             deps.extend(config_file)
01001             if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01002                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx"))
01003             else:
01004                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c"))
01005             if len(deps) == 0 or self.need_update(object, deps):
01006                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01007                     return self.compile_cpp(source, object, includes)
01008                 else:
01009                     return self.compile_c(source, object, includes)
01010         elif ext == '.s':
01011             deps = [source]
01012             deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm"))
01013             if self.need_update(object, deps):
01014                 return self.assemble(source, object, includes)
01015         else:
01016             return False
01017 
01018         return None
01019 
01020     def parse_dependencies(self, dep_path):
01021         """Parse the dependency information generated by the compiler.
01022 
01023         Positional arguments:
01024         dep_path -- the path to a file generated by a previous run of the compiler
01025 
01026         Return value:
01027         A list of all source files that the dependency file indicated were dependencies
01028 
01029         Side effects:
01030         None
01031 
01032         Note: A default implementation is provided for make-like file formats
01033         """
01034         dependencies = []
01035         buff = open(dep_path).readlines()
01036         if buff:
01037             buff[0] = re.sub('^(.*?)\: ', '', buff[0])
01038             for line in buff:
01039                 filename = line.replace('\\\n', '').strip()
01040                 if filename:
01041                     filename = filename.replace('\\ ', '\a')
01042                     dependencies.extend(((self.CHROOT if self.CHROOT else '') +
01043                                          f.replace('\a', ' '))
01044                                         for f in filename.split(" "))
01045         return list(filter(None, dependencies))
01046 
01047     def is_not_supported_error(self, output):
01048         return "#error directive: [NOT_SUPPORTED]" in output
01049 
01050     @abstractmethod
01051     def parse_output(self, output):
01052         """Take in compiler output and extract sinlge line warnings and errors from it.
01053 
01054         Positional arguments:
01055         output -- a string of all the messages emitted by a run of the compiler
01056 
01057         Return value:
01058         None
01059 
01060         Side effects:
01061         call self.cc_info or self.notify with a description of the event generated by the compiler
01062         """
01063         raise NotImplemented
01064 
01065     def compile_output(self, output=[]):
01066         _rc = output[0]
01067         _stderr = output[1].decode("utf-8")
01068         command = output[2]
01069 
01070         # Parse output for Warnings and Errors
01071         self.parse_output(_stderr)
01072         self.notify.debug("Return: %s"% _rc)
01073         for error_line in _stderr.splitlines():
01074             self.notify.debug("Output: %s"% error_line)
01075 
01076         # Check return code
01077         if _rc != 0:
01078             if self.is_not_supported_error(_stderr):
01079                 raise NotSupportedException(_stderr)
01080             else:
01081                 raise ToolException(_stderr)
01082 
01083     def build_library(self, objects, dir, name):
01084         needed_update = False
01085         lib = self.STD_LIB_NAME % name
01086         fout = join(dir, lib)
01087         if self.need_update(fout, objects):
01088             self.info("Library: %s" % lib)
01089             self.archive(objects, fout)
01090             needed_update = True
01091 
01092         return needed_update
01093 
    def link_program(self, r, tmp_path, name):
        """Link objects and libraries into the final executable/binary.

        Positional arguments:
        r -- Resources object (objects, libraries, linker script, lib dirs)
        tmp_path -- directory receiving the .elf/.map/binary outputs
        name -- base name of the output (may contain a subdirectory)

        Return value:
        (full_path, needed_update) -- absolute path of the final artifact,
        and whether a relink or binary conversion actually ran.
        """
        needed_update = False
        # Output extension defaults to 'bin' unless the target overrides it
        ext = 'bin'
        if hasattr(self.target, 'OUTPUT_EXT'):
            ext = self.target.OUTPUT_EXT

        if hasattr(self.target, 'OUTPUT_NAMING'):
            self.notify.var("binary_naming", self.target.OUTPUT_NAMING)
            if self.target.OUTPUT_NAMING == "8.3":
                # DOS-style 8.3 naming: truncate stem and extension
                name = name[0:8]
                ext = ext[0:3]

        # Create destination directory
        head, tail =  split(name)
        new_path = join(tmp_path, head)
        mkdir(new_path)

        filename = name+'.'+ext
        # Absolute path of the final linked file
        full_path = join(tmp_path, filename)
        elf = join(tmp_path, name + '.elf')
        # NOTE: 'bin' and 'map' shadow builtins; kept as-is for compatibility.
        # No separate binary is produced when the output format is the ELF.
        bin = None if ext == 'elf' else full_path
        map = join(tmp_path, name + '.map')

        r.objects = sorted(set(r.objects))
        config_file = ([self.config.app_config_location]
                       if self.config.app_config_location else [])
        # Relink when any object, library, the linker script, the app config
        # or the dumped linker build profile changed
        dependencies = r.objects + r.libraries + [r.linker_script] + config_file
        dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
        if self.need_update(elf, dependencies):
            needed_update = True
            self.progress("link", name)
            self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

        if bin and self.need_update(bin, [elf]):
            needed_update = True
            self.progress("elf2bin", name)
            self.binary(r, elf, bin)

        # Initialize memap and process map file. This doesn't generate output.
        self.mem_stats(map)

        self.notify.var("compile_succeded", True)
        self.notify.var("binary", filename)

        return full_path, needed_update
01140 
01141     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01142     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01143     def default_cmd(self, command):
01144         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01145         self.notify.debug("Return: %s"% _rc)
01146 
01147         for output_line in _stdout.splitlines():
01148             self.notify.debug("Output: %s"% output_line)
01149         for error_line in _stderr.splitlines():
01150             self.notify.debug("Errors: %s"% error_line)
01151 
01152         if _rc != 0:
01153             for line in _stderr.splitlines():
01154                 self.notify.tool_error(line)
01155             raise ToolException(_stderr)
01156 
01157     def progress(self, action, file, build_update=False):
01158         if build_update:
01159             percent = 100. * float(self.compiled) / float(self.to_be_compiled)
01160         else:
01161             percent = None
01162         self.notify.progress(action, file, percent)
01163 
01164     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01165     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01166     def mem_stats(self, map):
01167         """! Creates parser object
01168         @param map Path to linker map file to parse and decode
01169         @return None
01170         """
01171         toolchain = self.__class__.__name__
01172 
01173         # Create memap object
01174         memap = MemapParser()
01175 
01176         # Parse and decode a map file
01177         if memap.parse(abspath(map), toolchain) is False:
01178             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01179             return None
01180 
01181         # Store the memap instance for later use
01182         self.memap_instance = memap
01183 
01184         # Note: memory statistics are not returned.
01185         # Need call to generate_output later (depends on depth & output format)
01186 
01187         return None
01188 
01189     def add_regions(self):
01190         """Add regions to the build profile, if there are any.
01191         """
01192         regions = list(self.config.regions)
01193         self.notify.info("Using regions %s in this build."
01194                          % ", ".join(region.name for region in regions))
01195         for region in regions:
01196             for define in [(region.name.upper() + "_ADDR", region.start),
01197                            (region.name.upper() + "_SIZE", region.size)]:
01198                 define_string = "-D%s=0x%x" %  define
01199                 self.cc.append(define_string)
01200                 self.cppc.append(define_string)
01201                 self.flags["common"].append(define_string)
01202             if region.active:
01203                 for define in [("MBED_APP_START", region.start),
01204                                ("MBED_APP_SIZE", region.size)]:
01205                     define_string = self.make_ld_define(*define)
01206                     self.ld.append(define_string)
01207                     self.flags["ld"].append(define_string)
01208             self.notify.info("  Region %s: size 0x%x, offset 0x%x"
01209                              % (region.name, region.size, region.start))
01210 
01211     # Set the configuration data
01212     def set_config_data(self, config_data):
01213         self.config_data = config_data
01214         if self.config.has_regions:
01215             self.add_regions()
01216 
    # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not created (or deleted if it exists).
    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
    # - if there is configuration data identical to the previous configuration data,
    #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overwritten.
    # The function needs to be called exactly once for the lifetime of this toolchain instance.
    # The "config_processed" variable (below) ensures this behaviour.
    # The function returns the location of the configuration file, or None if there is no
    # configuration data available (and thus no configuration file)
    def get_config_header(self):
        """Create, update or delete mbed_config.h in the build directory.

        Returns the path of the configuration header, or None when there is
        no configuration data.  Sets self.build_all when the configuration
        changed so that everything is rebuilt.  Only the first call touches
        the file; later calls just return the cached location (guarded by
        self.config_processed).
        """
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "r") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = self.config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file
01264 
01265     def dump_build_profile(self):
01266         """Dump the current build profile and macros into the `.profile` file
01267         in the build directory"""
01268         for key in ["cxx", "c", "asm", "ld"]:
01269             to_dump = (str(self.flags[key]) + str(sorted(self.macros)))
01270             if key in ["cxx", "c"]:
01271                 to_dump += str(self.flags['common'])
01272             where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key)
01273             self._overwrite_when_not_equal(where, to_dump)
01274 
01275     @staticmethod
01276     def _overwrite_when_not_equal(filename, content):
01277         if not exists(filename) or content != open(filename).read():
01278             with open(filename, "w") as out:
01279                 out.write(content)
01280 
01281     @staticmethod
01282     def generic_check_executable(tool_key, executable_name, levels_up,
01283                                  nested_dir=None):
01284         """
01285         Positional args:
01286         tool_key: the key to index TOOLCHAIN_PATHS
01287         executable_name: the toolchain's named executable (ex. armcc)
01288         levels_up: each toolchain joins the toolchain_path, some
01289         variable directories (bin, include), and the executable name,
01290         so the TOOLCHAIN_PATH value must be appropriately distanced
01291 
01292         Keyword args:
01293         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01294           is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
01295           that will be used by toolchain's compile)
01296 
01297         Returns True if the executable location specified by the user
01298         exists and is valid OR the executable can be found on the PATH.
01299         Returns False otherwise.
01300         """
01301         # Search PATH if user did not specify a path or specified path doesn't
01302         # exist.
01303         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01304             exe = find_executable(executable_name)
01305             if not exe:
01306                 return False
01307             for level in range(levels_up):
01308                 # move up the specified number of directories
01309                 exe = dirname(exe)
01310             TOOLCHAIN_PATHS[tool_key] = exe
01311         if nested_dir:
01312             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01313                           executable_name)
01314         else:
01315             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01316         # User could have specified a path that exists but does not contain exe
01317         return exists(subdir) or exists(subdir +'.exe')
01318 
01319     @abstractmethod
01320     def check_executable(self):
01321         """Returns True if the executable (armcc) location specified by the
01322          user exists OR the executable can be found on the PATH.
01323          Returns False otherwise."""
01324         raise NotImplemented
01325 
01326     @abstractmethod
01327     def get_config_option(self, config_header):
01328         """Generate the compiler option that forces the inclusion of the configuration
01329         header file.
01330 
01331         Positional arguments:
01332         config_header -- The configuration header that will be included within all source files
01333 
01334         Return value:
01335         A list of the command line arguments that will force the inclusion the specified header
01336 
01337         Side effects:
01338         None
01339         """
01340         raise NotImplemented
01341 
01342     @abstractmethod
01343     def get_compile_options(self, defines, includes, for_asm=False):
01344         """Generate the compiler options from the defines and includes
01345 
01346         Positional arguments:
01347         defines -- The preprocessor macros defined on the command line
01348         includes -- The include file search paths
01349 
01350         Keyword arguments:
01351         for_asm -- generate the assembler options instead of the compiler options
01352 
01353         Return value:
01354         A list of the command line arguments that will force the inclusion the specified header
01355 
01356         Side effects:
01357         None
01358         """
01359         raise NotImplemented
01360 
01361     @abstractmethod
01362     def assemble(self, source, object, includes):
01363         """Generate the command line that assembles.
01364 
01365         Positional arguments:
01366         source -- a file path that is the file to assemble
01367         object -- a file path that is the destination object
01368         includes -- a list of all directories where header files may be found
01369 
01370         Return value:
01371         The complete command line, as a list, that would invoke the assembler
01372         on the source file, include all the include paths, and generate
01373         the specified object file.
01374 
01375         Side effects:
01376         None
01377 
01378         Note:
01379         This method should be decorated with @hook_tool.
01380         """
01381         raise NotImplemented
01382 
01383     @abstractmethod
01384     def compile_c(self, source, object, includes):
01385         """Generate the command line that compiles a C source file.
01386 
01387         Positional arguments:
01388         source -- the C source file to compile
01389         object -- the destination object file
01390         includes -- a list of all the directories where header files may be found
01391 
01392         Return value:
01393         The complete command line, as a list, that would invoke the C compiler
01394         on the source file, include all the include paths, and generate the
01395         specified object file.
01396 
01397         Side effects:
01398         None
01399 
01400         Note:
01401         This method should be decorated with @hook_tool.
01402         """
01403         raise NotImplemented
01404 
01405     @abstractmethod
01406     def compile_cpp(self, source, object, includes):
01407         """Generate the command line that compiles a C++ source file.
01408 
01409         Positional arguments:
01410         source -- the C++ source file to compile
01411         object -- the destination object file
01412         includes -- a list of all the directories where header files may be found
01413 
01414         Return value:
01415         The complete command line, as a list, that would invoke the C++ compiler
01416         on the source file, include all the include paths, and generate the
01417         specified object file.
01418 
01419         Side effects:
01420         None
01421 
01422         Note:
01423         This method should be decorated with @hook_tool.
01424         """
01425         raise NotImplemented
01426 
01427     @abstractmethod
01428     def link(self, output, objects, libraries, lib_dirs, mem_map):
01429         """Run the linker to create an executable and memory map.
01430 
01431         Positional arguments:
01432         output -- the file name to place the executable in
01433         objects -- all of the object files to link
01434         libraries -- all of the required libraries
01435         lib_dirs -- where the required libraries are located
01436         mem_map -- the location where the memory map file should be stored
01437 
01438         Return value:
01439         None
01440 
01441         Side effect:
01442         Runs the linker to produce the executable.
01443 
01444         Note:
01445         This method should be decorated with @hook_tool.
01446         """
01447         raise NotImplemented
01448 
01449     @abstractmethod
01450     def archive(self, objects, lib_path):
01451         """Run the command line that creates an archive.
01452 
01453         Positional arguhments:
01454         objects -- a list of all the object files that should be archived
01455         lib_path -- the file name of the resulting library file
01456 
01457         Return value:
01458         None
01459 
01460         Side effect:
01461         Runs the archiving tool to produce the library file.
01462 
01463         Note:
01464         This method should be decorated with @hook_tool.
01465         """
01466         raise NotImplemented
01467 
01468     @abstractmethod
01469     def binary(self, resources, elf, bin):
01470         """Run the command line that will Extract a simplified binary file.
01471 
01472         Positional arguments:
01473         resources -- A resources object (Is not used in any of the toolchains)
01474         elf -- the executable file that is to be converted
01475         bin -- the file name of the to be created simplified binary file
01476 
01477         Return value:
01478         None
01479 
01480         Side effect:
01481         Runs the elf2bin tool to produce the simplified binary file.
01482 
01483         Note:
01484         This method should be decorated with @hook_tool.
01485         """
01486         raise NotImplemented
01487 
01488     @staticmethod
01489     @abstractmethod
01490     def name_mangle(name):
01491         """Mangle a name based on the conventional name mangling of this toolchain
01492 
01493         Positional arguments:
01494         name -- the name to mangle
01495 
01496         Return:
01497         the mangled name as a string
01498         """
01499         raise NotImplemented
01500 
01501     @staticmethod
01502     @abstractmethod
01503     def make_ld_define(name, value):
01504         """Create an argument to the linker that would define a symbol
01505 
01506         Positional arguments:
01507         name -- the symbol to define
01508         value -- the value to give the symbol
01509 
01510         Return:
01511         The linker flag as a string
01512         """
01513         raise NotImplemented
01514 
01515     @staticmethod
01516     @abstractmethod
01517     def redirect_symbol(source, sync, build_dir):
01518         """Redirect a symbol at link time to point at somewhere else
01519 
01520         Positional arguments:
01521         source -- the symbol doing the pointing
01522         sync -- the symbol being pointed to
01523         build_dir -- the directory to put "response files" if needed by the toolchain
01524 
01525         Side Effects:
01526         Possibly create a file in the build directory
01527 
01528         Return:
01529         The linker flag to redirect the symbol, as a string
01530         """
01531         raise NotImplemented
01532 
01533     # Return the list of macros geenrated by the build system
01534     def get_config_macros(self):
01535         return self.config.config_to_macros(self.config_data) if self.config_data else []
01536 
01537     @property
01538     def report(self):
01539         to_ret = {}
01540         to_ret['c_compiler'] = {'flags': copy(self.flags['c']),
01541                                 'symbols': self.get_symbols()}
01542         to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']),
01543                                   'symbols': self.get_symbols()}
01544         to_ret['assembler'] = {'flags': copy(self.flags['asm']),
01545                                'symbols': self.get_symbols(True)}
01546         to_ret['linker'] = {'flags': copy(self.flags['ld'])}
01547         to_ret.update(self.config.report)
01548         return to_ret
01549 
01550 from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH
01551 
# Toolchain name -> configured installation path (values come from
# tools.settings).  'ARM' and 'uARM' deliberately share the same path:
# both use the ARM compiler installation.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,
    'ARMC6': ARMC6_PATH,
    'GCC_ARM': GCC_ARM_PATH,
    'IAR': IAR_PATH
}
01559 
01560 from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6
01561 from tools.toolchains.gcc import GCC_ARM
01562 from tools.toolchains.iar import IAR
01563 
# Toolchain name -> concrete toolchain class implementing it.  Keys are
# unicode literals (u'...') to keep lookups consistent across Python 2/3.
TOOLCHAIN_CLASSES = {
    u'ARM': ARM_STD,
    u'uARM': ARM_MICRO,
    u'ARMC6': ARMC6,
    u'GCC_ARM': GCC_ARM,
    u'IAR': IAR
}

# The set of all supported toolchain names, derived from the class table.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())