__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 
00018 import re
00019 import sys
00020 from os import stat, walk, getcwd, sep, remove
00021 from copy import copy
00022 from time import time, sleep
00023 from types import ListType
00024 from shutil import copyfile
00025 from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir
00026 from inspect import getmro
00027 from copy import deepcopy
00028 from tools.config import Config
00029 from abc import ABCMeta, abstractmethod
00030 from distutils.spawn import find_executable
00031 
00032 from multiprocessing import Pool, cpu_count
00033 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker
00034 from tools.settings import MBED_ORG_USER
00035 import tools.hooks as hooks
00036 from tools.memap import MemapParser
00037 from hashlib import md5
00038 import fnmatch
00039 
00040 
00041 # Disables multiprocessing if set to a number higher than the host machine's CPU count
00042 CPU_COUNT_MIN = 1
00043 CPU_COEF = 1
00044 
00045 class Resources:
00046     def __init__(self, base_path=None):
00047         self.base_path = base_path
00048 
00049         self.file_basepath = {}
00050 
00051         self.inc_dirs = []
00052         self.headers = []
00053 
00054         self.s_sources = []
00055         self.c_sources = []
00056         self.cpp_sources = []
00057 
00058         self.lib_dirs = set([])
00059         self.objects = []
00060         self.libraries = []
00061 
00062         # mbed special files
00063         self.lib_builds = []
00064         self.lib_refs = []
00065 
00066         self.repo_dirs = []
00067         self.repo_files = []
00068 
00069         self.linker_script = None
00070 
00071         # Other files
00072         self.hex_files = []
00073         self.bin_files = []
00074         self.json_files = []
00075 
00076         # Features
00077         self.features = {}
00078 
00079     def __add__(self, resources):
00080         if resources is None:
00081             return self
00082         else:
00083             return self.add(resources)
00084 
00085     def __radd__(self, resources):
00086         if resources is None:
00087             return self
00088         else:
00089             return self.add(resources)
00090 
00091     def add(self, resources):
00092         for f,p in resources.file_basepath.items():
00093             self.file_basepath[f] = p
00094 
00095         self.inc_dirs += resources.inc_dirs
00096         self.headers += resources.headers
00097 
00098         self.s_sources += resources.s_sources
00099         self.c_sources += resources.c_sources
00100         self.cpp_sources += resources.cpp_sources
00101 
00102         self.lib_dirs |= resources.lib_dirs
00103         self.objects += resources.objects
00104         self.libraries += resources.libraries
00105 
00106         self.lib_builds += resources.lib_builds
00107         self.lib_refs += resources.lib_refs
00108 
00109         self.repo_dirs += resources.repo_dirs
00110         self.repo_files += resources.repo_files
00111 
00112         if resources.linker_script is not None:
00113             self.linker_script = resources.linker_script
00114 
00115         self.hex_files += resources.hex_files
00116         self.bin_files += resources.bin_files
00117         self.json_files += resources.json_files
00118 
00119         self.features.update(resources.features)
00120 
00121         return self
00122 
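    # --- Example (editor's sketch): combining Resources objects -------------
    # "+" delegates to add(), which mutates the left-hand object in place and
    # returns it; a None operand is tolerated on either side, so partial scan
    # results can simply be folded together. File names below are hypothetical.
    #
    #   app = Resources("app"); app.c_sources.append("app/main.c")
    #   lib = Resources("lib"); lib.c_sources.append("lib/util.c")
    #   combined = app + lib      # same object as "app", now holding both
    #   print combined            # the "C sources" section lists both files
    # -------------------------------------------------------------------------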
00123     def _collect_duplicates(self, dupe_dict, dupe_headers):
00124         for filename in self.s_sources + self.c_sources + self.cpp_sources:
00125             objname, _ = splitext(basename(filename))
00126             dupe_dict.setdefault(objname, set())
00127             dupe_dict[objname] |= set([filename])
00128         for filename in self.headers:
00129             headername = basename(filename)
00130             dupe_headers.setdefault(headername, set())
00131             dupe_headers[headername] |= set([filename])
00132         for res in self.features.values():
00133             res._collect_duplicates(dupe_dict, dupe_headers)
00134         return dupe_dict, dupe_headers
00135 
00136     def detect_duplicates(self, toolchain):
00137         """Detect all potential ambiguities in filenames and report them with
00138         a toolchain notification
00139 
00140         Positional Arguments:
00141         toolchain - used for notifications
00142         """
00143         count = 0
00144         dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
00145         for objname, filenames in dupe_dict.iteritems():
00146             if len(filenames) > 1:
00147                 count+=1
00148                 toolchain.tool_error(
00149                     "Object file %s.o is not unique! It could be made from: %s"\
00150                     % (objname, " ".join(filenames)))
00151         for headername, locations in dupe_headers.iteritems():
00152             if len(locations) > 1:
00153                 count+=1
00154                 toolchain.tool_error(
00155                     "Header file %s is not unique! It could be: %s" %\
00156                     (headername, " ".join(locations)))
00157         return count
00158 
00159 
00160     def relative_to(self, base, dot=False):
00161         for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
00162                       'cpp_sources', 'lib_dirs', 'objects', 'libraries',
00163                       'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
00164                       'hex_files', 'bin_files', 'json_files']:
00165             v = [rel_path(f, base, dot) for f in getattr(self, field)]
00166             setattr(self, field, v)
00167 
00168         for f in self.features.values(): f.relative_to(base, dot)
00169 
00170         if self.linker_script is not None:
00171             self.linker_script = rel_path(self.linker_script, base, dot)
00172 
00173     def win_to_unix(self):
00174         for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
00175                       'cpp_sources', 'lib_dirs', 'objects', 'libraries',
00176                       'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
00177                       'hex_files', 'bin_files', 'json_files']:
00178             v = [f.replace('\\', '/') for f in getattr(self, field)]
00179             setattr(self, field, v)
00180 
00181         for f in self.features.values(): f.win_to_unix()
00182 
00183         if self.linker_script is not None:
00184             self.linker_script = self.linker_script.replace('\\', '/')
00185 
00186     def __str__(self):
00187         s = []
00188 
00189         for (label, resources) in (
00190                 ('Include Directories', self.inc_dirs),
00191                 ('Headers', self.headers),
00192 
00193                 ('Assembly sources', self.s_sources),
00194                 ('C sources', self.c_sources),
00195                 ('C++ sources', self.cpp_sources),
00196 
00197                 ('Library directories', self.lib_dirs),
00198                 ('Objects', self.objects),
00199                 ('Libraries', self.libraries),
00200 
00201                 ('Hex files', self.hex_files),
00202                 ('Bin files', self.bin_files),
00203 
00204                 ('Features', self.features),
00205             ):
00206             if resources:
00207                 s.append('%s:\n  ' % label + '\n  '.join(resources))
00208 
00209         if self.linker_script:
00210             s.append('Linker Script: ' + self.linker_script)
00211 
00212         return '\n'.join(s)
00213 
00214 # Support legacy build conventions: the original mbed build system did not have
00215 # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
00216 # instead relied on a hard-coded list of such directories to ignore.
00217 LEGACY_IGNORE_DIRS = set([
00218     'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
00219     'ARM', 'uARM', 'IAR',
00220     'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
00221 ])
00222 LEGACY_TOOLCHAIN_NAMES = {
00223     'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
00224     'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
00225     'IAR': 'IAR',
00226 }
00227 
00228 
00229 class mbedToolchain:
00230     # Verbose logging
00231     VERBOSE = True
00232 
00233     # Compile C files as CPP
00234     COMPILE_C_AS_CPP = False
00235 
00236     # Response files for compiling, includes, linking and archiving.
00237     # Not needed on posix systems where the typical arg limit is 2 megabytes
00238     RESPONSE_FILES = True
00239 
00240     CORTEX_SYMBOLS = {
00241         "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00242         "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00243         "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00244         "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00245         "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00246         "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00247         "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00248         "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00249         "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
00250         "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
00251     }
00252 
00253     MBED_CONFIG_FILE_NAME="mbed_config.h"
00254 
00255     __metaclass__ = ABCMeta
00256 
00257     profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00258 
00259     def __init__(self, target, notify=None, macros=None, silent=False,
00260                  extra_verbose=False, build_profile=None, build_dir=None):
00261         self.target = target
00262         self.name = self.__class__.__name__
00263 
00264         # compile/assemble/link/binary hooks
00265         self.hook = hooks.Hook(target, self)
00266 
00267         # Toolchain flags
00268         self.flags = deepcopy(build_profile or self.profile_template)
00269 
00270         # System libraries provided by the toolchain
00271         self.sys_libs = []
00272 
00273         # User-defined macros
00274         self.macros = macros or []
00275 
00276         # Macros generated from toolchain and target rules/features
00277         self.asm_symbols = None
00278         self.cxx_symbols = None
00279 
00280         # Labels generated from toolchain and target rules/features (used for selective build)
00281         self.labels = None
00282 
00283         # This will hold the initialized config object
00284         self.config = None
00285 
00286         # This will hold the configuration data (as returned by Config.get_config_data())
00287         self.config_data = None
00288 
00289         # This will hold the location of the configuration file or None if there's no configuration available
00290         self.config_file = None
00291 
00292         # Call guard for "get_config_header" (see the comments of get_config_header for details)
00293         self.config_processed = False
00294 
00295         # Non-incremental compile
00296         self.build_all = False
00297 
00298         # Build output dir
00299         self.build_dir = build_dir
00300         self.timestamp = time()
00301 
00302         # Number of concurrent build jobs. 0 means auto (based on host system cores)
00303         self.jobs = 0
00304 
00305         # Ignore patterns from .mbedignore files
00306         self.ignore_patterns = []
00307 
00308         # Pre-mbed 2.0 ignore dirs
00309         self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
00310 
00311         # Output notify function
00312         # This function is passed all events, and expected to handle notification of the
00313         # user, emit the events to a log, etc.
00314         # The API for all notify methods passed into the notify parameter is as follows:
00315         # def notify(Event, Silent)
00316         # Where *Event* is a dict representing the toolchain event that was generated
00317         #            e.g.: a compile succeeded, or a warning was emitted by the compiler
00318         #                  or an application was linked
00319         #       *Silent* is a boolean
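        # Example (editor's sketch) of a caller-supplied notify function; the
        # function name and the filtering policy below are hypothetical:
        #
        #   def errors_only(event, silent):
        #       if event['type'] in ('cc', 'tool_error'):
        #           print event['message']
        #
        # and would be passed to the constructor as notify=errors_only.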
00320         if notify:
00321             self.notify_fun = notify
00322         elif extra_verbose:
00323             self.notify_fun = self.print_notify_verbose
00324         else:
00325             self.notify_fun = self.print_notify
00326 
00327         # Silent builds (no output)
00328         self.silent = silent
00329 
00330         # Print output buffer
00331         self.output = str()
00332         self.map_outputs = list()   # Place to store memmap scan results in JSON like data structures
00333 
00334         # uVisor specific rules
00335         if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
00336             self.target.core = re.sub(r"F$", '', self.target.core)
00337 
00338         # Stats cache is used to reduce the amount of IO requests to stat
00339         # header files during dependency change. See need_update()
00340         self.stat_cache = {}
00341 
00342         # Used by the mbed Online Build System to build in chrooted environment
00343         self.CHROOT = None
00344 
00345         # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
00346         self.init()
00347 
00348     # Used for post __init__() hooks
00349     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00350     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00351     def init(self):
00352         return True
00353 
00354     def get_output(self):
00355         return self.output
00356 
00357     def print_notify(self, event, silent=False):
00358         """ Default command line notification
00359         """
00360         msg = None
00361 
00362         if not self.VERBOSE and event['type'] == 'tool_error':
00363             msg = event['message']
00364 
00365         elif event['type'] in ['info', 'debug']:
00366             msg = event['message']
00367 
00368         elif event['type'] == 'cc':
00369             event['severity'] = event['severity'].title()
00370             event['file'] = basename(event['file'])
00371             msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
00372 
00373         elif event['type'] == 'progress':
00374             if 'percent' in event:
00375                 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
00376                                                   event['percent'],
00377                                                   basename(event['file']))
00378             else:
00379                 msg = '{}: {}'.format(event['action'].title(),
00380                                       basename(event['file']))
00381 
00382         if msg:
00383             if not silent:
00384                 print msg
00385             self.output += msg + "\n"
00386 
00387     def print_notify_verbose(self, event, silent=False):
00388         """ Default command line notification with more verbose mode
00389         """
00390         if event['type'] in ['info', 'debug']:
00391             self.print_notify(event, silent=silent) # standard handle
00392 
00393         elif event['type'] == 'cc':
00394             event['severity'] = event['severity'].title()
00395             event['file'] = basename(event['file'])
00396             event['mcu_name'] = "None"
00397             event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
00398             event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
00399             msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
00400             if not silent:
00401                 print msg
00402             self.output += msg + "\n"
00403 
00404         elif event['type'] == 'progress':
00405             self.print_notify(event) # standard handle
00406 
00407     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00408     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00409     def notify(self, event):
00410         """ Little closure for notify functions
00411         """
00412         event['toolchain'] = self
00413         return self.notify_fun(event, self.silent)
00414 
00415     def get_symbols(self, for_asm=False):
00416         if for_asm:
00417             if self.asm_symbols is None:
00418                 self.asm_symbols = []
00419 
00420                 # Cortex CPU symbols
00421                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00422                     self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00423 
00424                 # Add target's symbols
00425                 self.asm_symbols += self.target.macros
00426                 # Add extra symbols passed via 'macros' parameter
00427                 self.asm_symbols += self.macros
00428             return list(set(self.asm_symbols))  # Return only unique symbols
00429         else:
00430             if self.cxx_symbols is None:
00431                 # Target and Toolchain symbols
00432                 labels = self.get_labels()
00433                 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
00434                 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
00435 
00436                 # Cortex CPU symbols
00437                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00438                     self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00439 
00440                 # Symbols defined by the on-line build system
00441                 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
00442                 if MBED_ORG_USER:
00443                     self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
00444 
00445                 # Add target's symbols
00446                 self.cxx_symbols += self.target.macros
00447                 # Add target's hardware
00448                 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
00449                 # Add target's features
00450                 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
00451                 # Add extra symbols passed via 'macros' parameter
00452                 self.cxx_symbols += self.macros
00453 
00454                 # Form factor variables
00455                 if hasattr(self.target, 'supported_form_factors'):
00456                     self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
00457 
00458             return list(set(self.cxx_symbols))  # Return only unique symbols
00459 
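    # Example (editor's sketch) of what get_symbols() typically yields in the
    # C/C++ case; the exact set depends on the target, toolchain and extra macros:
    #
    #   ['TARGET_<label>', ..., 'TOOLCHAIN_<name>', ...,
    #    '__CORTEX_M4', 'ARM_MATH_CM4', ...,            # the CORTEX_SYMBOLS entry
    #    'MBED_BUILD_TIMESTAMP=<time>', 'TARGET_LIKE_MBED', '__MBED__=1',
    #    'DEVICE_<peripheral>=1', ..., 'FEATURE_<feature>=1', ...]
    #
    # Passing for_asm=True returns only the CORTEX_SYMBOLS entry plus the
    # target macros and the user-supplied macros.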
00460     # Extend the internal list of macros
00461     def add_macros(self, new_macros):
00462         self.macros.extend(new_macros)
00463 
00464     def get_labels(self):
00465         if self.labels is None:
00466             toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
00467             toolchain_labels.remove('mbedToolchain')
00468             self.labels = {
00469                 'TARGET': self.target.labels,
00470                 'FEATURE': self.target.features,
00471                 'TOOLCHAIN': toolchain_labels
00472             }
00473 
00474             # This is a policy decision and it should /really/ be in the config system
00475             # ATM it's here for backward compatibility
00476             if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
00477                  "-O0" in self.flags['common']) or
00478                 ("-r" in self.flags['common'] and
00479                  "-On" in self.flags['common'])):
00480                 self.labels['TARGET'].append("DEBUG")
00481             else:
00482                 self.labels['TARGET'].append("RELEASE")
00483         return self.labels
00484 
00485 
00486     # Determine whether a source file needs updating/compiling
00487     def need_update(self, target, dependencies):
00488         if self.build_all:
00489             return True
00490 
00491         if not exists(target):
00492             return True
00493 
00494         target_mod_time = stat(target).st_mtime
00495 
00496         for d in dependencies:
00497             # Some objects are not provided with full path and here we do not have
00498             # information about the library paths. Safe option: assume an update
00499             if not d or not exists(d):
00500                 return True
00501             
00502             if not self.stat_cache.has_key(d):
00503                 self.stat_cache[d] = stat(d).st_mtime
00504 
00505             if self.stat_cache[d] >= target_mod_time:
00506                 return True
00507         
00508         return False
00509 
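    # Example (editor's sketch) of the rule implemented above, with hypothetical
    # paths:
    #
    #   need_update("BUILD/main.o", ["main.c", "mbed.h"])
    #
    # returns True if build_all is set, if main.o does not exist, if any
    # dependency is missing, or if any dependency's mtime is >= main.o's mtime;
    # otherwise it returns False. Dependency mtimes are memoised in stat_cache.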
00510     def is_ignored(self, file_path):
00511         """Check if file path is ignored by any .mbedignore thus far"""
00512         for pattern in self.ignore_patterns:
00513             if fnmatch.fnmatch(file_path, pattern):
00514                 return True
00515         return False
00516 
00517     def add_ignore_patterns(self, root, base_path, patterns):
00518         """Add a series of patterns to the ignored paths
00519 
00520         Positional arguments:
00521         root - the directory containing the ignore file
00522         base_path - the location that the scan started from
00523         patterns - the list of patterns we will ignore in the future
00524         """
00525         real_base = relpath(root, base_path)
00526         if real_base == ".":
00527             self.ignore_patterns.extend(patterns)
00528         else:
00529             self.ignore_patterns.extend(join(real_base, pat) for pat in patterns)
00530 
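    # Example (editor's sketch) of how .mbedignore patterns are scoped. Assume a
    # hypothetical file <scan_root>/third_party/.mbedignore containing:
    #
    #   # comments and blank lines are dropped
    #   docs/*
    #   tests/*
    #
    # add_ignore_patterns() prefixes each pattern with the directory relative to
    # the scan base, producing 'third_party/docs/*' and 'third_party/tests/*',
    # which is_ignored() then matches against relative paths using fnmatch.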
00531     # Create a Resources object from the path pointed to by *path* by either traversing
00532     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00533     # when *path* is a file.
00534     # The parameter *base_path* is used to set the base_path attribute of the Resources
00535     # object and the parameter *exclude_paths* is used by the directory traversal to
00536     # exclude certain paths from the traversal.
00537     def scan_resources(self, path, exclude_paths=None, base_path=None):
00538         self.progress("scan", path)
00539 
00540         resources = Resources(path)
00541         if not base_path:
00542             if isfile(path):
00543                 base_path = dirname(path)
00544             else:
00545                 base_path = path
00546         resources.base_path = base_path
00547 
00548         if isfile(path):
00549             self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
00550         else:
00551             self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
00552         return resources
00553 
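    # Example usage (editor's sketch; "toolchain" is an instance of a concrete
    # subclass such as the ones registered in TOOLCHAIN_CLASSES at the bottom of
    # this file, and the directory name is hypothetical):
    #
    #   res = toolchain.scan_resources("source")
    #   print res.inc_dirs      # every directory visited becomes an include path
    #   print res.c_sources     # *.c files found under "source"
    #   print res.features      # FEATURE_* sub-trees, each scanned into its own Resources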
00554     # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
00555     # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
00556     # on every file it considers adding to the resources object.
00557     def _add_dir(self, path, resources, base_path, exclude_paths=None):
00558         """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
00559         When topdown is True, the caller can modify the dirnames list in-place
00560         (perhaps using del or slice assignment), and walk() will only recurse into
00561         the subdirectories whose names remain in dirnames; this can be used to prune
00562         the search, impose a specific order of visiting, or even to inform walk()
00563         about directories the caller creates or renames before it resumes walk()
00564         again. Modifying dirnames when topdown is False is ineffective, because in
00565         bottom-up mode the directories in dirnames are generated before dirpath
00566         itself is generated.
00567         """
00568         labels = self.get_labels()
00569         for root, dirs, files in walk(path, followlinks=True):
00570             # Check if folder contains .mbedignore
00571             if ".mbedignore" in files:
00572                 with open (join(root,".mbedignore"), "r") as f:
00573                     lines=f.readlines()
00574                     lines = [l.strip() for l in lines] # Strip whitespaces
00575                     lines = [l for l in lines if l != ""] # Strip empty lines
00576                     lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
00577                     # Append root path to glob patterns and append patterns to ignore_patterns
00578                     self.add_ignore_patterns(root, base_path, lines)
00579 
00580             # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
00581             if (self.is_ignored(join(relpath(root, base_path),"")) or
00582                 self.build_dir == join(relpath(root, base_path))):
00583                 dirs[:] = []
00584                 continue
00585 
00586             for d in copy(dirs):
00587                 dir_path = join(root, d)
00588                 # Add internal repo folders/files. This is needed for exporters
00589                 if d == '.hg' or d == '.git':
00590                     resources.repo_dirs.append(dir_path)
00591 
00592                 if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
00593                     # Ignore targets that do not match the TARGET in extra_labels list
00594                     (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
00595                     # Ignore toolchains that do not match the current TOOLCHAIN
00596                     (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
00597                     # Ignore directories matched by .mbedignore patterns
00598                     self.is_ignored(join(relpath(root, base_path), d,"")) or
00599                     # Ignore TESTS dir
00600                     (d == 'TESTS')):
00601                         dirs.remove(d)
00602                 elif d.startswith('FEATURE_'):
00603                     # Recursively scan features but ignore them in the current scan.
00604                     # These are dynamically added by the config system if the conditions are matched
00605                     resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
00606                     dirs.remove(d)
00607                 elif exclude_paths:
00608                     for exclude_path in exclude_paths:
00609                         rel_path = relpath(dir_path, exclude_path)
00610                         if not (rel_path.startswith('..')):
00611                             dirs.remove(d)
00612                             break
00613 
00614             # Add root to include paths
00615             resources.inc_dirs.append(root)
00616             resources.file_basepath[root] = base_path
00617 
00618             for file in files:
00619                 file_path = join(root, file)
00620                 self._add_file(file_path, resources, base_path)
00621 
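    # Example (editor's sketch) of the pruning performed above. With hypothetical
    # labels {'TARGET': [..., 'K64F'], 'TOOLCHAIN': [..., 'GCC_ARM']}:
    #
    #   TARGET_K64F/         kept          TARGET_OTHER/      pruned
    #   TOOLCHAIN_GCC_ARM/   kept          TOOLCHAIN_IAR/     pruned
    #   FEATURE_XYZ/         scanned separately into resources.features['XYZ']
    #
    # Hidden directories, legacy dirs, TESTS and .mbedignore matches are pruned
    # as well, while .hg/.git paths are recorded in resources.repo_dirs.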
00622     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00623     # (*file_path*) to the resources object based on the file type.
00624     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00625         resources.file_basepath[file_path] = base_path
00626 
00627         if self.is_ignored(relpath(file_path, base_path)):
00628             return
00629 
00630         _, ext = splitext(file_path)
00631         ext = ext.lower()
00632 
00633         if   ext == '.s':
00634             resources.s_sources.append(file_path)
00635 
00636         elif ext == '.c':
00637             resources.c_sources.append(file_path)
00638 
00639         elif ext == '.cpp':
00640             resources.cpp_sources.append(file_path)
00641 
00642         elif ext == '.h' or ext == '.hpp':
00643             resources.headers.append(file_path)
00644 
00645         elif ext == '.o':
00646             resources.objects.append(file_path)
00647 
00648         elif ext == self.LIBRARY_EXT:
00649             resources.libraries.append(file_path)
00650             resources.lib_dirs.add(dirname(file_path))
00651 
00652         elif ext == self.LINKER_EXT:
00653             if resources.linker_script is not None:
00654                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00655             resources.linker_script = file_path
00656 
00657         elif ext == '.lib':
00658             resources.lib_refs.append(file_path)
00659 
00660         elif ext == '.bld':
00661             resources.lib_builds.append(file_path)
00662 
00663         elif basename(file_path) == '.hgignore':
00664             resources.repo_files.append(file_path)
00665 
00666         elif basename(file_path) == '.gitignore':
00667             resources.repo_files.append(file_path)
00668 
00669         elif ext == '.hex':
00670             resources.hex_files.append(file_path)
00671 
00672         elif ext == '.bin':
00673             resources.bin_files.append(file_path)
00674 
00675         elif ext == '.json':
00676             resources.json_files.append(file_path)
00677 
00678 
00679     def scan_repository(self, path):
00680         resources = []
00681 
00682         for root, dirs, files in walk(path):
00683             # Remove ignored directories
00684             for d in copy(dirs):
00685                 if d == '.' or d == '..':
00686                     dirs.remove(d)
00687 
00688             for file in files:
00689                 file_path = join(root, file)
00690                 resources.append(file_path)
00691 
00692         return resources
00693 
00694     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00695         # Handle a single file
00696         if type(files_paths) != ListType: files_paths = [files_paths]
00697 
00698         # Drop None entries; removing items from a list while iterating over
00699         # the same list can skip elements, so rebuild the list instead
00700         files_paths = [source for source in files_paths if source is not None]
00701 
00702         for source in files_paths:
00703             if resources is not None and resources.file_basepath.has_key(source):
00704                 relative_path = relpath(source, resources.file_basepath[source])
00705             elif rel_path is not None:
00706                 relative_path = relpath(source, rel_path)
00707             else:
00708                 _, relative_path = split(source)
00709 
00710             target = join(trg_path, relative_path)
00711 
00712             if (target != source) and (self.need_update(target, [source])):
00713                 self.progress("copy", relative_path)
00714                 mkdir(dirname(target))
00715                 copyfile(source, target)
00716 
00717     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00718     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00719     def relative_object_path(self, build_path, base_dir, source):
00720         source_dir, name, _ = split_path(source)
00721 
00722         obj_dir = join(build_path, relpath(source_dir, base_dir))
00723         if obj_dir is not self.prev_dir:
00724             self.prev_dir = obj_dir
00725             mkdir(obj_dir)
00726         return join(obj_dir, name + '.o')
00727 
00728     # Generate response file for all includes.
00729     # ARM, GCC, IAR cross compatible
00730     def get_inc_file(self, includes):
00731         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00732         if not exists(include_file):
00733             with open(include_file, "wb") as f:
00734                 cmd_list = []
00735                 for c in includes:
00736                     if c:
00737                         c = c.replace("\\", "/")
00738                         if self.CHROOT:
00739                             c = c.replace(self.CHROOT, '')
00740                         cmd_list.append('-I%s' % c)
00741                 string = " ".join(cmd_list)
00742                 f.write(string)
00743         return include_file
00744 
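    # Example (editor's sketch) of the generated response file; the paths are
    # hypothetical. ".includes_<md5>.txt" would contain a single line such as:
    #
    #   -Imbed-os -Imbed-os/hal -Isource
    #
    # The concrete toolchains then reference this file through their own
    # response-file syntax (for instance "@<file>" with GCC) instead of passing
    # every -I flag on the command line.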
00745     # Generate response file for all objects when linking.
00746     # ARM, GCC, IAR cross compatible
00747     def get_link_file(self, cmd):
00748         link_file = join(self.build_dir, ".link_files.txt")
00749         with open(link_file, "wb") as f:
00750             cmd_list = []
00751             for c in cmd:
00752                 if c:
00753                     c = c.replace("\\", "/")
00754                     if self.CHROOT:
00755                         c = c.replace(self.CHROOT, '')
00756                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00757             string = " ".join(cmd_list)
00758             f.write(string)
00759         return link_file
00760  
00761     # Generate response file for all objects when archiving.
00762     # ARM, GCC, IAR cross compatible
00763     def get_arch_file(self, objects):
00764         archive_file = join(self.build_dir, ".archive_files.txt")
00765         with open(archive_file, "wb") as f:
00766             o_list = []
00767             for o in objects:
00768                 o_list.append('"%s"' % o)
00769             string = " ".join(o_list).replace("\\", "/")
00770             f.write(string)
00771         return archive_file
00772 
00773     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00774     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00775     def compile_sources(self, resources, inc_dirs=None):
00776         # Web IDE progress bar for project build
00777         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00778         self.to_be_compiled = len(files_to_compile)
00779         self.compiled = 0
00780 
00781         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00782 
00783         inc_paths = resources.inc_dirs
00784         if inc_dirs is not None:
00785             inc_paths.extend(inc_dirs)
00786         # De-duplicate include paths
00787         inc_paths = set(inc_paths)
00788         # Sort include paths for consistency
00789         inc_paths = sorted(set(inc_paths))
00790         # Unique id of all include paths
00791         self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
00792 
00793         objects = []
00794         queue = []
00795         work_dir = getcwd()
00796         self.prev_dir = None
00797 
00798         # Generate configuration header (this will update self.build_all if needed)
00799         self.get_config_header()
00800 
00801         # Sort compile queue for consistency
00802         files_to_compile.sort()
00803         for source in files_to_compile:
00804             object = self.relative_object_path(
00805                 self.build_dir, resources.file_basepath[source], source)
00806 
00807             # Queue mode (multiprocessing)
00808             commands = self.compile_command(source, object, inc_paths)
00809             if commands is not None:
00810                 queue.append({
00811                     'source': source,
00812                     'object': object,
00813                     'commands': commands,
00814                     'work_dir': work_dir,
00815                     'chroot': self.CHROOT
00816                 })
00817             else:
00818                 self.compiled += 1
00819                 objects.append(object)
00820 
00821         # Use queues/multiprocessing if cpu count is higher than setting
00822         jobs = self.jobs if self.jobs else cpu_count()
00823         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00824             return self.compile_queue(queue, objects)
00825         else:
00826             return self.compile_seq(queue, objects)
00827 
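    # Example build flow (editor's sketch; "toolchain" is an already-constructed
    # concrete toolchain instance and the paths/names are hypothetical):
    #
    #   res = toolchain.scan_resources(".")
    #   objects = toolchain.compile_sources(res)             # parallel or sequential
    #   binary, updated = toolchain.link_program(res, "BUILD", "app")
    #
    # compile_sources() itself decides between compile_queue() and compile_seq()
    # based on self.jobs, the host CPU count and the length of the queue.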
00828     # Compile source files queue in sequential order
00829     def compile_seq(self, queue, objects):
00830         for item in queue:
00831             result = compile_worker(item)
00832 
00833             self.compiled += 1
00834             self.progress("compile", item['source'], build_update=True)
00835             for res in result['results']:
00836                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00837                 self.compile_output([
00838                     res['code'],
00839                     res['output'],
00840                     res['command']
00841                 ])
00842             objects.append(result['object'])
00843         return objects
00844 
00845     # Compile source files queue in parallel by creating pool of worker threads
00846     def compile_queue(self, queue, objects):
00847         jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
00848         p = Pool(processes=jobs_count)
00849 
00850         results = []
00851         for i in range(len(queue)):
00852             results.append(p.apply_async(compile_worker, [queue[i]]))
00853         p.close()
00854 
00855         itr = 0
00856         while len(results):
00857             itr += 1
00858             if itr > 180000:
00859                 p.terminate()
00860                 p.join()
00861                 raise ToolException("Compile did not finish in 30 minutes")
00862 
00863             sleep(0.01)
00864             pending = 0
00865             for r in results:
00866                 if r._ready is True:
00867                     try:
00868                         result = r.get()
00869                         results.remove(r)
00870 
00871                         self.compiled += 1
00872                         self.progress("compile", result['source'], build_update=True)
00873                         for res in result['results']:
00874                             self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00875                             self.compile_output([
00876                                 res['code'],
00877                                 res['output'],
00878                                 res['command']
00879                             ])
00880                         objects.append(result['object'])
00881                     except ToolException, err:
00882                         if p._taskqueue.queue:
00883                             p._taskqueue.queue.clear()
00884                             sleep(0.5)
00885                         p.terminate()
00886                         p.join()
00887                         raise ToolException(err)
00888                 else:
00889                     pending += 1
00890                     if pending >= jobs_count:
00891                         break
00892 
00893         results = None
00894         p.join()
00895 
00896         return objects
00897 
00898     # Determine the compile command based on type of source file
00899     def compile_command(self, source, object, includes):
00900         # Check dependencies
00901         _, ext = splitext(source)
00902         ext = ext.lower()
00903 
00904         if ext == '.c' or  ext == '.cpp':
00905             base, _ = splitext(object)
00906             dep_path = base + '.d'
00907             try:
00908                 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
00909             except (IOError, IndexError):
00910                 deps = []
00911             if len(deps) == 0 or self.need_update(object, deps):
00912                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
00913                     return self.compile_cpp(source, object, includes)
00914                 else:
00915                     return self.compile_c(source, object, includes)
00916         elif ext == '.s':
00917             deps = [source]
00918             if self.need_update(object, deps):
00919                 return self.assemble(source, object, includes)
00920         else:
00921             return False
00922 
00923         return None
00924 
00925     @abstractmethod
00926     def parse_dependencies(self, dep_path):
00927         """Parse the dependency information generated by the compiler.
00928 
00929         Positional arguments:
00930         dep_path -- the path to a file generated by a previous run of the compiler
00931 
00932         Return value:
00933         A list of all source files that the dependency file indicated were dependencies
00934 
00935         Side effects:
00936         None
00937         """
00938         raise NotImplementedError
00939 
00940     def is_not_supported_error(self, output):
00941         return "#error directive: [NOT_SUPPORTED]" in output
00942 
00943     @abstractmethod
00944     def parse_output(self, output):
00945         """Take in compiler output and extract single-line warnings and errors from it.
00946 
00947         Positional arguments:
00948         output -- a string of all the messages emitted by a run of the compiler
00949 
00950         Return value:
00951         None
00952 
00953         Side effects:
00954         call self.cc_info or self.notify with a description of the event generated by the compiler
00955         """
00956         raise NotImplementedError
00957 
00958     def compile_output(self, output=[]):
00959         _rc = output[0]
00960         _stderr = output[1]
00961         command = output[2]
00962 
00963         # Parse output for Warnings and Errors
00964         self.parse_output(_stderr)
00965         self.debug("Return: %s"% _rc)
00966         for error_line in _stderr.splitlines():
00967             self.debug("Output: %s"% error_line)
00968 
00969         # Check return code
00970         if _rc != 0:
00971             if self.is_not_supported_error(_stderr):
00972                 raise NotSupportedException(_stderr)
00973             else:
00974                 raise ToolException(_stderr)
00975 
00976     def build_library(self, objects, dir, name):
00977         needed_update = False
00978         lib = self.STD_LIB_NAME % name
00979         fout = join(dir, lib)
00980         if self.need_update(fout, objects):
00981             self.info("Library: %s" % lib)
00982             self.archive(objects, fout)
00983             needed_update = True
00984 
00985         return needed_update
00986 
00987     def link_program(self, r, tmp_path, name):
00988         needed_update = False
00989         ext = 'bin'
00990         if hasattr(self.target, 'OUTPUT_EXT'):
00991             ext = self.target.OUTPUT_EXT
00992 
00993         if hasattr(self.target, 'OUTPUT_NAMING'):
00994             self.var("binary_naming", self.target.OUTPUT_NAMING)
00995             if self.target.OUTPUT_NAMING == "8.3":
00996                 name = name[0:8]
00997                 ext = ext[0:3]
00998 
00999         # Create destination directory
01000         head, tail =  split(name)
01001         new_path = join(tmp_path, head)
01002         mkdir(new_path)
01003 
01004         filename = name+'.'+ext
01005         elf = join(tmp_path, name + '.elf')
01006         bin = join(tmp_path, filename)
01007         map = join(tmp_path, name + '.map')
01008 
01009         r.objects = sorted(set(r.objects))
01010         if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
01011             needed_update = True
01012             self.progress("link", name)
01013             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
01014 
01015         if self.need_update(bin, [elf]):
01016             needed_update = True
01017             self.progress("elf2bin", name)
01018             self.binary(r, elf, bin)
01019 
01020         self.map_outputs = self.mem_stats(map)
01021 
01022         self.var("compile_succeded", True)
01023         self.var("binary", filename)
01024 
01025         return bin, needed_update
01026 
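    # Example (editor's sketch): for name "app" and the default output extension,
    # link_program() produces <tmp_path>/app.elf, <tmp_path>/app.bin and
    # <tmp_path>/app.map, relinking only when an object, a library or the linker
    # script is newer than the existing .elf (see need_update above).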
01027     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01028     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01029     def default_cmd(self, command):
01030         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01031         self.debug("Return: %s"% _rc)
01032 
01033         for output_line in _stdout.splitlines():
01034             self.debug("Output: %s"% output_line)
01035         for error_line in _stderr.splitlines():
01036             self.debug("Errors: %s"% error_line)
01037 
01038         if _rc != 0:
01039             for line in _stderr.splitlines():
01040                 self.tool_error(line)
01041             raise ToolException(_stderr)
01042 
01043     ### NOTIFICATIONS ###
01044     def info(self, message):
01045         self.notify({'type': 'info', 'message': message})
01046 
01047     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01048     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01049     def debug(self, message):
01050         if self.VERBOSE:
01051             if type(message) is ListType:
01052                 message = ' '.join(message)
01053             message = "[DEBUG] " + message
01054             self.notify({'type': 'debug', 'message': message})
01055 
01056     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01057     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01058     def cc_info(self, info=None):
01059         if info is not None:
01060             info['type'] = 'cc'
01061             self.notify(info)
01062 
01063     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01064     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01065     def cc_verbose(self, message, file=""):
01066         self.debug(message)
01067 
01068     def progress(self, action, file, build_update=False):
01069         msg = {'type': 'progress', 'action': action, 'file': file}
01070         if build_update:
01071             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01072         self.notify(msg)
01073 
01074     def tool_error(self, message):
01075         self.notify({'type': 'tool_error', 'message': message})
01076 
01077     def var(self, key, value):
01078         self.notify({'type': 'var', 'key': key, 'val': value})
01079 
01080     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01081     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01082     def mem_stats(self, map):
01083         """! Creates parser object
01084         @param map Path to linker map file to parse and decode
01085         @return Memory summary structure with memory usage statistics
01086                 None if map file can't be opened and processed
01087         """
01088         toolchain = self.__class__.__name__
01089 
01090         # Create memap object
01091         memap = MemapParser()
01092 
01093         # Parse and decode a map file
01094         if memap.parse(abspath(map), toolchain) is False:
01095             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01096             return None
01097 
01098         # Store the memap instance for later use
01099         self.memap_instance = memap
01100 
01101         # Here we return memory statistics structure (constructed after
01102         # call to generate_output) which contains raw data in bytes
01103         # about sections + summary
01104         return memap.mem_report
01105 
01106     # Set the configuration data
01107     def set_config_data(self, config_data):
01108         self.config_data = config_data
01109 
01110     # Creates the configuration header if needed:
01111     # - if there is no configuration data, "mbed_config.h" is not created (or is deleted if it exists).
01112     # - if there is configuration data and "mbed_config.h" does not exist, it is created.
01113     # - if the configuration data is identical to the previous configuration data,
01114     #   "mbed_config.h" is left untouched.
01115     # - if the configuration data has changed, "mbed_config.h" is overwritten.
01116     # The function needs to be called exactly once for the lifetime of this toolchain instance.
01117     # The "config_processed" variable (below) ensures this behaviour.
01118     # The function returns the location of the configuration file, or None if there is no
01119     # configuration data available (and thus no configuration file)
01120     def get_config_header(self):
01121         if self.config_processed: # this function was already called, return its result
01122             return self.config_file
01123         # The config file is located in the build directory
01124         self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
01125         # If the file exists, read its current content in prev_data
01126         if exists(self.config_file):
01127             with open(self.config_file, "rt") as f:
01128                 prev_data = f.read()
01129         else:
01130             prev_data = None
01131         # Get the current configuration data
01132         crt_data = Config.config_to_header(self.config_data) if self.config_data else None
01133         # "changed" indicates if a configuration change was detected
01134         changed = False
01135         if prev_data is not None: # a previous mbed_config.h exists
01136             if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
01137                 remove(self.config_file)
01138                 self.config_file = None # this means "config file not present"
01139                 changed = True
01140             elif crt_data != prev_data: # different content of config file
01141                 with open(self.config_file, "wt") as f:
01142                     f.write(crt_data)
01143                 changed = True
01144         else: # a previous mbed_config.h does not exist
01145             if crt_data is not None: # there's configuration data available
01146                 with open(self.config_file, "wt") as f:
01147                     f.write(crt_data)
01148                 changed = True
01149             else:
01150                 self.config_file = None # this means "config file not present"
01151         # If there was a change in configuration, rebuild everything
01152         self.build_all = changed
01153         # Make sure that this function will only return the location of the configuration
01154         # file for subsequent calls, without trying to manipulate its content in any way.
01155         self.config_processed = True
01156         return self.config_file
01157 
01158     @staticmethod
01159     def generic_check_executable(tool_key, executable_name, levels_up,
01160                                  nested_dir=None):
01161         """
01162         Positional args:
01163         tool_key: the key to index TOOLCHAIN_PATHS
01164         executable_name: the toolchain's named executable (ex. armcc)
01165         levels_up: each toolchain joins the toolchain_path, some
01166         variable directories (bin, include), and the executable name,
01167         so the TOOLCHAIN_PATH value must be appropriately distanced
01168 
01169         Keyword args:
01170         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01171           is found (ex: 'bin' for ARM\bin\armcc); necessary to check the path
01172           that will be used by the toolchain's compile step
01173 
01174         Returns True if the executable location specified by the user
01175         exists and is valid OR the executable can be found on the PATH.
01176         Returns False otherwise.
01177         """
01178         # Search PATH if user did not specify a path or specified path doesn't
01179         # exist.
01180         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01181             exe = find_executable(executable_name)
01182             if not exe:
01183                 return False
01184             for level in range(levels_up):
01185                 # move up the specified number of directories
01186                 exe = dirname(exe)
01187             TOOLCHAIN_PATHS[tool_key] = exe
01188         if nested_dir:
01189             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01190                           executable_name)
01191         else:
01192             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01193         # User could have specified a path that exists but does not contain exe
01194         return exists(subdir) or exists(subdir +'.exe')
01195 
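    # Example (editor's sketch) of how a concrete toolchain might delegate to the
    # helper above; the executable name and levels_up value are assumptions, not
    # the verbatim implementation of any shipped toolchain:
    #
    #   @staticmethod
    #   def check_executable():
    #       return mbedToolchain.generic_check_executable(
    #           "GCC_ARM", "arm-none-eabi-gcc", 1)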
01196     @abstractmethod
01197     def check_executable(self):
01198         """Returns True if the executable (armcc) location specified by the
01199          user exists OR the executable can be found on the PATH.
01200          Returns False otherwise."""
01201         raise NotImplementedError
01202 
01203     @abstractmethod
01204     def get_config_option(self, config_header):
01205         """Generate the compiler option that forces the inclusion of the configuration
01206         header file.
01207 
01208         Positional arguments:
01209         config_header -- The configuration header that will be included within all source files
01210 
01211         Return value:
01212         A list of the command line arguments that will force the inclusion the specified header
01213 
01214         Side effects:
01215         None
01216         """
01217         raise NotImplementedError
01218 
01219     @abstractmethod
01220     def assemble(self, source, object, includes):
01221         """Generate the command line that assembles.
01222 
01223         Positional arguments:
01224         source -- a file path that is the file to assemble
01225         object -- a file path that is the destination object
01226         includes -- a list of all directories where header files may be found
01227 
01228         Return value:
01229         The complete command line, as a list, that would invoke the assembler
01230         on the source file, include all the include paths, and generate
01231         the specified object file.
01232 
01233         Side effects:
01234         None
01235 
01236         Note:
01237         This method should be decorated with @hook_tool.
01238         """
01239         raise NotImplementedError
01240 
01241     @abstractmethod
01242     def compile_c(self, source, object, includes):
01243         """Generate the command line that compiles a C source file.
01244 
01245         Positional arguments:
01246         source -- the C source file to compile
01247         object -- the destination object file
01248         includes -- a list of all the directories where header files may be found
01249 
01250         Return value:
01251         The complete command line, as a list, that would invoke the C compiler
01252         on the source file, include all the include paths, and generate the
01253         specified object file.
01254 
01255         Side effects:
01256         None
01257 
01258         Note:
01259         This method should be decorated with @hook_tool.
01260         """
01261         raise NotImplementedError
01262 
01263     @abstractmethod
01264     def compile_cpp(self, source, object, includes):
01265         """Generate the command line that compiles a C++ source file.
01266 
01267         Positional arguments:
01268         source -- the C++ source file to compile
01269         object -- the destination object file
01270         includes -- a list of all the directories where header files may be found
01271 
01272         Return value:
01273         The complete command line, as a list, that would invoke the C++ compiler
01274         on the source file, include all the include paths, and generate the
01275         specified object file.
01276 
01277         Side effects:
01278         None
01279 
01280         Note:
01281         This method should be decorated with @hook_tool.
01282         """
01283         raise NotImplementedError
01284 
01285     @abstractmethod
01286     def link(self, output, objects, libraries, lib_dirs, mem_map):
01287         """Run the linker to create an executable and memory map.
01288 
01289         Positional arguments:
01290         output -- the file name to place the executable in
01291         objects -- all of the object files to link
01292         libraries -- all of the required libraries
01293         lib_dirs -- where the required libraries are located
01294         mem_map -- the location where the memory map file should be stored
01295 
01296         Return value:
01297         None
01298 
01299         Side effect:
01300         Runs the linker to produce the executable.
01301 
01302         Note:
01303         This method should be decorated with @hook_tool.
01304         """
01305         raise NotImplementedError
01306 
01307     @abstractmethod
01308     def archive(self, objects, lib_path):
01309         """Run the command line that creates an archive.
01310 
01311         Positional arguments:
01312         objects -- a list of all the object files that should be archived
01313         lib_path -- the file name of the resulting library file
01314 
01315         Return value:
01316         None
01317 
01318         Side effect:
01319         Runs the archiving tool to produce the library file.
01320 
01321         Note:
01322         This method should be decorated with @hook_tool.
01323         """
01324         raise NotImplementedError
01325 
01326     @abstractmethod
01327     def binary(self, resources, elf, bin):
01328         """Run the command line that will extract a simplified binary file.
01329 
01330         Positional arguments:
01331         resources -- a Resources object (not used by any of the toolchains)
01332         elf -- the executable file that is to be converted
01333         bin -- the file name of the simplified binary file to be created
01334 
01335         Return value:
01336         None
01337 
01338         Side effect:
01339         Runs the elf2bin tool to produce the simplified binary file.
01340 
01341         Note:
01342         This method should be decorated with @hook_tool.
01343         """
01344         raise NotImplementedError
01345 
01346     @staticmethod
01347     @abstractmethod
01348     def name_mangle(name):
01349         """Mangle a name based on the conventional name mangling of this toolchain
01350 
01351         Positional arguments:
01352         name -- the name to mangle
01353 
01354         Return:
01355         the mangled name as a string
01356         """
01357         raise NotImplementedError
01358 
01359     @staticmethod
01360     @abstractmethod
01361     def make_ld_define(name, value):
01362         """Create an argument to the linker that would define a symbol
01363 
01364         Positional arguments:
01365         name -- the symbol to define
01366         value -- the value to give the symbol
01367 
01368         Return:
01369         The linker flag as a string
01370         """
01371         raise NotImplementedError
01372 
01373     @staticmethod
01374     @abstractmethod
01375     def redirect_symbol(source, sync, build_dir):
01376         """Redirect a symbol at link time to point at somewhere else
01377 
01378         Positional arguments:
01379         source -- the symbol doing the pointing
01380         sync -- the symbol being pointed to
01381         build_dir -- the directory to put "response files" if needed by the toolchain
01382 
01383         Side Effects:
01384         Possibly create a file in the build directory
01385 
01386         Return:
01387         The linker flag to redirect the symbol, as a string
01388         """
01389         raise NotImplementedError
01390 
01391     # Return the list of macros generated by the build system
01392     def get_config_macros(self):
01393         return Config.config_to_macros(self.config_data) if self.config_data else []
01394 
01395 from tools.settings import ARM_PATH
01396 from tools.settings import GCC_ARM_PATH
01397 from tools.settings import IAR_PATH
01398 
01399 TOOLCHAIN_PATHS = {
01400     'ARM': ARM_PATH,
01401     'uARM': ARM_PATH,
01402     'GCC_ARM': GCC_ARM_PATH,
01403     'IAR': IAR_PATH
01404 }
01405 
01406 from tools.toolchains.arm import ARM_STD, ARM_MICRO
01407 from tools.toolchains.gcc import GCC_ARM
01408 from tools.toolchains.iar import IAR
01409 
01410 TOOLCHAIN_CLASSES = {
01411     'ARM': ARM_STD,
01412     'uARM': ARM_MICRO,
01413     'GCC_ARM': GCC_ARM,
01414     'IAR': IAR
01415 }
01416 
01417 TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
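# Example (editor's sketch): resolving and constructing a toolchain by name. The
# "target" object would come from the targets database (an assumption here), and
# the keyword arguments follow mbedToolchain.__init__ above.
#
#   name = 'GCC_ARM'
#   if name in TOOLCHAINS:
#       toolchain = TOOLCHAIN_CLASSES[name](target, build_dir="BUILD")
#       if not toolchain.check_executable():
#           raise ToolException("Could not find executable for %s" % name)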