ON Semiconductor / mbed-os

Dependents:   mbed-TFT-example-NCS36510 mbed-Accelerometer-example-NCS36510

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 
00018 import re
00019 import sys
00020 from os import stat, walk, getcwd, sep, remove
00021 from copy import copy
00022 from time import time, sleep
00023 from types import ListType
00024 from shutil import copyfile
00025 from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir
00026 from inspect import getmro
00027 from copy import deepcopy
00028 from tools.config import Config
00029 from abc import ABCMeta, abstractmethod
00030 from distutils.spawn import find_executable
00031 
00032 from multiprocessing import Pool, cpu_count
00033 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker
00034 from tools.settings import MBED_ORG_USER
00035 import tools.hooks as hooks
00036 from tools.memap import MemapParser
00037 from hashlib import md5
00038 import fnmatch
00039 
00040 
# Minimum number of worker processes for parallel compilation; setting this
# higher than the host machine's CPU count effectively disables multiprocessing
CPU_COUNT_MIN = 1
# Multiplier applied to the detected CPU count when sizing the worker pool
CPU_COEF = 1
00044 
class Resources:
    """Container for everything discovered by a toolchain scan: sources,
    headers, include dirs, libraries, linker script, repo metadata and
    feature sub-resources (nested Resources keyed by feature name).
    """

    def __init__(self, base_path=None):
        # Directory this scan was rooted at (used when re-rooting paths)
        self.base_path = base_path

        # Maps every discovered path to the base path it was found under
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features: feature name -> Resources scanned from a FEATURE_* dir
        self.features = {}

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def add(self, resources):
        """Merge *resources* into self (in place) and return self."""
        # dict.items() works on both Python 2 and 3 (iteritems was py2-only)
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last linker script wins, mirroring the scan order
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate (recursively over features) the set of source paths
        per object name and the set of header paths per header name.
        """
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # Record the full path; the original added the bare header
            # name here, so duplicate headers could never be detected
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of ambiguities found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"\
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %\
                    (headername, " ".join(locations)))
        return count


    def relative_to(self, base, dot=False):
        """Rewrite all stored paths relative to *base*, in place."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        # Recurse into features *in place*. The original assigned the
        # (None) return value of the recursive call back into the dict,
        # wiping out every feature's resources.
        self.features = {k: f for k, f in self.features.items() if f}
        for f in self.features.values():
            f.relative_to(base, dot)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert all stored paths from Windows to POSIX separators, in place."""
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        # Same in-place recursion fix as in relative_to (the original
        # replaced each feature with the None return of win_to_unix)
        self.features = {k: f for k, f in self.features.items() if f}
        for f in self.features.values():
            f.win_to_unix()

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        """Human-readable summary of the non-empty resource categories."""
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00213 
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
])
# Maps a toolchain class name to the legacy directory name that must NOT be
# ignored for that toolchain (see legacy_ignore_dirs in mbedToolchain.__init__)
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}
00227 
00228 
class mbedToolchain:
    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols injected per Cortex core (FPU and CMSIS-RTOS
    # variants included); consumed by get_symbols()
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
    }

    # Name of the generated configuration header (see get_config_header elsewhere)
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Abstract base class (Python 2 metaclass spelling)
    __metaclass__ = ABCMeta

    # Per-category empty flag lists; deep-copied per instance in __init__
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00258 
    def __init__(self, target, notify=None, macros=None, silent=False, extra_verbose=False, build_profile=None):
        """Construct a toolchain bound to *target*.

        target        - target object (provides name, core, macros, features, ...)
        notify        - optional event-notification callable (see comment below)
        macros        - extra user-defined preprocessor macros
        silent        - suppress all printed output when True
        extra_verbose - use the verbose notification formatter
        build_profile - per-category flag dict overriding profile_template
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep copy so mutations don't leak into the profile)
        self.flags = deepcopy(build_profile or self.profile_template)

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        # (lazily computed caches for get_symbols)
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = None
        self.timestamp = time()

        # Output build naming based on target+toolchain combo (mbed 2.0 builds)
        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files
        self.ignore_patterns = []

        # Pre-mbed 2.0 ignore dirs
        # NOTE(review): TOOLCHAINS is not defined in this chunk — presumably
        # imported/defined elsewhere in the module; confirm before refactoring
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()
        self.map_outputs = list()   # Place to store memmap scan results in JSON like data structures

        # uVisor specific rules: drop the trailing "F" from the core name
        # (e.g. "Cortex-M4F" -> "Cortex-M4") when uVisor is enabled and supported
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00346 
    # Used for post __init__() hooks
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def init(self):
        """Post-constructor hook; the default implementation does nothing
        and always returns True."""
        return True
00352 
    def get_output(self):
        """Return the accumulated notification output buffer (a str)."""
        return self.output
00355 
    def print_notify(self, event, silent=False):
        """ Default command line notification

        Formats *event* (a dict with at least a 'type' key) into a
        human-readable message, prints it unless *silent*, and always
        appends it to the output buffer.
        """
        msg = None

        # Tool errors are only printed here in non-verbose mode
        if not self.VERBOSE and event['type'] == 'tool_error':
            msg = event['message']

        elif event['type'] in ['info', 'debug']:
            msg = event['message']

        elif event['type'] == 'cc':
            # Compiler diagnostic: "[Severity] file@line,col: message"
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event

        elif event['type'] == 'progress':
            # Build progress: action name, optional percentage, file name
            if 'percent' in event:
                msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
                                                  event['percent'],
                                                  basename(event['file']))
            else:
                msg = '{}: {}'.format(event['action'].title(),
                                      basename(event['file']))

        if msg:
            if not silent:
                print msg
            self.output += msg + "\n"
00385 
    def print_notify_verbose(self, event, silent=False):
        """ Default command line notification with more verbose mode

        Like print_notify, but 'cc' events also carry the target and
        toolchain names in the message.
        """
        if event['type'] in ['info', 'debug']:
            self.print_notify(event, silent=silent) # standard handle

        elif event['type'] == 'cc':
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            # NOTE(review): mcu_name is hard-coded to "None" and unused in the
            # format string below — looks vestigial; confirm before removing
            event['mcu_name'] = "None"
            event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
            event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
            msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
            if not silent:
                print msg
            self.output += msg + "\n"

        elif event['type'] == 'progress':
            self.print_notify(event) # standard handle
00405 
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def notify(self, event):
        """ Little closure for notify functions

        Tags *event* with this toolchain instance and forwards it to the
        configured notify function along with the silent flag.
        """
        event['toolchain'] = self
        return self.notify_fun(event, self.silent)
00413 
    def get_symbols(self, for_asm=False):
        """Return the unique preprocessor symbols for this target/toolchain,
        either for the assembler (*for_asm* True) or the C/C++ compilers.
        Symbol lists are computed once and cached on the instance.

        NOTE(review): list(set(...)) does not preserve order, so the symbol
        order may vary between runs.
        """
        if for_asm:
            if self.asm_symbols is None:
                self.asm_symbols = []

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Add target's symbols
                self.asm_symbols += self.target.macros
                # Add extra symbols passed via 'macros' parameter
                self.asm_symbols += self.macros
            return list(set(self.asm_symbols))  # Return only unique symbols
        else:
            if self.cxx_symbols is None:
                # Target and Toolchain symbols
                labels = self.get_labels()
                self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
                self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Symbols defined by the on-line build.system
                self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
                if MBED_ORG_USER:
                    self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

                # Add target's symbols
                self.cxx_symbols += self.target.macros
                # Add target's hardware
                self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
                # Add target's features
                self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
                # Add extra symbols passed via 'macros' parameter
                self.cxx_symbols += self.macros

                # Form factor variables
                if hasattr(self.target, 'supported_form_factors'):
                    self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

            return list(set(self.cxx_symbols))  # Return only unique symbols
00458 
    # Extend the internal list of macros
    def add_macros(self, new_macros):
        """Append *new_macros* (an iterable of symbol strings) to the
        user-defined macro list."""
        self.macros.extend(new_macros)
00462 
00463     def get_labels(self):
00464         if self.labels is None:
00465             toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
00466             toolchain_labels.remove('mbedToolchain')
00467             self.labels = {
00468                 'TARGET': self.target.labels,
00469                 'FEATURE': self.target.features,
00470                 'TOOLCHAIN': toolchain_labels
00471             }
00472 
00473             # This is a policy decision and it should /really/ be in the config system
00474             # ATM it's here for backward compatibility
00475             if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
00476                  "-O0") in self.flags['common'] or
00477                 ("-r" in self.flags['common'] and
00478                  "-On" in self.flags['common'])):
00479                 self.labels['TARGET'].append("DEBUG")
00480             else:
00481                 self.labels['TARGET'].append("RELEASE")
00482         return self.labels
00483 
00484 
00485     # Determine whether a source file needs updating/compiling
00486     def need_update(self, target, dependencies):
00487         if self.build_all:
00488             return True
00489 
00490         if not exists(target):
00491             return True
00492 
00493         target_mod_time = stat(target).st_mtime
00494 
00495         for d in dependencies:
00496             # Some objects are not provided with full path and here we do not have
00497             # information about the library paths. Safe option: assume an update
00498             if not d or not exists(d):
00499                 return True
00500             
00501             if not self.stat_cache.has_key(d):
00502                 self.stat_cache[d] = stat(d).st_mtime
00503 
00504             if self.stat_cache[d] >= target_mod_time:
00505                 return True
00506         
00507         return False
00508 
00509     def is_ignored(self, file_path):
00510         for pattern in self.ignore_patterns:
00511             if fnmatch.fnmatch(file_path, pattern):
00512                 return True
00513         return False
00514 
    # Create a Resources object from the path pointed to by *path* by either
    # traversing a directory structure, when *path* is a directory, or adding
    # *path* to the resources, when *path* is a file.
    # The parameter *base_path* is used to set the base_path attribute of the
    # Resources object and the parameter *exclude_paths* is used by the
    # directory traversal to exclude certain paths from the traversal.
    def scan_resources(self, path, exclude_paths=None, base_path=None):
        """Scan *path* (file or directory) and return a populated Resources
        object rooted at *base_path* (defaults to *path* or its directory).
        """
        self.progress("scan", path)

        resources = Resources(path)
        # Default the base path: the containing dir for a file, else the path itself
        if not base_path:
            if isfile(path):
                base_path = dirname(path)
            else:
                base_path = path
        resources.base_path = base_path

        if isfile(path):
            self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
        else:
            self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
        return resources
00537 
    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
    # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.ignore_patterns.extend([join(root,line.strip()) for line in lines])

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            if self.is_ignored(join(root,"")):
                continue

            # Iterate over a copy so dirs can be pruned in place (see docstring)
            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(dir_path,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
                    dirs.remove(d)
                elif exclude_paths:
                    # Prune any directory that lies inside one of the exclude paths
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            dirs.remove(d)
                            break

            # Add root to include paths
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00603 
00604     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00605     # (*file_path*) to the resources object based on the file type.
00606     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00607         resources.file_basepath[file_path] = base_path
00608 
00609         if self.is_ignored(file_path):
00610             return
00611 
00612         _, ext = splitext(file_path)
00613         ext = ext.lower()
00614 
00615         if   ext == '.s':
00616             resources.s_sources.append(file_path)
00617 
00618         elif ext == '.c':
00619             resources.c_sources.append(file_path)
00620 
00621         elif ext == '.cpp':
00622             resources.cpp_sources.append(file_path)
00623 
00624         elif ext == '.h' or ext == '.hpp':
00625             resources.headers.append(file_path)
00626 
00627         elif ext == '.o':
00628             resources.objects.append(file_path)
00629 
00630         elif ext == self.LIBRARY_EXT:
00631             resources.libraries.append(file_path)
00632             resources.lib_dirs.add(dirname(file_path))
00633 
00634         elif ext == self.LINKER_EXT:
00635             if resources.linker_script is not None:
00636                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00637             resources.linker_script = file_path
00638 
00639         elif ext == '.lib':
00640             resources.lib_refs.append(file_path)
00641 
00642         elif ext == '.bld':
00643             resources.lib_builds.append(file_path)
00644 
00645         elif basename(file_path) == '.hgignore':
00646             resources.repo_files.append(file_path)
00647 
00648         elif basename(file_path) == '.gitignore':
00649             resources.repo_files.append(file_path)
00650 
00651         elif ext == '.hex':
00652             resources.hex_files.append(file_path)
00653 
00654         elif ext == '.bin':
00655             resources.bin_files.append(file_path)
00656 
00657         elif ext == '.json':
00658             resources.json_files.append(file_path)
00659 
00660 
00661     def scan_repository(self, path):
00662         resources = []
00663 
00664         for root, dirs, files in walk(path):
00665             # Remove ignored directories
00666             for d in copy(dirs):
00667                 if d == '.' or d == '..':
00668                     dirs.remove(d)
00669 
00670             for file in files:
00671                 file_path = join(root, file)
00672                 resources.append(file_path)
00673 
00674         return resources
00675 
00676     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00677         # Handle a single file
00678         if type(files_paths) != ListType: files_paths = [files_paths]
00679 
00680         for source in files_paths:
00681             if source is None:
00682                 files_paths.remove(source)
00683 
00684         for source in files_paths:
00685             if resources is not None and resources.file_basepath.has_key(source):
00686                 relative_path = relpath(source, resources.file_basepath[source])
00687             elif rel_path is not None:
00688                 relative_path = relpath(source, rel_path)
00689             else:
00690                 _, relative_path = split(source)
00691 
00692             target = join(trg_path, relative_path)
00693 
00694             if (target != source) and (self.need_update(target, [source])):
00695                 self.progress("copy", relative_path)
00696                 mkdir(dirname(target))
00697                 copyfile(source, target)
00698 
00699     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00700     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00701     def relative_object_path(self, build_path, base_dir, source):
00702         source_dir, name, _ = split_path(source)
00703 
00704         obj_dir = join(build_path, relpath(source_dir, base_dir))
00705         if obj_dir is not self.prev_dir:
00706             self.prev_dir = obj_dir
00707             mkdir(obj_dir)
00708         return join(obj_dir, name + '.o')
00709 
00710     # Generate response file for all includes.
00711     # ARM, GCC, IAR cross compatible
00712     def get_inc_file(self, includes):
00713         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00714         if not exists(include_file):
00715             with open(include_file, "wb") as f:
00716                 cmd_list = []
00717                 for c in includes:
00718                     if c:
00719                         c = c.replace("\\", "/")
00720                         if self.CHROOT:
00721                             c = c.replace(self.CHROOT, '')
00722                         cmd_list.append('-I%s' % c)
00723                 string = " ".join(cmd_list)
00724                 f.write(string)
00725         return include_file
00726 
00727     # Generate response file for all objects when linking.
00728     # ARM, GCC, IAR cross compatible
00729     def get_link_file(self, cmd):
00730         link_file = join(self.build_dir, ".link_files.txt")
00731         with open(link_file, "wb") as f:
00732             cmd_list = []
00733             for c in cmd:
00734                 if c:
00735                     c = c.replace("\\", "/")
00736                     if self.CHROOT:
00737                         c = c.replace(self.CHROOT, '')
00738                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00739             string = " ".join(cmd_list)
00740             f.write(string)
00741         return link_file
00742  
00743     # Generate response file for all objects when archiving.
00744     # ARM, GCC, IAR cross compatible
00745     def get_arch_file(self, objects):
00746         archive_file = join(self.build_dir, ".archive_files.txt")
00747         with open(archive_file, "wb") as f:
00748             o_list = []
00749             for o in objects:
00750                 o_list.append('"%s"' % o)
00751             string = " ".join(o_list).replace("\\", "/")
00752             f.write(string)
00753         return archive_file
00754 
00755     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00756     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00757     def compile_sources(self, resources, build_path, inc_dirs=None):
00758         # Web IDE progress bar for project build
00759         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00760         self.to_be_compiled = len(files_to_compile)
00761         self.compiled = 0
00762 
00763         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00764 
00765         inc_paths = resources.inc_dirs
00766         if inc_dirs is not None:
00767             inc_paths.extend(inc_dirs)
00768         # De-duplicate include paths
00769         inc_paths = set(inc_paths)
00770         # Sort include paths for consistency
00771         inc_paths = sorted(set(inc_paths))
00772         # Unique id of all include paths
00773         self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
00774         # Where to store response files
00775         self.build_dir = build_path
00776 
00777         objects = []
00778         queue = []
00779         work_dir = getcwd()
00780         self.prev_dir = None
00781 
00782         # Generate configuration header (this will update self.build_all if needed)
00783         self.get_config_header()
00784 
00785         # Sort compile queue for consistency
00786         files_to_compile.sort()
00787         for source in files_to_compile:
00788             object = self.relative_object_path(build_path, resources.file_basepath[source], source)
00789 
00790             # Queue mode (multiprocessing)
00791             commands = self.compile_command(source, object, inc_paths)
00792             if commands is not None:
00793                 queue.append({
00794                     'source': source,
00795                     'object': object,
00796                     'commands': commands,
00797                     'work_dir': work_dir,
00798                     'chroot': self.CHROOT
00799                 })
00800             else:
00801                 self.compiled += 1
00802                 objects.append(object)
00803 
00804         # Use queues/multiprocessing if cpu count is higher than setting
00805         jobs = self.jobs if self.jobs else cpu_count()
00806         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00807             return self.compile_queue(queue, objects)
00808         else:
00809             return self.compile_seq(queue, objects)
00810 
00811     # Compile source files queue in sequential order
00812     def compile_seq(self, queue, objects):
00813         for item in queue:
00814             result = compile_worker(item)
00815 
00816             self.compiled += 1
00817             self.progress("compile", item['source'], build_update=True)
00818             for res in result['results']:
00819                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00820                 self.compile_output([
00821                     res['code'],
00822                     res['output'],
00823                     res['command']
00824                 ])
00825             objects.append(result['object'])
00826         return objects
00827 
00828     # Compile source files queue in parallel by creating pool of worker threads
00829     def compile_queue(self, queue, objects):
00830         jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
00831         p = Pool(processes=jobs_count)
00832 
00833         results = []
00834         for i in range(len(queue)):
00835             results.append(p.apply_async(compile_worker, [queue[i]]))
00836         p.close()
00837 
00838         itr = 0
00839         while len(results):
00840             itr += 1
00841             if itr > 180000:
00842                 p.terminate()
00843                 p.join()
00844                 raise ToolException("Compile did not finish in 5 minutes")
00845 
00846             sleep(0.01)
00847             pending = 0
00848             for r in results:
00849                 if r._ready is True:
00850                     try:
00851                         result = r.get()
00852                         results.remove(r)
00853 
00854                         self.compiled += 1
00855                         self.progress("compile", result['source'], build_update=True)
00856                         for res in result['results']:
00857                             self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00858                             self.compile_output([
00859                                 res['code'],
00860                                 res['output'],
00861                                 res['command']
00862                             ])
00863                         objects.append(result['object'])
00864                     except ToolException, err:
00865                         if p._taskqueue.queue:
00866                             p._taskqueue.queue.clear()
00867                             sleep(0.5)
00868                         p.terminate()
00869                         p.join()
00870                         raise ToolException(err)
00871                 else:
00872                     pending += 1
00873                     if pending >= jobs_count:
00874                         break
00875 
00876         results = None
00877         p.join()
00878 
00879         return objects
00880 
00881     # Determine the compile command based on type of source file
00882     def compile_command(self, source, object, includes):
00883         # Check dependencies
00884         _, ext = splitext(source)
00885         ext = ext.lower()
00886 
00887         if ext == '.c' or  ext == '.cpp':
00888             base, _ = splitext(object)
00889             dep_path = base + '.d'
00890             try:
00891                 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
00892             except IOError, IndexError:
00893                 deps = []
00894             if len(deps) == 0 or self.need_update(object, deps):
00895                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
00896                     return self.compile_cpp(source, object, includes)
00897                 else:
00898                     return self.compile_c(source, object, includes)
00899         elif ext == '.s':
00900             deps = [source]
00901             if self.need_update(object, deps):
00902                 return self.assemble(source, object, includes)
00903         else:
00904             return False
00905 
00906         return None
00907 
00908     @abstractmethod
00909     def parse_dependencies(self, dep_path):
00910         """Parse the dependency information generated by the compiler.
00911 
00912         Positional arguments:
00913         dep_path -- the path to a file generated by a previous run of the compiler
00914 
00915         Return value:
00916         A list of all source files that the dependency file indicated were dependencies
00917 
00918         Side effects:
00919         None
00920         """
00921         raise NotImplemented
00922 
00923     def is_not_supported_error(self, output):
00924         return "#error directive: [NOT_SUPPORTED]" in output
00925 
00926     @abstractmethod
00927     def parse_output(self, output):
00928         """Take in compiler output and extract sinlge line warnings and errors from it.
00929 
00930         Positional arguments:
00931         output -- a string of all the messages emitted by a run of the compiler
00932 
00933         Return value:
00934         None
00935 
00936         Side effects:
00937         call self.cc_info or self.notify with a description of the event generated by the compiler
00938         """
00939         raise NotImplemented
00940 
00941     def compile_output(self, output=[]):
00942         _rc = output[0]
00943         _stderr = output[1]
00944         command = output[2]
00945 
00946         # Parse output for Warnings and Errors
00947         self.parse_output(_stderr)
00948         self.debug("Return: %s"% _rc)
00949         for error_line in _stderr.splitlines():
00950             self.debug("Output: %s"% error_line)
00951 
00952         # Check return code
00953         if _rc != 0:
00954             if self.is_not_supported_error(_stderr):
00955                 raise NotSupportedException(_stderr)
00956             else:
00957                 raise ToolException(_stderr)
00958 
00959     def build_library(self, objects, dir, name):
00960         needed_update = False
00961         lib = self.STD_LIB_NAME % name
00962         fout = join(dir, lib)
00963         if self.need_update(fout, objects):
00964             self.info("Library: %s" % lib)
00965             self.archive(objects, fout)
00966             needed_update = True
00967 
00968         return needed_update
00969 
00970     def link_program(self, r, tmp_path, name):
00971         needed_update = False
00972         ext = 'bin'
00973         if hasattr(self.target, 'OUTPUT_EXT'):
00974             ext = self.target.OUTPUT_EXT
00975 
00976         if hasattr(self.target, 'OUTPUT_NAMING'):
00977             self.var("binary_naming", self.target.OUTPUT_NAMING)
00978             if self.target.OUTPUT_NAMING == "8.3":
00979                 name = name[0:8]
00980                 ext = ext[0:3]
00981 
00982         # Create destination directory
00983         head, tail =  split(name)
00984         new_path = join(tmp_path, head)
00985         mkdir(new_path)
00986 
00987         filename = name+'.'+ext
00988         elf = join(tmp_path, name + '.elf')
00989         bin = join(tmp_path, filename)
00990         map = join(tmp_path, name + '.map')
00991 
00992         r.objects = sorted(set(r.objects))
00993         if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
00994             needed_update = True
00995             self.progress("link", name)
00996             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
00997 
00998         if self.need_update(bin, [elf]):
00999             needed_update = True
01000             self.progress("elf2bin", name)
01001             self.binary(r, elf, bin)
01002 
01003         self.map_outputs = self.mem_stats(map)
01004 
01005         self.var("compile_succeded", True)
01006         self.var("binary", filename)
01007 
01008         return bin, needed_update
01009 
01010     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01011     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01012     def default_cmd(self, command):
01013         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01014         self.debug("Return: %s"% _rc)
01015 
01016         for output_line in _stdout.splitlines():
01017             self.debug("Output: %s"% output_line)
01018         for error_line in _stderr.splitlines():
01019             self.debug("Errors: %s"% error_line)
01020 
01021         if _rc != 0:
01022             for line in _stderr.splitlines():
01023                 self.tool_error(line)
01024             raise ToolException(_stderr)
01025 
01026     ### NOTIFICATIONS ###
01027     def info(self, message):
01028         self.notify({'type': 'info', 'message': message})
01029 
01030     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01031     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01032     def debug(self, message):
01033         if self.VERBOSE:
01034             if type(message) is ListType:
01035                 message = ' '.join(message)
01036             message = "[DEBUG] " + message
01037             self.notify({'type': 'debug', 'message': message})
01038 
01039     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01040     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01041     def cc_info(self, info=None):
01042         if info is not None:
01043             info['type'] = 'cc'
01044             self.notify(info)
01045 
01046     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01047     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01048     def cc_verbose(self, message, file=""):
01049         self.debug(message)
01050 
01051     def progress(self, action, file, build_update=False):
01052         msg = {'type': 'progress', 'action': action, 'file': file}
01053         if build_update:
01054             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01055         self.notify(msg)
01056 
01057     def tool_error(self, message):
01058         self.notify({'type': 'tool_error', 'message': message})
01059 
01060     def var(self, key, value):
01061         self.notify({'type': 'var', 'key': key, 'val': value})
01062 
01063     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01064     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01065     def mem_stats(self, map):
01066         """! Creates parser object
01067         @param map Path to linker map file to parse and decode
01068         @return Memory summary structure with memory usage statistics
01069                 None if map file can't be opened and processed
01070         """
01071         toolchain = self.__class__.__name__
01072 
01073         # Create memap object
01074         memap = MemapParser()
01075 
01076         # Parse and decode a map file
01077         if memap.parse(abspath(map), toolchain) is False:
01078             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01079             return None
01080 
01081         # Store the memap instance for later use
01082         self.memap_instance = memap
01083 
01084         # Here we return memory statistics structure (constructed after
01085         # call to generate_output) which contains raw data in bytes
01086         # about sections + summary
01087         return memap.mem_report
01088 
01089     # Set the configuration data
01090     def set_config_data(self, config_data):
01091         self.config_data = config_data
01092 
    # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not created (or deleted if it exists).
    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
    # - if there is configuration data similar to the previous configuration data,
    #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overridden.
    # The function needs to be called exactly once for the lifetime of this toolchain instance.
    # The "config_processed" variable (below) ensures this behaviour.
    # The function returns the location of the configuration file, or None if there is no
    # configuration data available (and thus no configuration file)
    def get_config_header(self):
        """Create, refresh, or delete "mbed_config.h" in the build directory.

        Return value:
        The path of the configuration header, or None when there is no
        configuration data (and therefore no header file).

        Side effects:
        Sets self.build_all when a configuration change was detected,
        forcing a full rebuild; caches its result in self.config_file.
        """
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "rt") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = Config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "wt") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "wt") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file
01140 
01141     @staticmethod
01142     def generic_check_executable(tool_key, executable_name, levels_up,
01143                                  nested_dir=None):
01144         """
01145         Positional args:
01146         tool_key: the key to index TOOLCHAIN_PATHS
01147         executable_name: the toolchain's named executable (ex. armcc)
01148         levels_up: each toolchain joins the toolchain_path, some
01149         variable directories (bin, include), and the executable name,
01150         so the TOOLCHAIN_PATH value must be appropriately distanced
01151 
01152         Keyword args:
01153         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01154           is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
01155           that will be used by toolchain's compile)
01156 
01157         Returns True if the executable location specified by the user
01158         exists and is valid OR the executable can be found on the PATH.
01159         Returns False otherwise.
01160         """
01161         # Search PATH if user did not specify a path or specified path doesn't
01162         # exist.
01163         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01164             exe = find_executable(executable_name)
01165             if not exe:
01166                 return False
01167             for level in range(levels_up):
01168                 # move up the specified number of directories
01169                 exe = dirname(exe)
01170             TOOLCHAIN_PATHS[tool_key] = exe
01171         if nested_dir:
01172             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01173                           executable_name)
01174         else:
01175             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01176         # User could have specified a path that exists but does not contain exe
01177         return exists(subdir) or exists(subdir +'.exe')
01178 
01179     @abstractmethod
01180     def check_executable(self):
01181         """Returns True if the executable (armcc) location specified by the
01182          user exists OR the executable can be found on the PATH.
01183          Returns False otherwise."""
01184         raise NotImplemented
01185 
01186     @abstractmethod
01187     def get_config_option(self, config_header):
01188         """Generate the compiler option that forces the inclusion of the configuration
01189         header file.
01190 
01191         Positional arguments:
01192         config_header -- The configuration header that will be included within all source files
01193 
01194         Return value:
01195         A list of the command line arguments that will force the inclusion the specified header
01196 
01197         Side effects:
01198         None
01199         """
01200         raise NotImplemented
01201 
01202     @abstractmethod
01203     def assemble(self, source, object, includes):
01204         """Generate the command line that assembles.
01205 
01206         Positional arguments:
01207         source -- a file path that is the file to assemble
01208         object -- a file path that is the destination object
01209         includes -- a list of all directories where header files may be found
01210 
01211         Return value:
01212         The complete command line, as a list, that would invoke the assembler
01213         on the source file, include all the include paths, and generate
01214         the specified object file.
01215 
01216         Side effects:
01217         None
01218 
01219         Note:
01220         This method should be decorated with @hook_tool.
01221         """
01222         raise NotImplemented
01223 
01224     @abstractmethod
01225     def compile_c(self, source, object, includes):
01226         """Generate the command line that compiles a C source file.
01227 
01228         Positional arguments:
01229         source -- the C source file to compile
01230         object -- the destination object file
01231         includes -- a list of all the directories where header files may be found
01232 
01233         Return value:
01234         The complete command line, as a list, that would invoke the C compiler
01235         on the source file, include all the include paths, and generate the
01236         specified object file.
01237 
01238         Side effects:
01239         None
01240 
01241         Note:
01242         This method should be decorated with @hook_tool.
01243         """
01244         raise NotImplemented
01245 
01246     @abstractmethod
01247     def compile_cpp(self, source, object, includes):
01248         """Generate the command line that compiles a C++ source file.
01249 
01250         Positional arguments:
01251         source -- the C++ source file to compile
01252         object -- the destination object file
01253         includes -- a list of all the directories where header files may be found
01254 
01255         Return value:
01256         The complete command line, as a list, that would invoke the C++ compiler
01257         on the source file, include all the include paths, and generate the
01258         specified object file.
01259 
01260         Side effects:
01261         None
01262 
01263         Note:
01264         This method should be decorated with @hook_tool.
01265         """
01266         raise NotImplemented
01267 
01268     @abstractmethod
01269     def link(self, output, objects, libraries, lib_dirs, mem_map):
01270         """Run the linker to create an executable and memory map.
01271 
01272         Positional arguments:
01273         output -- the file name to place the executable in
01274         objects -- all of the object files to link
01275         libraries -- all of the required libraries
01276         lib_dirs -- where the required libraries are located
01277         mem_map -- the location where the memory map file should be stored
01278 
01279         Return value:
01280         None
01281 
01282         Side effect:
01283         Runs the linker to produce the executable.
01284 
01285         Note:
01286         This method should be decorated with @hook_tool.
01287         """
01288         raise NotImplemented
01289 
01290     @abstractmethod
01291     def archive(self, objects, lib_path):
01292         """Run the command line that creates an archive.
01293 
01294         Positional arguhments:
01295         objects -- a list of all the object files that should be archived
01296         lib_path -- the file name of the resulting library file
01297 
01298         Return value:
01299         None
01300 
01301         Side effect:
01302         Runs the archiving tool to produce the library file.
01303 
01304         Note:
01305         This method should be decorated with @hook_tool.
01306         """
01307         raise NotImplemented
01308 
01309     @abstractmethod
01310     def binary(self, resources, elf, bin):
01311         """Run the command line that will Extract a simplified binary file.
01312 
01313         Positional arguments:
01314         resources -- A resources object (Is not used in any of the toolchains)
01315         elf -- the executable file that is to be converted
01316         bin -- the file name of the to be created simplified binary file
01317 
01318         Return value:
01319         None
01320 
01321         Side effect:
01322         Runs the elf2bin tool to produce the simplified binary file.
01323 
01324         Note:
01325         This method should be decorated with @hook_tool.
01326         """
01327         raise NotImplemented
01328 
01329     # Return the list of macros geenrated by the build system
01330     def get_config_macros(self):
01331         return Config.config_to_macros(self.config_data) if self.config_data else []
01332 
from tools.settings import ARM_PATH
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from tools.settings import IAR_PATH

# Map toolchain id -> configured install path (from tools.settings).
# NOTE: mutated at runtime by generic_check_executable() when the configured
# path is missing and the executable is instead found on PATH.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}

# Imported at the bottom of the module, after the base class is defined —
# presumably because the concrete toolchain modules import from this one
# (TODO confirm the circular-import constraint before moving these).
from tools.toolchains.arm import ARM_STD, ARM_MICRO
from tools.toolchains.gcc import GCC_ARM, GCC_CR
from tools.toolchains.iar import IAR

# Map toolchain id -> concrete toolchain class.
TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}

# The set of supported toolchain names.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())