Greg Steiert / pegasus_dev

Dependents:   blinky_max32630fthr

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 
00018 import re
00019 import sys
00020 from os import stat, walk, getcwd, sep, remove
00021 from copy import copy
00022 from time import time, sleep
00023 from types import ListType
00024 from shutil import copyfile
00025 from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir
00026 from inspect import getmro
00027 from copy import deepcopy
00028 from tools.config import Config
00029 from abc import ABCMeta, abstractmethod
00030 from distutils.spawn import find_executable
00031 
00032 from multiprocessing import Pool, cpu_count
00033 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker
00034 from tools.settings import MBED_ORG_USER
00035 import tools.hooks as hooks
00036 from tools.memap import MemapParser
00037 from hashlib import md5
00038 import fnmatch
00039 
00040 
# Build-parallelism tuning knobs.
# Multiprocessing is effectively disabled when CPU_COUNT_MIN is set higher
# than the host machine's CPU count; CPU_COEF scales the worker count.
CPU_COUNT_MIN = 1
CPU_COEF = 1
00044 
class Resources:
    """Container for everything discovered by a toolchain resource scan.

    Files are bucketed by role (assembly/C/C++ sources, headers, objects,
    libraries, linker script, hex/bin/json files, repo metadata).  Feature
    subtrees (FEATURE_* directories) are stored as nested Resources objects
    in *features*, keyed by feature name.
    """

    def __init__(self, base_path=None):
        self.base_path = base_path

        # Maps each scanned file/directory to the base path it was found under
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features (feature name -> nested Resources)
        self.features = {}

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        # Supports sum()-style accumulation starting from None
        if resources is None:
            return self
        else:
            return self.add(resources)

    def add(self, resources):
        """Merge *resources* into self (in place) and return self."""
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last linker script seen wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate (recursively over features) the sets of source files
        that map to the same object name (*dupe_dict*) and of header files
        sharing a basename (*dupe_headers*)."""
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # Bug fix: record the full path, not the basename. The original
            # added *headername* to its own set, so the set never grew past
            # one element and duplicate headers were never reported.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of ambiguous object/header names found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"\
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %\
                    (headername, " ".join(locations)))
        return count


    def relative_to(self, base, dot=False):
        """Rewrite all recorded paths relative to *base* (in place).

        Returns self.  Bug fix: the original returned None, so the features
        dict comprehension below replaced every nested Resources object with
        None, destroying the feature resources.
        """
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        self.features = {k: f.relative_to(base, dot)
                         for k, f in self.features.items() if f}

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)
        return self

    def win_to_unix(self):
        """Convert all recorded paths to forward slashes (in place).

        Returns self for the same reason as relative_to().
        """
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        self.features = {k: f.win_to_unix()
                         for k, f in self.features.items() if f}

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')
        return self

    def __str__(self):
        """Human-readable dump of every non-empty resource bucket."""
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00213 
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = {
    # Target-named directories from the pre-label era
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    # Toolchain-named directories from the pre-label era
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
}
# Maps a toolchain class name to the legacy directory name it corresponds to
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD': 'ARM',
    'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM',
    'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}
00227 
00228 
class mbedToolchain:
    """Abstract base class for mbed toolchains (subclassed by ARM/GCC_ARM/IAR).

    Handles resource scanning, symbol/label generation, incremental-build
    decisions and build notifications; concrete subclasses supply the
    compiler-specific commands and file extensions (LIBRARY_EXT, LINKER_EXT).
    """

    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols injected for each supported Cortex core
    # (see get_symbols())
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
    }

    # Name of the generated configuration header (see get_config_header elsewhere)
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Python 2 style abstract base class declaration
    __metaclass__ = ABCMeta

    # Default (empty) build-profile flag buckets; deep-copied per instance
    # in __init__ so instances never share the lists
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00258 
    def __init__(self, target, notify=None, macros=None, silent=False, extra_verbose=False, build_profile=None):
        """Initialize the toolchain for *target*.

        Positional Arguments:
        target - the target (mcu) description object this build is for

        Keyword Arguments:
        notify - callable receiving (event_dict, silent) for build events;
                 defaults to the built-in console printers
        macros - extra preprocessor macros supplied by the user
        silent - suppress console output (events still accumulate in self.output)
        extra_verbose - use the verbose console printer when *notify* is None
        build_profile - dict of flag lists (same shape as profile_template)
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep copy so the shared template/profile is never mutated)
        self.flags = deepcopy(build_profile or self.profile_template)

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        # (lazy caches populated by get_symbols())
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        # (lazy cache populated by get_labels())
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = None
        self.timestamp = time()

        # Output build naming based on target+toolchain combo (mbed 2.0 builds)
        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files
        self.ignore_patterns = []

        # Pre-mbed 2.0 ignore dirs: every legacy target/toolchain dir except
        # the ones matching this build
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()
        self.map_outputs = list()   # Place to store memmap scan results in JSON like data structures

        # uVisor specific rules: drop the FPU suffix from the core name
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00346 
    # Used for post __init__() hooks
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def init(self):
        """Post-__init__ hook; the default implementation does nothing
        and reports success."""
        return True
00352 
00353     def get_output(self):
00354         return self.output
00355 
00356     def print_notify(self, event, silent=False):
00357         """ Default command line notification
00358         """
00359         msg = None
00360 
00361         if not self.VERBOSE and event['type'] == 'tool_error':
00362             msg = event['message']
00363 
00364         elif event['type'] in ['info', 'debug']:
00365             msg = event['message']
00366 
00367         elif event['type'] == 'cc':
00368             event['severity'] = event['severity'].title()
00369             event['file'] = basename(event['file'])
00370             msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
00371 
00372         elif event['type'] == 'progress':
00373             if 'percent' in event:
00374                 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
00375                                                   event['percent'],
00376                                                   basename(event['file']))
00377             else:
00378                 msg = '{}: {}'.format(event['action'].title(),
00379                                       basename(event['file']))
00380 
00381         if msg:
00382             if not silent:
00383                 print msg
00384             self.output += msg + "\n"
00385 
00386     def print_notify_verbose(self, event, silent=False):
00387         """ Default command line notification with more verbose mode
00388         """
00389         if event['type'] in ['info', 'debug']:
00390             self.print_notify(event, silent=silent) # standard handle
00391 
00392         elif event['type'] == 'cc':
00393             event['severity'] = event['severity'].title()
00394             event['file'] = basename(event['file'])
00395             event['mcu_name'] = "None"
00396             event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
00397             event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
00398             msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
00399             if not silent:
00400                 print msg
00401             self.output += msg + "\n"
00402 
00403         elif event['type'] == 'progress':
00404             self.print_notify(event) # standard handle
00405 
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def notify(self, event):
        """ Little closure for notify functions

        Tags *event* with this toolchain instance, then forwards it (together
        with the configured silent flag) to the selected notify function.
        """
        event['toolchain'] = self
        return self.notify_fun(event, self.silent)
00413 
00414     def get_symbols(self, for_asm=False):
00415         if for_asm:
00416             if self.asm_symbols is None:
00417                 self.asm_symbols = []
00418 
00419                 # Cortex CPU symbols
00420                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00421                     self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00422 
00423                 # Add target's symbols
00424                 self.asm_symbols += self.target.macros
00425                 # Add extra symbols passed via 'macros' parameter
00426                 self.asm_symbols += self.macros
00427             return list(set(self.asm_symbols))  # Return only unique symbols
00428         else:
00429             if self.cxx_symbols is None:
00430                 # Target and Toolchain symbols
00431                 labels = self.get_labels()
00432                 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
00433                 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
00434 
00435                 # Cortex CPU symbols
00436                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00437                     self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00438 
00439                 # Symbols defined by the on-line build.system
00440                 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
00441                 if MBED_ORG_USER:
00442                     self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
00443 
00444                 # Add target's symbols
00445                 self.cxx_symbols += self.target.macros
00446                 # Add target's hardware
00447                 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
00448                 # Add target's features
00449                 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
00450                 # Add extra symbols passed via 'macros' parameter
00451                 self.cxx_symbols += self.macros
00452 
00453                 # Form factor variables
00454                 if hasattr(self.target, 'supported_form_factors'):
00455                     self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
00456 
00457             return list(set(self.cxx_symbols))  # Return only unique symbols
00458 
00459     # Extend the internal list of macros
00460     def add_macros(self, new_macros):
00461         self.macros.extend(new_macros)
00462 
00463     def get_labels(self):
00464         if self.labels is None:
00465             toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
00466             toolchain_labels.remove('mbedToolchain')
00467             self.labels = {
00468                 'TARGET': self.target.labels,
00469                 'FEATURE': self.target.features,
00470                 'TOOLCHAIN': toolchain_labels
00471             }
00472 
00473             # This is a policy decision and it should /really/ be in the config system
00474             # ATM it's here for backward compatibility
00475             if (("-g" in self.flags['common'] and
00476                  "-O0") in self.flags['common'] or
00477                 ("-r" in self.flags['common'] and
00478                  "-On" in self.flags['common'])):
00479                 self.labels['TARGET'].append("DEBUG")
00480             else:
00481                 self.labels['TARGET'].append("RELEASE")
00482         return self.labels
00483 
00484 
00485     # Determine whether a source file needs updating/compiling
00486     def need_update(self, target, dependencies):
00487         if self.build_all:
00488             return True
00489 
00490         if not exists(target):
00491             return True
00492 
00493         target_mod_time = stat(target).st_mtime
00494 
00495         for d in dependencies:
00496             # Some objects are not provided with full path and here we do not have
00497             # information about the library paths. Safe option: assume an update
00498             if not d or not exists(d):
00499                 return True
00500             
00501             if not self.stat_cache.has_key(d):
00502                 self.stat_cache[d] = stat(d).st_mtime
00503 
00504             if self.stat_cache[d] >= target_mod_time:
00505                 return True
00506         
00507         return False
00508 
00509     def is_ignored(self, file_path):
00510         for pattern in self.ignore_patterns:
00511             if fnmatch.fnmatch(file_path, pattern):
00512                 return True
00513         return False
00514 
00515     # Create a Resources object from the path pointed to by *path* by either traversing a
00516     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00517     # when *path* is a file.
00518     # The parameter *base_path* is used to set the base_path attribute of the Resources
00519     # object and the parameter *exclude_paths* is used by the directory traversal to
00520     # exclude certain paths from the traversal.
00521     def scan_resources(self, path, exclude_paths=None, base_path=None):
00522         self.progress("scan", path)
00523 
00524         resources = Resources(path)
00525         if not base_path:
00526             if isfile(path):
00527                 base_path = dirname(path)
00528             else:
00529                 base_path = path
00530         resources.base_path = base_path
00531 
00532         if isfile(path):
00533             self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
00534         else:
00535             self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
00536         return resources
00537 
    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
    # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """Walk *path* (following symlinks) and populate *resources*.

        Relies on os.walk's topdown behaviour: removing entries from *dirs*
        in place prunes the traversal, which is how ignored/mismatched
        TARGET_/TOOLCHAIN_/TESTS directories are skipped and how FEATURE_
        subtrees are diverted into resources.features instead.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.ignore_patterns.extend([join(root,line.strip()) for line in lines])

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            if self.is_ignored(join(root,"")):
                continue

            # Iterate over a copy because dirs is mutated during the loop
            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(dir_path,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
                    dirs.remove(d)
                elif exclude_paths:
                    for exclude_path in exclude_paths:
                        # NOTE(review): this local rel_path shadows the
                        # rel_path helper imported from tools.utils
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            # dir_path lies under exclude_path: prune it
                            dirs.remove(d)
                            break

            # Add root to include paths
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00603 
00604     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00605     # (*file_path*) to the resources object based on the file type.
00606     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00607         resources.file_basepath[file_path] = base_path
00608 
00609         if self.is_ignored(file_path):
00610             return
00611 
00612         _, ext = splitext(file_path)
00613         ext = ext.lower()
00614 
00615         if   ext == '.s':
00616             resources.s_sources.append(file_path)
00617 
00618         elif ext == '.c':
00619             resources.c_sources.append(file_path)
00620 
00621         elif ext == '.cpp':
00622             resources.cpp_sources.append(file_path)
00623 
00624         elif ext == '.h' or ext == '.hpp':
00625             resources.headers.append(file_path)
00626 
00627         elif ext == '.o':
00628             resources.objects.append(file_path)
00629 
00630         elif ext == self.LIBRARY_EXT:
00631             resources.libraries.append(file_path)
00632             resources.lib_dirs.add(dirname(file_path))
00633 
00634         elif ext == self.LINKER_EXT:
00635             if resources.linker_script is not None:
00636                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00637             resources.linker_script = file_path
00638 
00639         elif ext == '.lib':
00640             resources.lib_refs.append(file_path)
00641 
00642         elif ext == '.bld':
00643             resources.lib_builds.append(file_path)
00644 
00645         elif basename(file_path) == '.hgignore':
00646             resources.repo_files.append(file_path)
00647 
00648         elif basename(file_path) == '.gitignore':
00649             resources.repo_files.append(file_path)
00650 
00651         elif ext == '.hex':
00652             resources.hex_files.append(file_path)
00653 
00654         elif ext == '.bin':
00655             resources.bin_files.append(file_path)
00656 
00657         elif ext == '.json':
00658             resources.json_files.append(file_path)
00659 
00660 
00661     def scan_repository(self, path):
00662         resources = []
00663 
00664         for root, dirs, files in walk(path):
00665             # Remove ignored directories
00666             for d in copy(dirs):
00667                 if d == '.' or d == '..':
00668                     dirs.remove(d)
00669 
00670             for file in files:
00671                 file_path = join(root, file)
00672                 resources.append(file_path)
00673 
00674         return resources
00675 
00676     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00677         # Handle a single file
00678         if type(files_paths) != ListType: files_paths = [files_paths]
00679 
00680         for source in files_paths:
00681             if source is None:
00682                 files_paths.remove(source)
00683 
00684         for source in files_paths:
00685             if resources is not None and resources.file_basepath.has_key(source):
00686                 relative_path = relpath(source, resources.file_basepath[source])
00687             elif rel_path is not None:
00688                 relative_path = relpath(source, rel_path)
00689             else:
00690                 _, relative_path = split(source)
00691 
00692             target = join(trg_path, relative_path)
00693 
00694             if (target != source) and (self.need_update(target, [source])):
00695                 self.progress("copy", relative_path)
00696                 mkdir(dirname(target))
00697                 copyfile(source, target)
00698 
00699     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00700     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00701     def relative_object_path(self, build_path, base_dir, source):
00702         source_dir, name, _ = split_path(source)
00703 
00704         obj_dir = join(build_path, relpath(source_dir, base_dir))
00705         if obj_dir is not self.prev_dir:
00706             self.prev_dir = obj_dir
00707             mkdir(obj_dir)
00708         return join(obj_dir, name + '.o')
00709 
00710     # Generate response file for all includes.
00711     # ARM, GCC, IAR cross compatible
00712     def get_inc_file(self, includes):
00713         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00714         if not exists(include_file):
00715             with open(include_file, "wb") as f:
00716                 cmd_list = []
00717                 for c in includes:
00718                     if c:
00719                         c = c.replace("\\", "/")
00720                         if self.CHROOT:
00721                             c = c.replace(self.CHROOT, '')
00722                         cmd_list.append('-I%s' % c)
00723                 string = " ".join(cmd_list)
00724                 f.write(string)
00725         return include_file
00726 
00727     # Generate response file for all objects when linking.
00728     # ARM, GCC, IAR cross compatible
00729     def get_link_file(self, cmd):
00730         link_file = join(self.build_dir, ".link_files.txt")
00731         with open(link_file, "wb") as f:
00732             cmd_list = []
00733             for c in cmd:
00734                 if c:
00735                     c = c.replace("\\", "/")
00736                     if self.CHROOT:
00737                         c = c.replace(self.CHROOT, '')
00738                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00739             string = " ".join(cmd_list)
00740             f.write(string)
00741         return link_file
00742  
00743     # Generate response file for all objects when archiving.
00744     # ARM, GCC, IAR cross compatible
00745     def get_arch_file(self, objects):
00746         archive_file = join(self.build_dir, ".archive_files.txt")
00747         with open(archive_file, "wb") as f:
00748             o_list = []
00749             for o in objects:
00750                 o_list.append('"%s"' % o)
00751             string = " ".join(o_list).replace("\\", "/")
00752             f.write(string)
00753         return archive_file
00754 
00755     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00756     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00757     def compile_sources(self, resources, build_path, inc_dirs=None):
00758         # Web IDE progress bar for project build
00759         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00760         self.to_be_compiled = len(files_to_compile)
00761         self.compiled = 0
00762 
00763         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00764 
00765         inc_paths = resources.inc_dirs
00766         if inc_dirs is not None:
00767             inc_paths.extend(inc_dirs)
00768         # De-duplicate include paths
00769         inc_paths = set(inc_paths)
00770         # Sort include paths for consistency
00771         inc_paths = sorted(set(inc_paths))
00772         # Unique id of all include paths
00773         self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
00774         # Where to store response files
00775         self.build_dir = build_path
00776 
00777         objects = []
00778         queue = []
00779         work_dir = getcwd()
00780         self.prev_dir = None
00781 
00782         # Generate configuration header (this will update self.build_all if needed)
00783         self.get_config_header()
00784 
00785         # Sort compile queue for consistency
00786         files_to_compile.sort()
00787         for source in files_to_compile:
00788             object = self.relative_object_path(build_path, resources.file_basepath[source], source)
00789 
00790             # Queue mode (multiprocessing)
00791             commands = self.compile_command(source, object, inc_paths)
00792             if commands is not None:
00793                 queue.append({
00794                     'source': source,
00795                     'object': object,
00796                     'commands': commands,
00797                     'work_dir': work_dir,
00798                     'chroot': self.CHROOT
00799                 })
00800             else:
00801                 self.compiled += 1
00802                 objects.append(object)
00803 
00804         # Use queues/multiprocessing if cpu count is higher than setting
00805         jobs = self.jobs if self.jobs else cpu_count()
00806         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00807             return self.compile_queue(queue, objects)
00808         else:
00809             return self.compile_seq(queue, objects)
00810 
00811     # Compile source files queue in sequential order
00812     def compile_seq(self, queue, objects):
00813         for item in queue:
00814             result = compile_worker(item)
00815 
00816             self.compiled += 1
00817             self.progress("compile", item['source'], build_update=True)
00818             for res in result['results']:
00819                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00820                 self.compile_output([
00821                     res['code'],
00822                     res['output'],
00823                     res['command']
00824                 ])
00825             objects.append(result['object'])
00826         return objects
00827 
00828     # Compile source files queue in parallel by creating pool of worker threads
00829     def compile_queue(self, queue, objects):
00830         jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
00831         p = Pool(processes=jobs_count)
00832 
00833         results = []
00834         for i in range(len(queue)):
00835             results.append(p.apply_async(compile_worker, [queue[i]]))
00836         p.close()
00837 
00838         itr = 0
00839         while len(results):
00840             itr += 1
00841             if itr > 180000:
00842                 p.terminate()
00843                 p.join()
00844                 raise ToolException("Compile did not finish in 5 minutes")
00845 
00846             sleep(0.01)
00847             pending = 0
00848             for r in results:
00849                 if r._ready is True:
00850                     try:
00851                         result = r.get()
00852                         results.remove(r)
00853 
00854                         self.compiled += 1
00855                         self.progress("compile", result['source'], build_update=True)
00856                         for res in result['results']:
00857                             self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00858                             self.compile_output([
00859                                 res['code'],
00860                                 res['output'],
00861                                 res['command']
00862                             ])
00863                         objects.append(result['object'])
00864                     except ToolException, err:
00865                         if p._taskqueue.queue:
00866                             p._taskqueue.queue.clear()
00867                             sleep(0.5)
00868                         p.terminate()
00869                         p.join()
00870                         raise ToolException(err)
00871                 else:
00872                     pending += 1
00873                     if pending >= jobs_count:
00874                         break
00875 
00876         results = None
00877         p.join()
00878 
00879         return objects
00880 
00881     # Determine the compile command based on type of source file
00882     def compile_command(self, source, object, includes):
00883         # Check dependencies
00884         _, ext = splitext(source)
00885         ext = ext.lower()
00886 
00887         if ext == '.c' or  ext == '.cpp':
00888             base, _ = splitext(object)
00889             dep_path = base + '.d'
00890             deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
00891             if len(deps) == 0 or self.need_update(object, deps):
00892                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
00893                     return self.compile_cpp(source, object, includes)
00894                 else:
00895                     return self.compile_c(source, object, includes)
00896         elif ext == '.s':
00897             deps = [source]
00898             if self.need_update(object, deps):
00899                 return self.assemble(source, object, includes)
00900         else:
00901             return False
00902 
00903         return None
00904 
00905     @abstractmethod
00906     def parse_dependencies(self, dep_path):
00907         """Parse the dependency information generated by the compiler.
00908 
00909         Positional arguments:
00910         dep_path -- the path to a file generated by a previous run of the compiler
00911 
00912         Return value:
00913         A list of all source files that the dependency file indicated were dependencies
00914 
00915         Side effects:
00916         None
00917         """
00918         raise NotImplemented
00919 
00920     def is_not_supported_error(self, output):
00921         return "#error directive: [NOT_SUPPORTED]" in output
00922 
00923     @abstractmethod
00924     def parse_output(self, output):
00925         """Take in compiler output and extract sinlge line warnings and errors from it.
00926 
00927         Positional arguments:
00928         output -- a string of all the messages emitted by a run of the compiler
00929 
00930         Return value:
00931         None
00932 
00933         Side effects:
00934         call self.cc_info or self.notify with a description of the event generated by the compiler
00935         """
00936         raise NotImplemented
00937 
00938     def compile_output(self, output=[]):
00939         _rc = output[0]
00940         _stderr = output[1]
00941         command = output[2]
00942 
00943         # Parse output for Warnings and Errors
00944         self.parse_output(_stderr)
00945         self.debug("Return: %s"% _rc)
00946         for error_line in _stderr.splitlines():
00947             self.debug("Output: %s"% error_line)
00948 
00949         # Check return code
00950         if _rc != 0:
00951             if self.is_not_supported_error(_stderr):
00952                 raise NotSupportedException(_stderr)
00953             else:
00954                 raise ToolException(_stderr)
00955 
00956     def build_library(self, objects, dir, name):
00957         needed_update = False
00958         lib = self.STD_LIB_NAME % name
00959         fout = join(dir, lib)
00960         if self.need_update(fout, objects):
00961             self.info("Library: %s" % lib)
00962             self.archive(objects, fout)
00963             needed_update = True
00964 
00965         return needed_update
00966 
00967     def link_program(self, r, tmp_path, name):
00968         needed_update = False
00969         ext = 'bin'
00970         if hasattr(self.target, 'OUTPUT_EXT'):
00971             ext = self.target.OUTPUT_EXT
00972 
00973         if hasattr(self.target, 'OUTPUT_NAMING'):
00974             self.var("binary_naming", self.target.OUTPUT_NAMING)
00975             if self.target.OUTPUT_NAMING == "8.3":
00976                 name = name[0:8]
00977                 ext = ext[0:3]
00978 
00979         # Create destination directory
00980         head, tail =  split(name)
00981         new_path = join(tmp_path, head)
00982         mkdir(new_path)
00983 
00984         filename = name+'.'+ext
00985         elf = join(tmp_path, name + '.elf')
00986         bin = join(tmp_path, filename)
00987         map = join(tmp_path, name + '.map')
00988 
00989         r.objects = sorted(set(r.objects))
00990         if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
00991             needed_update = True
00992             self.progress("link", name)
00993             self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
00994 
00995         if self.need_update(bin, [elf]):
00996             needed_update = True
00997             self.progress("elf2bin", name)
00998             self.binary(r, elf, bin)
00999 
01000         self.map_outputs = self.mem_stats(map)
01001 
01002         self.var("compile_succeded", True)
01003         self.var("binary", filename)
01004 
01005         return bin, needed_update
01006 
01007     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01008     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01009     def default_cmd(self, command):
01010         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01011         self.debug("Return: %s"% _rc)
01012 
01013         for output_line in _stdout.splitlines():
01014             self.debug("Output: %s"% output_line)
01015         for error_line in _stderr.splitlines():
01016             self.debug("Errors: %s"% error_line)
01017 
01018         if _rc != 0:
01019             for line in _stderr.splitlines():
01020                 self.tool_error(line)
01021             raise ToolException(_stderr)
01022 
01023     ### NOTIFICATIONS ###
01024     def info(self, message):
01025         self.notify({'type': 'info', 'message': message})
01026 
01027     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01028     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01029     def debug(self, message):
01030         if self.VERBOSE:
01031             if type(message) is ListType:
01032                 message = ' '.join(message)
01033             message = "[DEBUG] " + message
01034             self.notify({'type': 'debug', 'message': message})
01035 
01036     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01037     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01038     def cc_info(self, info=None):
01039         if info is not None:
01040             info['type'] = 'cc'
01041             self.notify(info)
01042 
01043     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01044     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01045     def cc_verbose(self, message, file=""):
01046         self.debug(message)
01047 
01048     def progress(self, action, file, build_update=False):
01049         msg = {'type': 'progress', 'action': action, 'file': file}
01050         if build_update:
01051             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01052         self.notify(msg)
01053 
01054     def tool_error(self, message):
01055         self.notify({'type': 'tool_error', 'message': message})
01056 
01057     def var(self, key, value):
01058         self.notify({'type': 'var', 'key': key, 'val': value})
01059 
01060     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01061     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01062     def mem_stats(self, map):
01063         """! Creates parser object
01064         @param map Path to linker map file to parse and decode
01065         @return Memory summary structure with memory usage statistics
01066                 None if map file can't be opened and processed
01067         """
01068         toolchain = self.__class__.__name__
01069 
01070         # Create memap object
01071         memap = MemapParser()
01072 
01073         # Parse and decode a map file
01074         if memap.parse(abspath(map), toolchain) is False:
01075             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01076             return None
01077 
01078         # Store the memap instance for later use
01079         self.memap_instance = memap
01080 
01081         # Here we return memory statistics structure (constructed after
01082         # call to generate_output) which contains raw data in bytes
01083         # about sections + summary
01084         return memap.mem_report
01085 
01086     # Set the configuration data
01087     def set_config_data(self, config_data):
01088         self.config_data = config_data
01089 
01090     # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not created (or deleted if it exists).
01092     # - if there is configuration data and "mbed_config.h" does not exist, it is created.
01093     # - if there is configuration data similar to the previous configuration data,
01094     #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overridden.
01096     # The function needs to be called exactly once for the lifetime of this toolchain instance.
01097     # The "config_processed" variable (below) ensures this behaviour.
01098     # The function returns the location of the configuration file, or None if there is no
01099     # configuration data available (and thus no configuration file)
    def get_config_header(self):
        """Create, update or remove mbed_config.h in the build directory.

        Returns the path of the configuration header, or None when there is
        no configuration data (and therefore no header). Only the first call
        does any work; every later call returns the cached result. Sets
        self.build_all when the configuration changed, forcing a rebuild.
        """
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "rt") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = Config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "wt") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "wt") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file
01137 
01138     @staticmethod
01139     def generic_check_executable(tool_key, executable_name, levels_up,
01140                                  nested_dir=None):
01141         """
01142         Positional args:
01143         tool_key: the key to index TOOLCHAIN_PATHS
01144         executable_name: the toolchain's named executable (ex. armcc)
01145         levels_up: each toolchain joins the toolchain_path, some
01146         variable directories (bin, include), and the executable name,
01147         so the TOOLCHAIN_PATH value must be appropriately distanced
01148 
01149         Keyword args:
01150         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01151           is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
01152           that will be used by toolchain's compile)
01153 
01154         Returns True if the executable location specified by the user
01155         exists and is valid OR the executable can be found on the PATH.
01156         Returns False otherwise.
01157         """
01158         # Search PATH if user did not specify a path or specified path doesn't
01159         # exist.
01160         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01161             exe = find_executable(executable_name)
01162             if not exe:
01163                 return False
01164             for level in range(levels_up):
01165                 # move up the specified number of directories
01166                 exe = dirname(exe)
01167             TOOLCHAIN_PATHS[tool_key] = exe
01168         if nested_dir:
01169             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01170                           executable_name)
01171         else:
01172             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01173         # User could have specified a path that exists but does not contain exe
01174         return exists(subdir) or exists(subdir +'.exe')
01175 
01176     @abstractmethod
01177     def check_executable(self):
01178         """Returns True if the executable (armcc) location specified by the
01179          user exists OR the executable can be found on the PATH.
01180          Returns False otherwise."""
01181         raise NotImplemented
01182 
01183     @abstractmethod
01184     def get_config_option(self, config_header):
01185         """Generate the compiler option that forces the inclusion of the configuration
01186         header file.
01187 
01188         Positional arguments:
01189         config_header -- The configuration header that will be included within all source files
01190 
01191         Return value:
01192         A list of the command line arguments that will force the inclusion the specified header
01193 
01194         Side effects:
01195         None
01196         """
01197         raise NotImplemented
01198 
01199     @abstractmethod
01200     def assemble(self, source, object, includes):
01201         """Generate the command line that assembles.
01202 
01203         Positional arguments:
01204         source -- a file path that is the file to assemble
01205         object -- a file path that is the destination object
01206         includes -- a list of all directories where header files may be found
01207 
01208         Return value:
01209         The complete command line, as a list, that would invoke the assembler
01210         on the source file, include all the include paths, and generate
01211         the specified object file.
01212 
01213         Side effects:
01214         None
01215 
01216         Note:
01217         This method should be decorated with @hook_tool.
01218         """
01219         raise NotImplemented
01220 
01221     @abstractmethod
01222     def compile_c(self, source, object, includes):
01223         """Generate the command line that compiles a C source file.
01224 
01225         Positional arguments:
01226         source -- the C source file to compile
01227         object -- the destination object file
01228         includes -- a list of all the directories where header files may be found
01229 
01230         Return value:
01231         The complete command line, as a list, that would invoke the C compiler
01232         on the source file, include all the include paths, and generate the
01233         specified object file.
01234 
01235         Side effects:
01236         None
01237 
01238         Note:
01239         This method should be decorated with @hook_tool.
01240         """
01241         raise NotImplemented
01242 
01243     @abstractmethod
01244     def compile_cpp(self, source, object, includes):
01245         """Generate the command line that compiles a C++ source file.
01246 
01247         Positional arguments:
01248         source -- the C++ source file to compile
01249         object -- the destination object file
01250         includes -- a list of all the directories where header files may be found
01251 
01252         Return value:
01253         The complete command line, as a list, that would invoke the C++ compiler
01254         on the source file, include all the include paths, and generate the
01255         specified object file.
01256 
01257         Side effects:
01258         None
01259 
01260         Note:
01261         This method should be decorated with @hook_tool.
01262         """
01263         raise NotImplemented
01264 
01265     @abstractmethod
01266     def link(self, output, objects, libraries, lib_dirs, mem_map):
01267         """Run the linker to create an executable and memory map.
01268 
01269         Positional arguments:
01270         output -- the file name to place the executable in
01271         objects -- all of the object files to link
01272         libraries -- all of the required libraries
01273         lib_dirs -- where the required libraries are located
01274         mem_map -- the location where the memory map file should be stored
01275 
01276         Return value:
01277         None
01278 
01279         Side effect:
01280         Runs the linker to produce the executable.
01281 
01282         Note:
01283         This method should be decorated with @hook_tool.
01284         """
01285         raise NotImplemented
01286 
01287     @abstractmethod
01288     def archive(self, objects, lib_path):
01289         """Run the command line that creates an archive.
01290 
01291         Positional arguhments:
01292         objects -- a list of all the object files that should be archived
01293         lib_path -- the file name of the resulting library file
01294 
01295         Return value:
01296         None
01297 
01298         Side effect:
01299         Runs the archiving tool to produce the library file.
01300 
01301         Note:
01302         This method should be decorated with @hook_tool.
01303         """
01304         raise NotImplemented
01305 
01306     @abstractmethod
01307     def binary(self, resources, elf, bin):
01308         """Run the command line that will Extract a simplified binary file.
01309 
01310         Positional arguments:
01311         resources -- A resources object (Is not used in any of the toolchains)
01312         elf -- the executable file that is to be converted
01313         bin -- the file name of the to be created simplified binary file
01314 
01315         Return value:
01316         None
01317 
01318         Side effect:
01319         Runs the elf2bin tool to produce the simplified binary file.
01320 
01321         Note:
01322         This method should be decorated with @hook_tool.
01323         """
01324         raise NotImplemented
01325 
    # Return the list of macros generated by the build system
01327     def get_config_macros(self):
01328         return Config.config_to_macros(self.config_data) if self.config_data else []
01329 
01330 from tools.settings import ARM_PATH
01331 from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
01332 from tools.settings import IAR_PATH
01333 
# Default install location for each supported toolchain, keyed by toolchain
# name. May be patched at runtime by generic_check_executable when the
# executable is discovered on PATH instead.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,  # micro-lib ARM builds share the standard ARM install
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}
01341 
01342 from tools.toolchains.arm import ARM_STD, ARM_MICRO
01343 from tools.toolchains.gcc import GCC_ARM, GCC_CR
01344 from tools.toolchains.iar import IAR
01345 
# Concrete toolchain implementation class for each supported toolchain name
TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}

# The set of all supported toolchain names
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())