Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import sys
00021 from os import stat, walk, getcwd, sep, remove
00022 from copy import copy
00023 from time import time, sleep
00024 from shutil import copyfile
00025 from os.path import (join, splitext, exists, relpath, dirname, basename, split,
00026                      abspath, isfile, isdir, normcase)
00027 from itertools import chain
00028 from inspect import getmro
00029 from copy import deepcopy
00030 from abc import ABCMeta, abstractmethod
00031 from distutils.spawn import find_executable
00032 from multiprocessing import Pool, cpu_count
00033 from hashlib import md5
00034 import fnmatch
00035 
00036 from ..utils import (run_cmd, mkdir, rel_path, ToolException,
00037                     NotSupportedException, split_path, compile_worker)
00038 from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK
00039 from .. import hooks
00040 from ..memap import MemapParser
00041 
00042 
# Disables multiprocessing if set to a higher number than the host
# machine's CPU count.
CPU_COUNT_MIN = 1
# Multiplier applied to the detected CPU count when sizing the build job
# pool — presumably consumed by the parallel-compile setup; the use site
# is outside this chunk, so confirm there.
CPU_COEF = 1
00046 
class LazyDict(dict):
    """A dict-like container whose values may be zero-argument thunks.

    Thunks registered via ``add_lazy`` are evaluated at most once, on
    first access, and the result is cached in the eager store. Plain
    assignments go straight into the eager store.
    """

    def __init__(self):
        self.eager = {}  # key -> already-computed value
        self.lazy = {}   # key -> zero-argument callable producing the value

    def add_lazy(self, key, thunk):
        """Register *thunk* to compute the value of *key* on demand."""
        if key in self.eager:
            del self.eager[key]
        self.lazy[key] = thunk

    def __getitem__(self, key):
        # Force a pending thunk exactly once, migrating it to the eager
        # store; later lookups hit the cached value.
        if (key not in self.eager
                and key in self.lazy):
            self.eager[key] = self.lazy[key]()
            del self.lazy[key]
        return self.eager[key]

    def __setitem__(self, key, value):
        self.eager[key] = value

    def __delitem__(self, key):
        if key in self.eager:
            del self.eager[key]
        else:
            del self.lazy[key]

    def __contains__(self, key):
        return key in self.eager or key in self.lazy

    def __iter__(self):
        return chain(iter(self.eager), iter(self.lazy))

    def __len__(self):
        return len(self.eager) + len(self.lazy)

    def __str__(self):
        return "Lazy{%s}" % (
            ", ".join("%r: %r" % (k, v) for k, v in
                      chain(self.eager.items(), ((k, "not evaluated")
                                                 for k in self.lazy))))

    def update(self, other):
        if isinstance(other, LazyDict):
            self.eager.update(other.eager)
            self.lazy.update(other.lazy)
        else:
            self.eager.update(other)

    def items(self):
        """Warning: This forces the evaluation of all of the items in this
        LazyDict that are iterated over."""
        for k, v in self.eager.items():
            yield k, v
        # BUG FIX: iterate over a snapshot of the keys. `self[k]` deletes
        # the evaluated entry from self.lazy, and mutating a dict while
        # iterating its live keys() view raises RuntimeError on Python 3.
        for k in list(self.lazy):
            yield k, self[k]

    def apply(self, fn):
        """Delay the application of *fn* to every value of this LazyDict.

        Does no computation now; *fn* is composed onto each entry and runs
        only when a consumer accesses the corresponding value."""
        new_lazy = {}
        for k, f in self.lazy.items():
            # Bind f as a default argument to avoid the late-binding
            # closure pitfall.
            def closure(f=f):
                return fn(f())
            new_lazy[k] = closure
        for k, v in self.eager.items():
            def closure(v=v):
                return fn(v)
            new_lazy[k] = closure
        self.lazy = new_lazy
        self.eager = {}
00118 
class Resources:
    """Aggregates the build inputs discovered by a resource scan.

    Holds headers, sources, libraries, linker scripts and other files,
    and can be merged with another instance via ``add`` (or ``+``).
    """

    # Attributes holding flat lists of paths, rewritten together by
    # relative_to() and win_to_unix().
    _PATH_LIST_FIELDS = ('inc_dirs', 'headers', 's_sources', 'c_sources',
                         'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                         'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                         'hex_files', 'bin_files', 'json_files')

    def __init__(self, base_path=None, collect_ignores=False):
        self.base_path = base_path
        self.collect_ignores = collect_ignores

        # Maps each discovered file/dir to the base path of its scan.
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # Features are scanned lazily (see LazyDict): FEATURE_* dirs map
        # to thunks that run a sub-scan only when accessed.
        self.features = LazyDict()
        self.ignored_dirs = []

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def ignore_dir(self, directory):
        """Record *directory* as ignored (only when collect_ignores is set)."""
        if self.collect_ignores:
            self.ignored_dirs.append(directory)

    def add(self, resources):
        """Merge another Resources object into this one; returns self."""
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # Last linker script wins.
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)
        self.ignored_dirs += resources.ignored_dirs

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate {object name: source paths} and {header name: paths}."""
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # BUG FIX: record the full path (filename), not the basename.
            # The original added `headername` to its own set, so the set
            # never grew past one element and duplicate headers in
            # different directories were never reported.
            dupe_headers[headername] |= set([filename])
        # NOTE(review): LazyDict does not override dict.values(), and its
        # entries live in .eager/.lazy rather than the underlying dict, so
        # this loop appears to always iterate nothing — feature resources
        # are likely never recursed into here. Left unchanged because
        # forcing evaluation would trigger lazy feature scans; confirm
        # intended behavior upstream.
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of duplicates found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %
                    (headername, " ".join(locations)))
        return count

    def relative_to(self, base, dot=False):
        """Rewrite every stored path relative to *base* (in place)."""
        for field in self._PATH_LIST_FIELDS:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        # Defer the rewrite of feature resources until they are evaluated.
        def to_apply(feature, base=base, dot=dot):
            feature.relative_to(base, dot)
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path to forward-slash form (in place)."""
        for field in self._PATH_LIST_FIELDS:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        def to_apply(feature):
            feature.win_to_unix()
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00298 
# Support legacy build conventions: the original mbed build system did not
# have standard labels for the "TARGET_" and "TOOLCHAIN_" specific
# directories, but instead knew a fixed list of such directory names to be
# ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
    'ARMC6'
])
# Maps toolchain class names to their legacy directory name, so the
# current toolchain's own directory is exempted from the ignore set
# (see mbedToolchain.__init__, legacy_ignore_dirs).
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
    'ARMC6': 'ARMC6',
}
00314 
00315 
class mbedToolchain:
    """Abstract base class for mbed toolchains (ARM, GCC_ARM, IAR, ...).

    Implements resource scanning, .mbedignore handling, dependency
    checking and build notifications; concrete subclasses supply the
    actual compile/assemble/link commands.
    """

    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols injected per CPU core (CMSIS / RTOS defines);
    # consumed by get_symbols().
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
        "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
    }

    # Name of the generated configuration header — presumably written next
    # to the build output; the writer is outside this chunk, confirm there.
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # File name used to persist build settings — usage is outside this
    # chunk; confirm at the use site.
    PROFILE_FILE_NAME = ".profile"

    # Python 2 style ABC declaration (the file targets py2/py3 via
    # __future__ imports; on py3 this attribute is inert).
    __metaclass__ = ABCMeta

    # Default (empty) build profile: one flag list per compilation stage.
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00353 
    def __init__(self, target, notify=None, macros=None, silent=False,
                 extra_verbose=False, build_profile=None, build_dir=None):
        """Create a toolchain bound to *target*.

        Positional arguments:
        target - the target object this toolchain builds for

        Keyword arguments:
        notify - callable(event, silent) that receives all toolchain events
        macros - extra preprocessor symbols to define
        silent - when True, notifications are buffered but not printed
        extra_verbose - use the verbose notification printer
        build_profile - dict of per-stage flag lists (see profile_template)
        build_dir - output directory for build artifacts
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep-copied so mutations never leak back into
        # the shared profile_template or the caller's build_profile)
        self.flags = deepcopy(build_profile or self.profile_template)

        # System libraries provided by the toolchain
        self.sys_libs = []

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features;
        # computed lazily and cached by get_symbols()
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used
        # for selective build); computed lazily and cached by get_labels()
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = build_dir
        # Injected into builds as MBED_BUILD_TIMESTAMP (see get_symbols)
        self.timestamp = time()

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files; "$^" can never match, so
        # nothing is ignored until add_ignore_patterns() runs
        self.ignore_patterns = []
        self._ignore_regex = re.compile("$^")

        # Pre-mbed 2.0 ignore dirs: every known target/toolchain directory
        # except this build's own target and toolchain
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()

        # uVisor specific rules: uVisor-enabled targets drop the FPU
        # suffix ("F") from the core name
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00442 
    # Used for post __init__() hooks
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def init(self):
        """Post-construction hook; the default implementation does nothing."""
        return True
00448 
00449     def get_output(self):
00450         return self.output
00451 
00452     def print_notify(self, event, silent=False):
00453         """ Default command line notification
00454         """
00455         msg = None
00456 
00457         if not self.VERBOSE and event['type'] == 'tool_error':
00458             msg = event['message']
00459 
00460         elif event['type'] in ['info', 'debug']:
00461             msg = event['message']
00462 
00463         elif event['type'] == 'cc':
00464             event['severity'] = event['severity'].title()
00465 
00466             if PRINT_COMPILER_OUTPUT_AS_LINK:
00467                 event['file'] = getcwd() + event['file'].strip('.')
00468                 msg = '[%(severity)s] %(file)s:%(line)s:%(col)s: %(message)s' % event
00469             else:
00470                 event['file'] = basename(event['file'])
00471                 msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
00472 
00473         elif event['type'] == 'progress':
00474             if 'percent' in event:
00475                 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
00476                                                   event['percent'],
00477                                                   basename(event['file']))
00478             else:
00479                 msg = '{}: {}'.format(event['action'].title(),
00480                                       basename(event['file']))
00481 
00482         if msg:
00483             if not silent:
00484                 print(msg)
00485             self.output += msg + "\n"
00486 
00487     def print_notify_verbose(self, event, silent=False):
00488         """ Default command line notification with more verbose mode
00489         """
00490         if event['type'] in ['info', 'debug']:
00491             self.print_notify(event, silent=silent) # standard handle
00492 
00493         elif event['type'] == 'cc':
00494             event['severity'] = event['severity'].title()
00495             event['file'] = basename(event['file'])
00496             event['mcu_name'] = "None"
00497             event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
00498             event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
00499             msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
00500             if not silent:
00501                 print(msg)
00502             self.output += msg + "\n"
00503 
00504         elif event['type'] == 'progress':
00505             self.print_notify(event) # standard handle
00506 
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def notify(self, event):
        """ Little closure for notify functions
        """
        # Tag the event with its originating toolchain, then forward it to
        # the configured notifier along with this build's silent flag.
        event['toolchain'] = self
        return self.notify_fun(event, self.silent)
00514 
00515     def get_symbols(self, for_asm=False):
00516         if for_asm:
00517             if self.asm_symbols is None:
00518                 self.asm_symbols = []
00519 
00520                 # Cortex CPU symbols
00521                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00522                     self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00523 
00524                 # Add target's symbols
00525                 self.asm_symbols += self.target.macros
00526                 # Add extra symbols passed via 'macros' parameter
00527                 self.asm_symbols += self.macros
00528             return list(set(self.asm_symbols))  # Return only unique symbols
00529         else:
00530             if self.cxx_symbols is None:
00531                 # Target and Toolchain symbols
00532                 labels = self.get_labels()
00533                 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
00534                 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
00535 
00536                 # Cortex CPU symbols
00537                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00538                     self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00539 
00540                 # Symbols defined by the on-line build.system
00541                 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
00542                 if MBED_ORG_USER:
00543                     self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
00544 
00545                 # Add target's symbols
00546                 self.cxx_symbols += self.target.macros
00547                 # Add target's hardware
00548                 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
00549                 # Add target's features
00550                 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
00551                 # Add extra symbols passed via 'macros' parameter
00552                 self.cxx_symbols += self.macros
00553 
00554                 # Form factor variables
00555                 if hasattr(self.target, 'supported_form_factors'):
00556                     self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
00557 
00558             return list(set(self.cxx_symbols))  # Return only unique symbols
00559 
00560     # Extend the internal list of macros
00561     def add_macros(self, new_macros):
00562         self.macros.extend(new_macros)
00563 
00564     def get_labels(self):
00565         if self.labels is None:
00566             toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
00567             toolchain_labels.remove('mbedToolchain')
00568             self.labels = {
00569                 'TARGET': self.target.labels,
00570                 'FEATURE': self.target.features,
00571                 'TOOLCHAIN': toolchain_labels
00572             }
00573 
00574             # This is a policy decision and it should /really/ be in the config system
00575             # ATM it's here for backward compatibility
00576             if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
00577                  "-O0" in self.flags['common']) or
00578                 ("-r" in self.flags['common'] and
00579                  "-On" in self.flags['common'])):
00580                 self.labels['TARGET'].append("DEBUG")
00581             else:
00582                 self.labels['TARGET'].append("RELEASE")
00583         return self.labels
00584 
00585 
00586     # Determine whether a source file needs updating/compiling
00587     def need_update(self, target, dependencies):
00588         if self.build_all:
00589             return True
00590 
00591         if not exists(target):
00592             return True
00593 
00594         target_mod_time = stat(target).st_mtime
00595 
00596         for d in dependencies:
00597             # Some objects are not provided with full path and here we do not have
00598             # information about the library paths. Safe option: assume an update
00599             if not d or not exists(d):
00600                 return True
00601 
00602             if d not in self.stat_cache:
00603                 self.stat_cache[d] = stat(d).st_mtime
00604 
00605             if self.stat_cache[d] >= target_mod_time:
00606                 return True
00607 
00608         return False
00609 
00610     def is_ignored(self, file_path):
00611         """Check if file path is ignored by any .mbedignore thus far"""
00612         return self._ignore_regex.match(normcase(file_path))
00613 
00614     def add_ignore_patterns(self, root, base_path, patterns):
00615         """Add a series of patterns to the ignored paths
00616 
00617         Positional arguments:
00618         root - the directory containing the ignore file
00619         base_path - the location that the scan started from
00620         patterns - the list of patterns we will ignore in the future
00621         """
00622         real_base = relpath(root, base_path)
00623         if real_base == ".":
00624             self.ignore_patterns.extend(normcase(p) for p in patterns)
00625         else:
00626             self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns)
00627         if self.ignore_patterns:
00628             self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns))
00629 
00630     # Create a Resources object from the path pointed to by *path* by either traversing a
00631     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00632     # when *path* is a file.
00633     # The parameter *base_path* is used to set the base_path attribute of the Resources
00634     # object and the parameter *exclude_paths* is used by the directory traversal to
00635     # exclude certain paths from the traversal.
00636     def scan_resources(self, path, exclude_paths=None, base_path=None,
00637                        collect_ignores=False):
00638         self.progress("scan", path)
00639 
00640         resources = Resources(path, collect_ignores=collect_ignores)
00641         if not base_path:
00642             if isfile(path):
00643                 base_path = dirname(path)
00644             else:
00645                 base_path = path
00646         resources.base_path = base_path
00647 
00648         if isfile(path):
00649             self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
00650         else:
00651             self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
00652         return resources
00653 
    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
    # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.add_ignore_patterns(root, base_path, lines)

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            root_path =join(relpath(root, base_path))
            if  (self.is_ignored(join(root_path,"")) or
                 self.build_dir == root_path):
                resources.ignore_dir(root_path)
                # Emptying dirs in-place prunes the walk below this root.
                dirs[:] = []
                continue

            # Iterate over a copy: dirs is mutated (dirs.remove) inside the
            # loop, and os.walk needs the live list for pruning.
            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(relpath(root, base_path), d,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        resources.ignore_dir(dir_path)
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    # (loop variables are bound as default arguments to avoid
                    # the late-binding closure pitfall)
                    def closure (dir_path=dir_path, base_path=base_path):
                        return self.scan_resources(dir_path, base_path=base_path,
                                                   collect_ignores=resources.collect_ignores)
                    resources.features.add_lazy(d[8:], closure)
                    resources.ignore_dir(dir_path)
                    dirs.remove(d)
                elif exclude_paths:
                    # Prune any directory that lies inside one of the
                    # exclude_paths (relpath not starting with '..').
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            resources.ignore_dir(dir_path)
                            dirs.remove(d)
                            break

            # Add root to include paths
            root = root.rstrip("/")
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00730 
00731     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00732     # (*file_path*) to the resources object based on the file type.
00733     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00734         resources.file_basepath[file_path] = base_path
00735 
00736         if self.is_ignored(relpath(file_path, base_path)):
00737             resources.ignore_dir(relpath(file_path, base_path))
00738             return
00739 
00740         _, ext = splitext(file_path)
00741         ext = ext.lower()
00742 
00743         if   ext == '.s':
00744             resources.s_sources.append(file_path)
00745 
00746         elif ext == '.c':
00747             resources.c_sources.append(file_path)
00748 
00749         elif ext == '.cpp':
00750             resources.cpp_sources.append(file_path)
00751 
00752         elif ext == '.h' or ext == '.hpp':
00753             resources.headers.append(file_path)
00754 
00755         elif ext == '.o':
00756             resources.objects.append(file_path)
00757 
00758         elif ext == self.LIBRARY_EXT:
00759             resources.libraries.append(file_path)
00760             resources.lib_dirs.add(dirname(file_path))
00761 
00762         elif ext == self.LINKER_EXT:
00763             if resources.linker_script is not None:
00764                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00765             resources.linker_script = file_path
00766 
00767         elif ext == '.lib':
00768             resources.lib_refs.append(file_path)
00769 
00770         elif ext == '.bld':
00771             resources.lib_builds.append(file_path)
00772 
00773         elif basename(file_path) == '.hgignore':
00774             resources.repo_files.append(file_path)
00775 
00776         elif basename(file_path) == '.gitignore':
00777             resources.repo_files.append(file_path)
00778 
00779         elif ext == '.hex':
00780             resources.hex_files.append(file_path)
00781 
00782         elif ext == '.bin':
00783             resources.bin_files.append(file_path)
00784 
00785         elif ext == '.json':
00786             resources.json_files.append(file_path)
00787 
00788 
00789     def scan_repository(self, path):
00790         resources = []
00791 
00792         for root, dirs, files in walk(path):
00793             # Remove ignored directories
00794             for d in copy(dirs):
00795                 if d == '.' or d == '..':
00796                     dirs.remove(d)
00797 
00798             for file in files:
00799                 file_path = join(root, file)
00800                 resources.append(file_path)
00801 
00802         return resources
00803 
00804     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00805         # Handle a single file
00806         if not isinstance(files_paths, list):
00807             files_paths = [files_paths]
00808 
00809         for source in files_paths:
00810             if source is None:
00811                 files_paths.remove(source)
00812 
00813         for source in files_paths:
00814             if resources is not None and source in resources.file_basepath:
00815                 relative_path = relpath(source, resources.file_basepath[source])
00816             elif rel_path is not None:
00817                 relative_path = relpath(source, rel_path)
00818             else:
00819                 _, relative_path = split(source)
00820 
00821             target = join(trg_path, relative_path)
00822 
00823             if (target != source) and (self.need_update(target, [source])):
00824                 self.progress("copy", relative_path)
00825                 mkdir(dirname(target))
00826                 copyfile(source, target)
00827 
00828     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00829     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00830     def relative_object_path(self, build_path, base_dir, source):
00831         source_dir, name, _ = split_path(source)
00832 
00833         obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
00834         if obj_dir is not self.prev_dir:
00835             self.prev_dir = obj_dir
00836             mkdir(obj_dir)
00837         return join(obj_dir, name + '.o')
00838 
00839     # Generate response file for all includes.
00840     # ARM, GCC, IAR cross compatible
00841     def get_inc_file(self, includes):
00842         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00843         if not exists(include_file):
00844             with open(include_file, "w") as f:
00845                 cmd_list = []
00846                 for c in includes:
00847                     if c:
00848                         c = c.replace("\\", "/")
00849                         if self.CHROOT:
00850                             c = c.replace(self.CHROOT, '')
00851                         cmd_list.append('"-I%s"' % c)
00852                 string = " ".join(cmd_list)
00853                 f.write(string)
00854         return include_file
00855 
00856     # Generate response file for all objects when linking.
00857     # ARM, GCC, IAR cross compatible
00858     def get_link_file(self, cmd):
00859         link_file = join(self.build_dir, ".link_files.txt")
00860         with open(link_file, "w") as f:
00861             cmd_list = []
00862             for c in cmd:
00863                 if c:
00864                     c = c.replace("\\", "/")
00865                     if self.CHROOT:
00866                         c = c.replace(self.CHROOT, '')
00867                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00868             string = " ".join(cmd_list)
00869             f.write(string)
00870         return link_file
00871 
00872     # Generate response file for all objects when archiving.
00873     # ARM, GCC, IAR cross compatible
00874     def get_arch_file(self, objects):
00875         archive_file = join(self.build_dir, ".archive_files.txt")
00876         with open(archive_file, "w") as f:
00877             o_list = []
00878             for o in objects:
00879                 o_list.append('"%s"' % o)
00880             string = " ".join(o_list).replace("\\", "/")
00881             f.write(string)
00882         return archive_file
00883 
00884     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00885     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00886     def compile_sources(self, resources, inc_dirs=None):
00887         # Web IDE progress bar for project build
00888         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00889         self.to_be_compiled = len(files_to_compile)
00890         self.compiled = 0
00891 
00892         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00893 
00894         inc_paths = resources.inc_dirs
00895         if inc_dirs is not None:
00896             if isinstance(inc_dirs, list):
00897                 inc_paths.extend(inc_dirs)
00898             else:
00899                 inc_paths.append(inc_dirs)
00900         # De-duplicate include paths
00901         inc_paths = set(inc_paths)
00902         # Sort include paths for consistency
00903         inc_paths = sorted(set(inc_paths))
00904         # Unique id of all include paths
00905         self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest()
00906 
00907         objects = []
00908         queue = []
00909         work_dir = getcwd()
00910         self.prev_dir = None
00911 
00912         # Generate configuration header (this will update self.build_all if needed)
00913         self.get_config_header()
00914         self.dump_build_profile()
00915 
00916         # Sort compile queue for consistency
00917         files_to_compile.sort()
00918         for source in files_to_compile:
00919             object = self.relative_object_path(
00920                 self.build_dir, resources.file_basepath[source], source)
00921 
00922             # Queue mode (multiprocessing)
00923             commands = self.compile_command(source, object, inc_paths)
00924             if commands is not None:
00925                 queue.append({
00926                     'source': source,
00927                     'object': object,
00928                     'commands': commands,
00929                     'work_dir': work_dir,
00930                     'chroot': self.CHROOT
00931                 })
00932             else:
00933                 self.compiled += 1
00934                 objects.append(object)
00935 
00936         # Use queues/multiprocessing if cpu count is higher than setting
00937         jobs = self.jobs if self.jobs else cpu_count()
00938         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00939             return self.compile_queue(queue, objects)
00940         else:
00941             return self.compile_seq(queue, objects)
00942 
00943     # Compile source files queue in sequential order
00944     def compile_seq(self, queue, objects):
00945         for item in queue:
00946             result = compile_worker(item)
00947 
00948             self.compiled += 1
00949             self.progress("compile", item['source'], build_update=True)
00950             for res in result['results']:
00951                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00952                 self.compile_output([
00953                     res['code'],
00954                     res['output'],
00955                     res['command']
00956                 ])
00957             objects.append(result['object'])
00958         return objects
00959 
    # Compile source files queue in parallel by creating pool of worker threads
    def compile_queue(self, queue, objects):
        """Compile queued jobs on a multiprocessing pool.

        Appends the resulting object files to *objects* and returns it.
        Raises ToolException on a compile failure or on timeout.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
        p = Pool(processes=jobs_count)

        # Submit every job up front; the pool schedules them across workers
        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))
        p.close()

        itr = 0
        while len(results):
            itr += 1
            # NOTE(review): 180000 iterations with the 0.01s sleep below is
            # roughly 30 minutes, not the 5 minutes the message claims —
            # confirm which value is intended.
            if itr > 180000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            sleep(0.01)
            pending = 0
            for r in results:
                if r.ready():
                    try:
                        # r.get() re-raises any exception the worker raised
                        result = r.get()
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException as err:
                        # On failure, drain the pool's internal task queue
                        # (private Pool API) so workers stop picking up new
                        # jobs, then tear the pool down and re-raise.
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    # Stop scanning once enough unfinished jobs have been
                    # seen to keep every worker busy
                    pending += 1
                    if pending >= jobs_count:
                        break

        results = None
        p.join()

        return objects
01012 
    # Determine the compile command based on type of source file
    def compile_command(self, source, object, includes):
        """Return what is needed to (re)build *object* from *source*.

        Return value:
        - a command (from compile_cpp/compile_c/assemble) when the object is
          out of date and must be rebuilt
        - None when the object is already up to date
        - False when the source extension is not recognised

        For C/C++ the dependency list comes from the compiler-generated .d
        file plus the app config file and the matching build-profile dump.
        """
        # Check dependencies
        _, ext = splitext(source)
        ext = ext.lower()

        if ext == '.c' or  ext == '.cpp':
            base, _ = splitext(object)
            dep_path = base + '.d'
            try:
                # A missing or unreadable .d file means "no known dependencies"
                deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
            except (IOError, IndexError):
                deps = []
            config_file = ([self.config.app_config_location]
                           if self.config.app_config_location else [])
            deps.extend(config_file)
            # Including the build-profile dump forces a rebuild when the
            # compiler flags change
            if ext == '.cpp' or self.COMPILE_C_AS_CPP:
                deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx"))
            else:
                deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c"))
            if len(deps) == 0 or self.need_update(object, deps):
                if ext == '.cpp' or self.COMPILE_C_AS_CPP:
                    return self.compile_cpp(source, object, includes)
                else:
                    return self.compile_c(source, object, includes)
        elif ext == '.s':
            deps = [source]
            deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm"))
            if self.need_update(object, deps):
                return self.assemble(source, object, includes)
        else:
            return False

        return None
01047 
01048     def parse_dependencies(self, dep_path):
01049         """Parse the dependency information generated by the compiler.
01050 
01051         Positional arguments:
01052         dep_path -- the path to a file generated by a previous run of the compiler
01053 
01054         Return value:
01055         A list of all source files that the dependency file indicated were dependencies
01056 
01057         Side effects:
01058         None
01059 
01060         Note: A default implementation is provided for make-like file formats
01061         """
01062         dependencies = []
01063         buff = open(dep_path).readlines()
01064         if buff:
01065             buff[0] = re.sub('^(.*?)\: ', '', buff[0])
01066             for line in buff:
01067                 filename = line.replace('\\\n', '').strip()
01068                 if filename:
01069                     filename = filename.replace('\\ ', '\a')
01070                     dependencies.extend(((self.CHROOT if self.CHROOT else '') +
01071                                          f.replace('\a', ' '))
01072                                         for f in filename.split(" "))
01073         return list(filter(None, dependencies))
01074 
01075     def is_not_supported_error(self, output):
01076         return "#error directive: [NOT_SUPPORTED]" in output
01077 
01078     @abstractmethod
01079     def parse_output(self, output):
01080         """Take in compiler output and extract sinlge line warnings and errors from it.
01081 
01082         Positional arguments:
01083         output -- a string of all the messages emitted by a run of the compiler
01084 
01085         Return value:
01086         None
01087 
01088         Side effects:
01089         call self.cc_info or self.notify with a description of the event generated by the compiler
01090         """
01091         raise NotImplemented
01092 
01093     def compile_output(self, output=[]):
01094         _rc = output[0]
01095         _stderr = output[1].decode("utf-8")
01096         command = output[2]
01097 
01098         # Parse output for Warnings and Errors
01099         self.parse_output(_stderr)
01100         self.debug("Return: %s"% _rc)
01101         for error_line in _stderr.splitlines():
01102             self.debug("Output: %s"% error_line)
01103 
01104         # Check return code
01105         if _rc != 0:
01106             if self.is_not_supported_error(_stderr):
01107                 raise NotSupportedException(_stderr)
01108             else:
01109                 raise ToolException(_stderr)
01110 
01111     def build_library(self, objects, dir, name):
01112         needed_update = False
01113         lib = self.STD_LIB_NAME % name
01114         fout = join(dir, lib)
01115         if self.need_update(fout, objects):
01116             self.info("Library: %s" % lib)
01117             self.archive(objects, fout)
01118             needed_update = True
01119 
01120         return needed_update
01121 
    def link_program(self, r, tmp_path, name):
        """Link the objects/libraries in resources *r* into the final image.

        Produces <name>.elf (and a binary in the target's output format,
        unless that format is 'elf') under *tmp_path*.  Returns a tuple of
        (absolute path of the final file, whether anything was rebuilt).
        """
        needed_update = False
        # Default output format; targets may override it
        ext = 'bin'
        if hasattr(self.target, 'OUTPUT_EXT'):
            ext = self.target.OUTPUT_EXT

        if hasattr(self.target, 'OUTPUT_NAMING'):
            self.var("binary_naming", self.target.OUTPUT_NAMING)
            if self.target.OUTPUT_NAMING == "8.3":
                # DOS-style 8.3 naming: truncate name and extension
                name = name[0:8]
                ext = ext[0:3]

        # Create destination directory
        head, tail =  split(name)
        new_path = join(tmp_path, head)
        mkdir(new_path)

        filename = name+'.'+ext
        # Absolute path of the final linked file
        full_path = join(tmp_path, filename)
        elf = join(tmp_path, name + '.elf')
        # No separate binary step when the final format already is the ELF
        bin = None if ext == 'elf' else full_path
        map = join(tmp_path, name + '.map')

        # De-duplicate and sort objects for a deterministic link order
        r.objects = sorted(set(r.objects))
        config_file = ([self.config.app_config_location]
                       if self.config.app_config_location else [])
        dependencies = r.objects + r.libraries + [r.linker_script] + config_file
        # The linker build-profile dump forces a relink when linker flags change
        dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
        if self.need_update(elf, dependencies):
            needed_update = True
            self.progress("link", name)
            self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

        if bin and self.need_update(bin, [elf]):
            needed_update = True
            self.progress("elf2bin", name)
            self.binary(r, elf, bin)

        # Initialize memap and process map file. This doesn't generate output.
        self.mem_stats(map)

        self.var("compile_succeded", True)
        self.var("binary", filename)

        return full_path, needed_update
01168 
01169     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01170     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01171     def default_cmd(self, command):
01172         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01173         self.debug("Return: %s"% _rc)
01174 
01175         for output_line in _stdout.splitlines():
01176             self.debug("Output: %s"% output_line)
01177         for error_line in _stderr.splitlines():
01178             self.debug("Errors: %s"% error_line)
01179 
01180         if _rc != 0:
01181             for line in _stderr.splitlines():
01182                 self.tool_error(line)
01183             raise ToolException(_stderr)
01184 
01185     ### NOTIFICATIONS ###
01186     def info(self, message):
01187         self.notify({'type': 'info', 'message': message})
01188 
01189     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01190     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01191     def debug(self, message):
01192         if self.VERBOSE:
01193             if isinstance(message, list):
01194                 message = ' '.join(message)
01195             message = "[DEBUG] " + message
01196             self.notify({'type': 'debug', 'message': message})
01197 
01198     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01199     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01200     def cc_info(self, info=None):
01201         if info is not None:
01202             info['type'] = 'cc'
01203             self.notify(info)
01204 
01205     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01206     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01207     def cc_verbose(self, message, file=""):
01208         self.debug(message)
01209 
01210     def progress(self, action, file, build_update=False):
01211         msg = {'type': 'progress', 'action': action, 'file': file}
01212         if build_update:
01213             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01214         self.notify(msg)
01215 
01216     def tool_error(self, message):
01217         self.notify({'type': 'tool_error', 'message': message})
01218 
01219     def var(self, key, value):
01220         self.notify({'type': 'var', 'key': key, 'val': value})
01221 
01222     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01223     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01224     def mem_stats(self, map):
01225         """! Creates parser object
01226         @param map Path to linker map file to parse and decode
01227         @return None
01228         """
01229         toolchain = self.__class__.__name__
01230 
01231         # Create memap object
01232         memap = MemapParser()
01233 
01234         # Parse and decode a map file
01235         if memap.parse(abspath(map), toolchain) is False:
01236             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01237             return None
01238 
01239         # Store the memap instance for later use
01240         self.memap_instance = memap
01241 
01242         # Note: memory statistics are not returned.
01243         # Need call to generate_output later (depends on depth & output format)
01244 
01245         return None
01246 
01247     def add_regions(self):
01248         """Add regions to the build profile, if there are any.
01249         """
01250         print("Using regions in this build:")
01251         for region in self.config.regions:
01252             for define in [(region.name.upper() + "_ADDR", region.start),
01253                            (region.name.upper() + "_SIZE", region.size)]:
01254                 define_string = "-D%s=0x%x" %  define
01255                 self.cc.append(define_string)
01256                 self.cppc.append(define_string)
01257                 self.flags["common"].append(define_string)
01258             if region.active:
01259                 for define in [("MBED_APP_START", region.start),
01260                                ("MBED_APP_SIZE", region.size)]:
01261                     define_string = self.make_ld_define(*define)
01262                     self.ld.append(define_string)
01263                     self.flags["ld"].append(define_string)
01264             print("  Region %s size 0x%x, offset 0x%x"
01265                     % (region.name, region.size, region.start))
01266 
01267     # Set the configuration data
01268     def set_config_data(self, config_data):
01269         self.config_data = config_data
01270         if self.config.has_regions:
01271             self.add_regions()
01272 
    # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not create (or deleted if it exists).
    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
    # - if there is configuration data similar to the previous configuration data,
    #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overriden.
    # The function needs to be called exactly once for the lifetime of this toolchain instance.
    # The "config_processed" variable (below) ensures this behaviour.
    # The function returns the location of the configuration file, or None if there is no
    # configuration data available (and thus no configuration file)
    def get_config_header(self):
        """Create, update, or delete the config header; return its path.

        Returns None when there is no configuration data.  Sets
        self.build_all when the configuration changed so a full rebuild is
        triggered.  Idempotent after the first call (self.config_processed).
        """
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "r") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = self.config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "w") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file
01320 
01321     def dump_build_profile(self):
01322         """Dump the current build profile and macros into the `.profile` file
01323         in the build directory"""
01324         for key in ["cxx", "c", "asm", "ld"]:
01325             to_dump = (str(self.flags[key]) + str(sorted(self.macros)))
01326             if key in ["cxx", "c"]:
01327                 to_dump += str(self.flags['common'])
01328             where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key)
01329             self._overwrite_when_not_equal(where, to_dump)
01330 
01331     @staticmethod
01332     def _overwrite_when_not_equal(filename, content):
01333         if not exists(filename) or content != open(filename).read():
01334             with open(filename, "w") as out:
01335                 out.write(content)
01336 
01337     @staticmethod
01338     def generic_check_executable(tool_key, executable_name, levels_up,
01339                                  nested_dir=None):
01340         """
01341         Positional args:
01342         tool_key: the key to index TOOLCHAIN_PATHS
01343         executable_name: the toolchain's named executable (ex. armcc)
01344         levels_up: each toolchain joins the toolchain_path, some
01345         variable directories (bin, include), and the executable name,
01346         so the TOOLCHAIN_PATH value must be appropriately distanced
01347 
01348         Keyword args:
01349         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01350           is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
01351           that will be used by toolchain's compile)
01352 
01353         Returns True if the executable location specified by the user
01354         exists and is valid OR the executable can be found on the PATH.
01355         Returns False otherwise.
01356         """
01357         # Search PATH if user did not specify a path or specified path doesn't
01358         # exist.
01359         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01360             exe = find_executable(executable_name)
01361             if not exe:
01362                 return False
01363             for level in range(levels_up):
01364                 # move up the specified number of directories
01365                 exe = dirname(exe)
01366             TOOLCHAIN_PATHS[tool_key] = exe
01367         if nested_dir:
01368             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01369                           executable_name)
01370         else:
01371             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01372         # User could have specified a path that exists but does not contain exe
01373         return exists(subdir) or exists(subdir +'.exe')
01374 
01375     @abstractmethod
01376     def check_executable(self):
01377         """Returns True if the executable (armcc) location specified by the
01378          user exists OR the executable can be found on the PATH.
01379          Returns False otherwise."""
01380         raise NotImplemented
01381 
01382     @abstractmethod
01383     def get_config_option(self, config_header):
01384         """Generate the compiler option that forces the inclusion of the configuration
01385         header file.
01386 
01387         Positional arguments:
01388         config_header -- The configuration header that will be included within all source files
01389 
01390         Return value:
01391         A list of the command line arguments that will force the inclusion the specified header
01392 
01393         Side effects:
01394         None
01395         """
01396         raise NotImplemented
01397 
01398     @abstractmethod
01399     def get_compile_options(self, defines, includes, for_asm=False):
01400         """Generate the compiler options from the defines and includes
01401 
01402         Positional arguments:
01403         defines -- The preprocessor macros defined on the command line
01404         includes -- The include file search paths
01405 
01406         Keyword arguments:
01407         for_asm -- generate the assembler options instead of the compiler options
01408 
01409         Return value:
01410         A list of the command line arguments that will force the inclusion the specified header
01411 
01412         Side effects:
01413         None
01414         """
01415         raise NotImplemented
01416 
01417     @abstractmethod
01418     def assemble(self, source, object, includes):
01419         """Generate the command line that assembles.
01420 
01421         Positional arguments:
01422         source -- a file path that is the file to assemble
01423         object -- a file path that is the destination object
01424         includes -- a list of all directories where header files may be found
01425 
01426         Return value:
01427         The complete command line, as a list, that would invoke the assembler
01428         on the source file, include all the include paths, and generate
01429         the specified object file.
01430 
01431         Side effects:
01432         None
01433 
01434         Note:
01435         This method should be decorated with @hook_tool.
01436         """
01437         raise NotImplemented
01438 
01439     @abstractmethod
01440     def compile_c(self, source, object, includes):
01441         """Generate the command line that compiles a C source file.
01442 
01443         Positional arguments:
01444         source -- the C source file to compile
01445         object -- the destination object file
01446         includes -- a list of all the directories where header files may be found
01447 
01448         Return value:
01449         The complete command line, as a list, that would invoke the C compiler
01450         on the source file, include all the include paths, and generate the
01451         specified object file.
01452 
01453         Side effects:
01454         None
01455 
01456         Note:
01457         This method should be decorated with @hook_tool.
01458         """
01459         raise NotImplemented
01460 
01461     @abstractmethod
01462     def compile_cpp(self, source, object, includes):
01463         """Generate the command line that compiles a C++ source file.
01464 
01465         Positional arguments:
01466         source -- the C++ source file to compile
01467         object -- the destination object file
01468         includes -- a list of all the directories where header files may be found
01469 
01470         Return value:
01471         The complete command line, as a list, that would invoke the C++ compiler
01472         on the source file, include all the include paths, and generate the
01473         specified object file.
01474 
01475         Side effects:
01476         None
01477 
01478         Note:
01479         This method should be decorated with @hook_tool.
01480         """
01481         raise NotImplemented
01482 
01483     @abstractmethod
01484     def link(self, output, objects, libraries, lib_dirs, mem_map):
01485         """Run the linker to create an executable and memory map.
01486 
01487         Positional arguments:
01488         output -- the file name to place the executable in
01489         objects -- all of the object files to link
01490         libraries -- all of the required libraries
01491         lib_dirs -- where the required libraries are located
01492         mem_map -- the location where the memory map file should be stored
01493 
01494         Return value:
01495         None
01496 
01497         Side effect:
01498         Runs the linker to produce the executable.
01499 
01500         Note:
01501         This method should be decorated with @hook_tool.
01502         """
01503         raise NotImplemented
01504 
01505     @abstractmethod
01506     def archive(self, objects, lib_path):
01507         """Run the command line that creates an archive.
01508 
01509         Positional arguhments:
01510         objects -- a list of all the object files that should be archived
01511         lib_path -- the file name of the resulting library file
01512 
01513         Return value:
01514         None
01515 
01516         Side effect:
01517         Runs the archiving tool to produce the library file.
01518 
01519         Note:
01520         This method should be decorated with @hook_tool.
01521         """
01522         raise NotImplemented
01523 
01524     @abstractmethod
01525     def binary(self, resources, elf, bin):
01526         """Run the command line that will Extract a simplified binary file.
01527 
01528         Positional arguments:
01529         resources -- A resources object (Is not used in any of the toolchains)
01530         elf -- the executable file that is to be converted
01531         bin -- the file name of the to be created simplified binary file
01532 
01533         Return value:
01534         None
01535 
01536         Side effect:
01537         Runs the elf2bin tool to produce the simplified binary file.
01538 
01539         Note:
01540         This method should be decorated with @hook_tool.
01541         """
01542         raise NotImplemented
01543 
01544     @staticmethod
01545     @abstractmethod
01546     def name_mangle(name):
01547         """Mangle a name based on the conventional name mangling of this toolchain
01548 
01549         Positional arguments:
01550         name -- the name to mangle
01551 
01552         Return:
01553         the mangled name as a string
01554         """
01555         raise NotImplemented
01556 
01557     @staticmethod
01558     @abstractmethod
01559     def make_ld_define(name, value):
01560         """Create an argument to the linker that would define a symbol
01561 
01562         Positional arguments:
01563         name -- the symbol to define
01564         value -- the value to give the symbol
01565 
01566         Return:
01567         The linker flag as a string
01568         """
01569         raise NotImplemented
01570 
01571     @staticmethod
01572     @abstractmethod
01573     def redirect_symbol(source, sync, build_dir):
01574         """Redirect a symbol at link time to point at somewhere else
01575 
01576         Positional arguments:
01577         source -- the symbol doing the pointing
01578         sync -- the symbol being pointed to
01579         build_dir -- the directory to put "response files" if needed by the toolchain
01580 
01581         Side Effects:
01582         Possibly create a file in the build directory
01583 
01584         Return:
01585         The linker flag to redirect the symbol, as a string
01586         """
01587         raise NotImplemented
01588 
01589     # Return the list of macros geenrated by the build system
01590     def get_config_macros(self):
01591         return self.config.config_to_macros(self.config_data) if self.config_data else []
01592 
01593     @property
01594     def report(self):
01595         to_ret = {}
01596         to_ret['c_compiler'] = {'flags': copy(self.flags['c']),
01597                                 'symbols': self.get_symbols()}
01598         to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']),
01599                                   'symbols': self.get_symbols()}
01600         to_ret['assembler'] = {'flags': copy(self.flags['asm']),
01601                                'symbols': self.get_symbols(True)}
01602         to_ret['linker'] = {'flags': copy(self.flags['ld'])}
01603         to_ret.update(self.config.report)
01604         return to_ret
01605 
# NOTE(review): these imports sit at the bottom of the module rather than the
# top — presumably because tools.toolchains.arm/gcc/iar import names defined
# above (and read TOOLCHAIN_PATHS), so a top-of-file import would create a
# circular import. Confirm before moving them.
from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH

# Install directory for each supported toolchain, keyed by toolchain name.
# 'ARM' and 'uARM' share a single install path. Entries may be rewritten at
# runtime by the executable-search helper above when a tool is located on
# the PATH instead.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,
    'ARMC6': ARMC6_PATH,
    'GCC_ARM': GCC_ARM_PATH,
    'IAR': IAR_PATH
}

from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6
from tools.toolchains.gcc import GCC_ARM
from tools.toolchains.iar import IAR

# Concrete toolchain class implementing each supported toolchain name.
TOOLCHAIN_CLASSES = {
    u'ARM': ARM_STD,
    u'uARM': ARM_MICRO,
    u'ARMC6': ARMC6,
    u'GCC_ARM': GCC_ARM,
    u'IAR': IAR
}

# The set of all supported toolchain names.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())