Gleb Klochkov / Mbed OS Climatcontroll_Main

Dependencies:   esp8266-driver

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 """
00002 mbed SDK
00003 Copyright (c) 2011-2013 ARM Limited
00004 
00005 Licensed under the Apache License, Version 2.0 (the "License");
00006 you may not use this file except in compliance with the License.
00007 You may obtain a copy of the License at
00008 
00009     http://www.apache.org/licenses/LICENSE-2.0
00010 
00011 Unless required by applicable law or agreed to in writing, software
00012 distributed under the License is distributed on an "AS IS" BASIS,
00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00014 See the License for the specific language governing permissions and
00015 limitations under the License.
00016 """
00017 from __future__ import print_function, division, absolute_import
00018 
00019 import re
00020 import sys
00021 from os import stat, walk, getcwd, sep, remove
00022 from copy import copy
00023 from time import time, sleep
00024 from shutil import copyfile
00025 from os.path import (join, splitext, exists, relpath, dirname, basename, split,
00026                      abspath, isfile, isdir, normcase)
00027 from itertools import chain
00028 from inspect import getmro
00029 from copy import deepcopy
00030 from abc import ABCMeta, abstractmethod
00031 from distutils.spawn import find_executable
00032 from multiprocessing import Pool, cpu_count
00033 from hashlib import md5
00034 import fnmatch
00035 
00036 from ..utils import (run_cmd, mkdir, rel_path, ToolException,
00037                     NotSupportedException, split_path, compile_worker)
00038 from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK
00039 from .. import hooks
00040 from ..memap import MemapParser
00041 
00042 
# Minimum number of CPUs assumed when sizing the parallel build worker pool.
# Disables multiprocessing if set to higher number than the host machine CPUs.
CPU_COUNT_MIN = 1
# Scaling coefficient for the detected CPU count (usage is not visible in
# this chunk — presumably multiplies cpu_count(); confirm at the call site).
CPU_COEF = 1
00046 
class LazyDict(dict):
    """A dict-like container whose values may be computed on demand.

    Values live in two internal dicts: ``eager`` holds already-computed
    values, ``lazy`` holds zero-argument thunks that are invoked (once) the
    first time their key is accessed.

    NOTE: the inherited ``dict`` storage is never populated, so the
    dict-level ``keys()``/``values()`` are overridden below; without the
    overrides they would always report an empty mapping.
    """

    def __init__(self):
        self.eager = {}  # key -> computed value
        self.lazy = {}   # key -> thunk producing the value

    def add_lazy(self, key, thunk):
        """Register *thunk* as the deferred producer of *key*'s value."""
        if key in self.eager:
            del self.eager[key]
        self.lazy[key] = thunk

    def __getitem__(self, key):
        # Force evaluation of a lazy entry on first access, then cache it.
        if (key not in self.eager
                and key in self.lazy):
            self.eager[key] = self.lazy[key]()
            del self.lazy[key]
        return self.eager[key]

    def __setitem__(self, key, value):
        self.eager[key] = value

    def __delitem__(self, key):
        if key in self.eager:
            del self.eager[key]
        else:
            del self.lazy[key]

    def __contains__(self, key):
        return key in self.eager or key in self.lazy

    def __iter__(self):
        return chain(iter(self.eager), iter(self.lazy))

    def __len__(self):
        return len(self.eager) + len(self.lazy)

    def keys(self):
        """Return all keys (eager and lazy) without forcing evaluation."""
        return chain(self.eager.keys(), self.lazy.keys())

    def values(self):
        """Warning: This forces the evaluation of all lazy values."""
        for _, value in self.items():
            yield value

    def __str__(self):
        return "Lazy{%s}" % (
            ", ".join("%r: %r" % (k, v) for k, v in
                      chain(self.eager.items(), ((k, "not evaluated")
                                                     for k in self.lazy))))

    def update(self, other):
        """Merge *other* into this dict, preserving laziness when possible."""
        if isinstance(other, LazyDict):
            self.eager.update(other.eager)
            self.lazy.update(other.lazy)
        else:
            self.eager.update(other)

    def items(self):
        """Warning: This forces the evaluation all of the items in this LazyDict
        that are iterated over."""
        for k, v in self.eager.items():
            yield k, v
        # Iterate over a snapshot of the keys: self[k] evaluates the thunk
        # and deletes it from self.lazy, which would otherwise raise
        # "dictionary changed size during iteration" on Python 3.
        for k in list(self.lazy):
            yield k, self[k]

    def apply(self, fn):
        """Delay the application of a computation to all items of the lazy dict.
        Does no computation now. Instead the computation is performed when a
        consumer attempts to access a value in this LazyDict"""
        new_lazy = {}
        for k, f in self.lazy.items():
            def closure(f=f):
                return fn(f())
            new_lazy[k] = closure
        for k, v in self.eager.items():
            def closure(v=v):
                return fn(v)
            new_lazy[k] = closure
        self.lazy = new_lazy
        self.eager = {}
00118 
class Resources:
    """Collection of the files and directories found while scanning a source
    tree: include dirs, headers, sources, objects, libraries, linker script,
    repository metadata, misc binary/config files and per-feature
    sub-Resources (held lazily in a LazyDict).
    """

    # Attributes holding path lists that relative_to()/win_to_unix() rewrite
    _PATH_FIELDS = ['inc_dirs', 'headers', 's_sources', 'c_sources',
                    'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                    'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                    'hex_files', 'bin_files', 'json_files']

    def __init__(self, base_path=None, collect_ignores=False):
        self.base_path = base_path
        self.collect_ignores = collect_ignores

        # Maps each discovered path to the base path it was found under
        self.file_basepath = {}

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []
        self.json_files = []

        # FEATURE_* directories are scanned lazily; they are only evaluated
        # when the config system enables the corresponding feature
        self.features = LazyDict()
        self.ignored_dirs = []

    def __add__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def __radd__(self, resources):
        if resources is None:
            return self
        else:
            return self.add(resources)

    def ignore_dir(self, directory):
        """Record *directory* as ignored (only when collect_ignores is on)."""
        if self.collect_ignores:
            self.ignored_dirs.append(directory)

    def add(self, resources):
        """Merge another Resources object into this one in place.

        Returns self so the + / += operators can chain.
        """
        for f, p in resources.file_basepath.items():
            self.file_basepath[f] = p

        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        # The last linker script seen wins
        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files
        self.json_files += resources.json_files

        self.features.update(resources.features)
        self.ignored_dirs += resources.ignored_dirs

        return self

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        """Accumulate name collisions into the two supplied dicts.

        dupe_dict maps object basenames to the set of source paths that
        would produce them; dupe_headers maps header basenames to the set
        of header paths carrying that name. Both dicts are returned.
        """
        for filename in self.s_sources + self.c_sources + self.cpp_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set())
            dupe_dict[objname] |= set([filename])
        for filename in self.headers:
            headername = basename(filename)
            dupe_headers.setdefault(headername, set())
            # Record the full path, not the basename: storing the basename
            # kept every set at size one, so detect_duplicates() could never
            # report duplicated headers.
            dupe_headers[headername] |= set([filename])
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self, toolchain):
        """Detect all potential ambiguities in filenames and report them with
        a toolchain notification

        Positional Arguments:
        toolchain - used for notifications

        Returns the number of collisions found.
        """
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for objname, filenames in dupe_dict.items():
            if len(filenames) > 1:
                count += 1
                toolchain.tool_error(
                    "Object file %s.o is not unique! It could be made from: %s"
                    % (objname, " ".join(filenames)))
        for headername, locations in dupe_headers.items():
            if len(locations) > 1:
                count += 1
                toolchain.tool_error(
                    "Header file %s is not unique! It could be: %s" %
                    (headername, " ".join(locations)))
        return count

    def relative_to(self, base, dot=False):
        """Rewrite every stored path so it is relative to *base*."""
        for field in self._PATH_FIELDS:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        # Applied lazily: feature sub-resources are rewritten on first access
        def to_apply(feature, base=base, dot=dot):
            feature.relative_to(base, dot)
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        """Convert every stored path from Windows to Unix separators."""
        for field in self._PATH_FIELDS:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        # Applied lazily: feature sub-resources are converted on first access
        def to_apply(feature):
            feature.win_to_unix()
        self.features.apply(to_apply)

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n  ' % label + '\n  '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)
00298 
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = {
    # Legacy per-target directories
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    # Legacy per-toolchain directories
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
    'ARMC6',
}

# Map from toolchain class name to the legacy directory name it corresponds to
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD': 'ARM',
    'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM',
    'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
    'ARMC6': 'ARMC6',
}
00314 
00315 
class mbedToolchain:
    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    # Preprocessor symbols injected per Cortex core name; consumed by
    # get_symbols() for both the assembler and the C/C++ symbol lists.
    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
        "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M33F": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
    }

    # Name of the generated configuration header
    MBED_CONFIG_FILE_NAME="mbed_config.h"

    # Basename of the per-build profile dump file
    PROFILE_FILE_NAME = ".profile"

    # Python 2 style abstract-base-class marker; NOTE(review): has no effect
    # on Python 3 (metaclass=ABCMeta would be required there instead)
    __metaclass__ = ABCMeta

    # Default (empty) build profile used when none is supplied to __init__
    profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]}
00353 
    def __init__(self, target, notify=None, macros=None, silent=False,
                 extra_verbose=False, build_profile=None, build_dir=None):
        """Set up toolchain state for building *target*.

        Positional arguments:
        target - target object (provides labels, features, macros, core, ...)

        Keyword arguments:
        notify - callback taking (event, silent) for build notifications
        macros - extra user-defined preprocessor symbols
        silent - suppress printed output when True
        extra_verbose - use the more verbose default notifier
        build_profile - dict of per-language flag lists (see profile_template)
        build_dir - directory build output is written to
        """
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags (deep-copied so per-instance mutation is safe)
        self.flags = deepcopy(build_profile or self.profile_template)

        # System libraries provided by the toolchain
        self.sys_libs = []

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        # (computed lazily by get_symbols(); None means "not computed yet")
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = build_dir
        self.timestamp = time()

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files; the regex starts out
        # matching nothing ("$^" can never match)
        self.ignore_patterns = []
        self._ignore_regex = re.compile("$^")

        # Pre-mbed 2.0 ignore dirs
        # NOTE(review): TOOLCHAINS is expected to be a module-level set
        # defined elsewhere in this file — not visible in this chunk.
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #            e.g.: a compile succeeded, or a warning was emitted by the compiler
        #                  or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()

        # uVisor specific rules: strip the trailing "F" (FPU variant) from
        # the core name when uVisor is enabled and supported
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()
00442 
    # Used for post __init__() hooks
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def init(self):
        """Post-__init__() hook point; the default implementation does
        nothing and reports success."""
        return True
00448 
    def get_output(self):
        """Return the accumulated notification output buffer as one string."""
        return self.output
00451 
00452     def print_notify(self, event, silent=False):
00453         """ Default command line notification
00454         """
00455         msg = None
00456 
00457         if not self.VERBOSE and event['type'] == 'tool_error':
00458             msg = event['message']
00459 
00460         elif event['type'] in ['info', 'debug']:
00461             msg = event['message']
00462 
00463         elif event['type'] == 'cc':
00464             event['severity'] = event['severity'].title()
00465 
00466             if PRINT_COMPILER_OUTPUT_AS_LINK:
00467                 event['file'] = getcwd() + event['file'].strip('.')
00468                 msg = '[%(severity)s] %(file)s:%(line)s:%(col)s: %(message)s' % event
00469             else:
00470                 event['file'] = basename(event['file'])
00471                 msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event
00472 
00473         elif event['type'] == 'progress':
00474             if 'percent' in event:
00475                 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(),
00476                                                   event['percent'],
00477                                                   basename(event['file']))
00478             else:
00479                 msg = '{}: {}'.format(event['action'].title(),
00480                                       basename(event['file']))
00481 
00482         if msg:
00483             if not silent:
00484                 print(msg)
00485             self.output += msg + "\n"
00486 
00487     def print_notify_verbose(self, event, silent=False):
00488         """ Default command line notification with more verbose mode
00489         """
00490         if event['type'] in ['info', 'debug']:
00491             self.print_notify(event, silent=silent) # standard handle
00492 
00493         elif event['type'] == 'cc':
00494             event['severity'] = event['severity'].title()
00495             event['file'] = basename(event['file'])
00496             event['mcu_name'] = "None"
00497             event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
00498             event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
00499             msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
00500             if not silent:
00501                 print(msg)
00502             self.output += msg + "\n"
00503 
00504         elif event['type'] == 'progress':
00505             self.print_notify(event) # standard handle
00506 
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def notify(self, event):
        """ Little closure for notify functions

        Tags *event* with this toolchain instance, then forwards it to the
        configured notifier along with the instance-wide silent flag.
        """
        event['toolchain'] = self
        return self.notify_fun(event, self.silent)
00514 
00515     def get_symbols(self, for_asm=False):
00516         if for_asm:
00517             if self.asm_symbols is None:
00518                 self.asm_symbols = []
00519 
00520                 # Cortex CPU symbols
00521                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00522                     self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00523 
00524                 # Add target's symbols
00525                 self.asm_symbols += self.target.macros
00526                 # Add extra symbols passed via 'macros' parameter
00527                 self.asm_symbols += self.macros
00528             return list(set(self.asm_symbols))  # Return only unique symbols
00529         else:
00530             if self.cxx_symbols is None:
00531                 # Target and Toolchain symbols
00532                 labels = self.get_labels()
00533                 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
00534                 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
00535 
00536                 # Cortex CPU symbols
00537                 if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
00538                     self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
00539 
00540                 # Symbols defined by the on-line build.system
00541                 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
00542                 if MBED_ORG_USER:
00543                     self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
00544 
00545                 # Add target's symbols
00546                 self.cxx_symbols += self.target.macros
00547                 # Add target's hardware
00548                 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
00549                 # Add target's features
00550                 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
00551                 # Add extra symbols passed via 'macros' parameter
00552                 self.cxx_symbols += self.macros
00553 
00554                 # Form factor variables
00555                 if hasattr(self.target, 'supported_form_factors'):
00556                     self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
00557 
00558             return list(set(self.cxx_symbols))  # Return only unique symbols
00559 
00560     # Extend the internal list of macros
00561     def add_macros(self, new_macros):
00562         self.macros.extend(new_macros)
00563 
00564     def get_labels(self):
00565         if self.labels is None:
00566             toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
00567             toolchain_labels.remove('mbedToolchain')
00568             self.labels = {
00569                 'TARGET': self.target.labels,
00570                 'FEATURE': self.target.features,
00571                 'TOOLCHAIN': toolchain_labels
00572             }
00573 
00574             # This is a policy decision and it should /really/ be in the config system
00575             # ATM it's here for backward compatibility
00576             if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and
00577                  "-O0" in self.flags['common']) or
00578                 ("-r" in self.flags['common'] and
00579                  "-On" in self.flags['common'])):
00580                 self.labels['TARGET'].append("DEBUG")
00581             else:
00582                 self.labels['TARGET'].append("RELEASE")
00583         return self.labels
00584 
00585 
00586     # Determine whether a source file needs updating/compiling
00587     def need_update(self, target, dependencies):
00588         if self.build_all:
00589             return True
00590 
00591         if not exists(target):
00592             return True
00593 
00594         target_mod_time = stat(target).st_mtime
00595 
00596         for d in dependencies:
00597             # Some objects are not provided with full path and here we do not have
00598             # information about the library paths. Safe option: assume an update
00599             if not d or not exists(d):
00600                 return True
00601 
00602             if d not in self.stat_cache:
00603                 self.stat_cache[d] = stat(d).st_mtime
00604 
00605             if self.stat_cache[d] >= target_mod_time:
00606                 return True
00607 
00608         return False
00609 
00610     def is_ignored(self, file_path):
00611         """Check if file path is ignored by any .mbedignore thus far"""
00612         return self._ignore_regex.match(normcase(file_path))
00613 
00614     def add_ignore_patterns(self, root, base_path, patterns):
00615         """Add a series of patterns to the ignored paths
00616 
00617         Positional arguments:
00618         root - the directory containing the ignore file
00619         base_path - the location that the scan started from
00620         patterns - the list of patterns we will ignore in the future
00621         """
00622         real_base = relpath(root, base_path)
00623         if real_base == ".":
00624             self.ignore_patterns.extend(normcase(p) for p in patterns)
00625         else:
00626             self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns)
00627         if self.ignore_patterns:
00628             self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns))
00629 
00630     # Create a Resources object from the path pointed to by *path* by either traversing a
00631     # a directory structure, when *path* is a directory, or adding *path* to the resources,
00632     # when *path* is a file.
00633     # The parameter *base_path* is used to set the base_path attribute of the Resources
00634     # object and the parameter *exclude_paths* is used by the directory traversal to
00635     # exclude certain paths from the traversal.
00636     def scan_resources(self, path, exclude_paths=None, base_path=None,
00637                        collect_ignores=False):
00638         self.progress("scan", path)
00639 
00640         resources = Resources(path, collect_ignores=collect_ignores)
00641         if not base_path:
00642             if isfile(path):
00643                 base_path = dirname(path)
00644             else:
00645                 base_path = path
00646         resources.base_path = base_path
00647 
00648         if isfile(path):
00649             self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
00650         else:
00651             self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
00652         return resources
00653 
    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
    # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open (join(root,".mbedignore"), "r") as f:
                    lines=f.readlines()
                    lines = [l.strip() for l in lines] # Strip whitespaces
                    lines = [l for l in lines if l != ""] # Strip empty lines
                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.add_ignore_patterns(root, base_path, lines)

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            root_path =join(relpath(root, base_path))
            if  (self.is_ignored(join(root_path,"")) or
                 self.build_dir == root_path):
                resources.ignore_dir(root_path)
                # Clearing dirs in-place stops walk() recursing any deeper
                dirs[:] = []
                continue

            # Iterate over a copy because dirs is pruned in-place below
            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchain that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(relpath(root, base_path), d,"")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                        resources.ignore_dir(dir_path)
                        dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    # (dir_path/base_path are bound as defaults to avoid the
                    # late-binding closure pitfall)
                    def closure (dir_path=dir_path, base_path=base_path):
                        return self.scan_resources(dir_path, base_path=base_path,
                                                   collect_ignores=resources.collect_ignores)
                    resources.features.add_lazy(d[8:], closure)
                    resources.ignore_dir(dir_path)
                    dirs.remove(d)
                elif exclude_paths:
                    for exclude_path in exclude_paths:
                        # NOTE(review): this local rel_path shadows the
                        # rel_path helper imported from ..utils — harmless
                        # here, but worth renaming at some point
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            resources.ignore_dir(dir_path)
                            dirs.remove(d)
                            break

            # Add root to include paths
            root = root.rstrip("/")
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)
00730 
00731     # A helper function for both scan_resources and _add_dir. _add_file adds one file
00732     # (*file_path*) to the resources object based on the file type.
00733     def _add_file(self, file_path, resources, base_path, exclude_paths=None):
00734 
00735         if  (self.is_ignored(relpath(file_path, base_path)) or
00736              basename(file_path).startswith(".")):
00737             resources.ignore_dir(relpath(file_path, base_path))
00738             return
00739 
00740         resources.file_basepath[file_path] = base_path
00741         _, ext = splitext(file_path)
00742         ext = ext.lower()
00743 
00744         if   ext == '.s':
00745             resources.s_sources.append(file_path)
00746 
00747         elif ext == '.c':
00748             resources.c_sources.append(file_path)
00749 
00750         elif ext == '.cpp':
00751             resources.cpp_sources.append(file_path)
00752 
00753         elif ext == '.h' or ext == '.hpp':
00754             resources.headers.append(file_path)
00755 
00756         elif ext == '.o':
00757             resources.objects.append(file_path)
00758 
00759         elif ext == self.LIBRARY_EXT:
00760             resources.libraries.append(file_path)
00761             resources.lib_dirs.add(dirname(file_path))
00762 
00763         elif ext == self.LINKER_EXT:
00764             if resources.linker_script is not None:
00765                 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
00766             resources.linker_script = file_path
00767 
00768         elif ext == '.lib':
00769             resources.lib_refs.append(file_path)
00770 
00771         elif ext == '.bld':
00772             resources.lib_builds.append(file_path)
00773 
00774         elif basename(file_path) == '.hgignore':
00775             resources.repo_files.append(file_path)
00776 
00777         elif basename(file_path) == '.gitignore':
00778             resources.repo_files.append(file_path)
00779 
00780         elif ext == '.hex':
00781             resources.hex_files.append(file_path)
00782 
00783         elif ext == '.bin':
00784             resources.bin_files.append(file_path)
00785 
00786         elif ext == '.json':
00787             resources.json_files.append(file_path)
00788 
00789 
00790     def scan_repository(self, path):
00791         resources = []
00792 
00793         for root, dirs, files in walk(path):
00794             # Remove ignored directories
00795             for d in copy(dirs):
00796                 if d == '.' or d == '..':
00797                     dirs.remove(d)
00798 
00799             for file in files:
00800                 file_path = join(root, file)
00801                 resources.append(file_path)
00802 
00803         return resources
00804 
00805     def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
00806         # Handle a single file
00807         if not isinstance(files_paths, list):
00808             files_paths = [files_paths]
00809 
00810         for source in files_paths:
00811             if source is None:
00812                 files_paths.remove(source)
00813 
00814         for source in files_paths:
00815             if resources is not None and source in resources.file_basepath:
00816                 relative_path = relpath(source, resources.file_basepath[source])
00817             elif rel_path is not None:
00818                 relative_path = relpath(source, rel_path)
00819             else:
00820                 _, relative_path = split(source)
00821 
00822             target = join(trg_path, relative_path)
00823 
00824             if (target != source) and (self.need_update(target, [source])):
00825                 self.progress("copy", relative_path)
00826                 mkdir(dirname(target))
00827                 copyfile(source, target)
00828 
00829     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
00830     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00831     def relative_object_path(self, build_path, base_dir, source):
00832         source_dir, name, _ = split_path(source)
00833 
00834         obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
00835         if obj_dir is not self.prev_dir:
00836             self.prev_dir = obj_dir
00837             mkdir(obj_dir)
00838         return join(obj_dir, name + '.o')
00839 
00840     # Generate response file for all includes.
00841     # ARM, GCC, IAR cross compatible
00842     def get_inc_file(self, includes):
00843         include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
00844         if not exists(include_file):
00845             with open(include_file, "w") as f:
00846                 cmd_list = []
00847                 for c in includes:
00848                     if c:
00849                         c = c.replace("\\", "/")
00850                         if self.CHROOT:
00851                             c = c.replace(self.CHROOT, '')
00852                         cmd_list.append('"-I%s"' % c)
00853                 string = " ".join(cmd_list)
00854                 f.write(string)
00855         return include_file
00856 
00857     # Generate response file for all objects when linking.
00858     # ARM, GCC, IAR cross compatible
00859     def get_link_file(self, cmd):
00860         link_file = join(self.build_dir, ".link_files.txt")
00861         with open(link_file, "w") as f:
00862             cmd_list = []
00863             for c in cmd:
00864                 if c:
00865                     c = c.replace("\\", "/")
00866                     if self.CHROOT:
00867                         c = c.replace(self.CHROOT, '')
00868                     cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
00869             string = " ".join(cmd_list)
00870             f.write(string)
00871         return link_file
00872 
00873     # Generate response file for all objects when archiving.
00874     # ARM, GCC, IAR cross compatible
00875     def get_arch_file(self, objects):
00876         archive_file = join(self.build_dir, ".archive_files.txt")
00877         with open(archive_file, "w") as f:
00878             o_list = []
00879             for o in objects:
00880                 o_list.append('"%s"' % o)
00881             string = " ".join(o_list).replace("\\", "/")
00882             f.write(string)
00883         return archive_file
00884 
00885     # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
00886     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
00887     def compile_sources(self, resources, inc_dirs=None):
00888         # Web IDE progress bar for project build
00889         files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
00890         self.to_be_compiled = len(files_to_compile)
00891         self.compiled = 0
00892 
00893         self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))
00894 
00895         inc_paths = resources.inc_dirs
00896         if inc_dirs is not None:
00897             if isinstance(inc_dirs, list):
00898                 inc_paths.extend(inc_dirs)
00899             else:
00900                 inc_paths.append(inc_dirs)
00901         # De-duplicate include paths
00902         inc_paths = set(inc_paths)
00903         # Sort include paths for consistency
00904         inc_paths = sorted(set(inc_paths))
00905         # Unique id of all include paths
00906         self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest()
00907 
00908         objects = []
00909         queue = []
00910         work_dir = getcwd()
00911         self.prev_dir = None
00912 
00913         # Generate configuration header (this will update self.build_all if needed)
00914         self.get_config_header()
00915         self.dump_build_profile()
00916 
00917         # Sort compile queue for consistency
00918         files_to_compile.sort()
00919         for source in files_to_compile:
00920             object = self.relative_object_path(
00921                 self.build_dir, resources.file_basepath[source], source)
00922 
00923             # Queue mode (multiprocessing)
00924             commands = self.compile_command(source, object, inc_paths)
00925             if commands is not None:
00926                 queue.append({
00927                     'source': source,
00928                     'object': object,
00929                     'commands': commands,
00930                     'work_dir': work_dir,
00931                     'chroot': self.CHROOT
00932                 })
00933             else:
00934                 self.compiled += 1
00935                 objects.append(object)
00936 
00937         # Use queues/multiprocessing if cpu count is higher than setting
00938         jobs = self.jobs if self.jobs else cpu_count()
00939         if jobs > CPU_COUNT_MIN and len(queue) > jobs:
00940             return self.compile_queue(queue, objects)
00941         else:
00942             return self.compile_seq(queue, objects)
00943 
00944     # Compile source files queue in sequential order
00945     def compile_seq(self, queue, objects):
00946         for item in queue:
00947             result = compile_worker(item)
00948 
00949             self.compiled += 1
00950             self.progress("compile", item['source'], build_update=True)
00951             for res in result['results']:
00952                 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
00953                 self.compile_output([
00954                     res['code'],
00955                     res['output'],
00956                     res['command']
00957                 ])
00958             objects.append(result['object'])
00959         return objects
00960 
    # Compile source files queue in parallel by creating pool of worker threads
    def compile_queue(self, queue, objects):
        """Compile queued source jobs on a multiprocessing Pool.

        Appends each produced object file to *objects* and returns it.
        Raises ToolException on a compile failure or when the pool does
        not finish within the polling deadline below.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
        p = Pool(processes=jobs_count)

        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))
        p.close()

        # Poll the async results, draining finished jobs as they complete.
        itr = 0
        while len(results):
            itr += 1
            if itr > 180000:
                # NOTE(review): with the 0.01 s sleep per iteration this is
                # roughly a 30-minute deadline, not the 5 minutes the message
                # claims -- confirm which value was intended.
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            sleep(0.01)
            pending = 0
            for r in results:
                if r.ready():
                    try:
                        # get() re-raises any exception the worker raised.
                        result = r.get()
                        # Removing from the list being iterated is tolerated
                        # here because the outer while-loop re-scans the rest.
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException as err:
                        # Drain queued-but-unstarted tasks (private Pool API)
                        # so terminate() does not hang on a full task queue.
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    # Stop scanning once every worker slot is known busy.
                    pending += 1
                    if pending >= jobs_count:
                        break

        results = None
        p.join()

        return objects
01013 
01014     # Determine the compile command based on type of source file
01015     def compile_command(self, source, object, includes):
01016         # Check dependencies
01017         _, ext = splitext(source)
01018         ext = ext.lower()
01019 
01020         if ext == '.c' or  ext == '.cpp':
01021             base, _ = splitext(object)
01022             dep_path = base + '.d'
01023             try:
01024                 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
01025             except (IOError, IndexError):
01026                 deps = []
01027             config_file = ([self.config.app_config_location]
01028                            if self.config.app_config_location else [])
01029             deps.extend(config_file)
01030             if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01031                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx"))
01032             else:
01033                 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c"))
01034             if len(deps) == 0 or self.need_update(object, deps):
01035                 if ext == '.cpp' or self.COMPILE_C_AS_CPP:
01036                     return self.compile_cpp(source, object, includes)
01037                 else:
01038                     return self.compile_c(source, object, includes)
01039         elif ext == '.s':
01040             deps = [source]
01041             deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm"))
01042             if self.need_update(object, deps):
01043                 return self.assemble(source, object, includes)
01044         else:
01045             return False
01046 
01047         return None
01048 
01049     def parse_dependencies(self, dep_path):
01050         """Parse the dependency information generated by the compiler.
01051 
01052         Positional arguments:
01053         dep_path -- the path to a file generated by a previous run of the compiler
01054 
01055         Return value:
01056         A list of all source files that the dependency file indicated were dependencies
01057 
01058         Side effects:
01059         None
01060 
01061         Note: A default implementation is provided for make-like file formats
01062         """
01063         dependencies = []
01064         buff = open(dep_path).readlines()
01065         if buff:
01066             buff[0] = re.sub('^(.*?)\: ', '', buff[0])
01067             for line in buff:
01068                 filename = line.replace('\\\n', '').strip()
01069                 if filename:
01070                     filename = filename.replace('\\ ', '\a')
01071                     dependencies.extend(((self.CHROOT if self.CHROOT else '') +
01072                                          f.replace('\a', ' '))
01073                                         for f in filename.split(" "))
01074         return list(filter(None, dependencies))
01075 
01076     def is_not_supported_error(self, output):
01077         return "#error directive: [NOT_SUPPORTED]" in output
01078 
01079     @abstractmethod
01080     def parse_output(self, output):
01081         """Take in compiler output and extract sinlge line warnings and errors from it.
01082 
01083         Positional arguments:
01084         output -- a string of all the messages emitted by a run of the compiler
01085 
01086         Return value:
01087         None
01088 
01089         Side effects:
01090         call self.cc_info or self.notify with a description of the event generated by the compiler
01091         """
01092         raise NotImplemented
01093 
01094     def compile_output(self, output=[]):
01095         _rc = output[0]
01096         _stderr = output[1].decode("utf-8")
01097         command = output[2]
01098 
01099         # Parse output for Warnings and Errors
01100         self.parse_output(_stderr)
01101         self.debug("Return: %s"% _rc)
01102         for error_line in _stderr.splitlines():
01103             self.debug("Output: %s"% error_line)
01104 
01105         # Check return code
01106         if _rc != 0:
01107             if self.is_not_supported_error(_stderr):
01108                 raise NotSupportedException(_stderr)
01109             else:
01110                 raise ToolException(_stderr)
01111 
01112     def build_library(self, objects, dir, name):
01113         needed_update = False
01114         lib = self.STD_LIB_NAME % name
01115         fout = join(dir, lib)
01116         if self.need_update(fout, objects):
01117             self.info("Library: %s" % lib)
01118             self.archive(objects, fout)
01119             needed_update = True
01120 
01121         return needed_update
01122 
    def link_program(self, r, tmp_path, name):
        """Link the objects/libraries in resources *r* into tmp_path/name.<ext>.

        Returns (full_path, needed_update): the path of the final image and
        whether any link or binary-conversion step actually ran.
        """
        needed_update = False
        # Default image format unless the target overrides it.
        ext = 'bin'
        if hasattr(self.target, 'OUTPUT_EXT'):
            ext = self.target.OUTPUT_EXT

        if hasattr(self.target, 'OUTPUT_NAMING'):
            self.var("binary_naming", self.target.OUTPUT_NAMING)
            # "8.3" naming: truncate to a DOS-style 8-char name, 3-char ext.
            if self.target.OUTPUT_NAMING == "8.3":
                name = name[0:8]
                ext = ext[0:3]

        # Create destination directory
        head, tail =  split(name)
        new_path = join(tmp_path, head)
        mkdir(new_path)

        filename = name+'.'+ext
        # Absolute path of the final linked file
        full_path = join(tmp_path, filename)
        elf = join(tmp_path, name + '.elf')
        # No elf->bin conversion needed when the target's output is the elf.
        bin = None if ext == 'elf' else full_path
        map = join(tmp_path, name + '.map')

        # Sort for deterministic link order; set() also de-duplicates.
        r.objects = sorted(set(r.objects))
        config_file = ([self.config.app_config_location]
                       if self.config.app_config_location else [])
        dependencies = r.objects + r.libraries + [r.linker_script] + config_file
        # Relink whenever the linker flags profile changed as well.
        dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
        if self.need_update(elf, dependencies):
            needed_update = True
            self.progress("link", name)
            self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

        if bin and self.need_update(bin, [elf]):
            needed_update = True
            self.progress("elf2bin", name)
            self.binary(r, elf, bin)

        # Initialize memap and process map file. This doesn't generate output.
        self.mem_stats(map)

        self.var("compile_succeded", True)
        self.var("binary", filename)

        return full_path, needed_update
01169 
01170     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01171     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01172     def default_cmd(self, command):
01173         _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
01174         self.debug("Return: %s"% _rc)
01175 
01176         for output_line in _stdout.splitlines():
01177             self.debug("Output: %s"% output_line)
01178         for error_line in _stderr.splitlines():
01179             self.debug("Errors: %s"% error_line)
01180 
01181         if _rc != 0:
01182             for line in _stderr.splitlines():
01183                 self.tool_error(line)
01184             raise ToolException(_stderr)
01185 
01186     ### NOTIFICATIONS ###
01187     def info(self, message):
01188         self.notify({'type': 'info', 'message': message})
01189 
01190     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01191     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01192     def debug(self, message):
01193         if self.VERBOSE:
01194             if isinstance(message, list):
01195                 message = ' '.join(message)
01196             message = "[DEBUG] " + message
01197             self.notify({'type': 'debug', 'message': message})
01198 
01199     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01200     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01201     def cc_info(self, info=None):
01202         if info is not None:
01203             info['type'] = 'cc'
01204             self.notify(info)
01205 
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def cc_verbose(self, message, file=""):
        # Thin forward to debug(); *file* is unused here but the signature
        # must keep it for the online-build-system override (see note above).
        self.debug(message)
01210 
01211     def progress(self, action, file, build_update=False):
01212         msg = {'type': 'progress', 'action': action, 'file': file}
01213         if build_update:
01214             msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
01215         self.notify(msg)
01216 
01217     def tool_error(self, message):
01218         self.notify({'type': 'tool_error', 'message': message})
01219 
01220     def var(self, key, value):
01221         self.notify({'type': 'var', 'key': key, 'val': value})
01222 
01223     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
01224     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
01225     def mem_stats(self, map):
01226         """! Creates parser object
01227         @param map Path to linker map file to parse and decode
01228         @return None
01229         """
01230         toolchain = self.__class__.__name__
01231 
01232         # Create memap object
01233         memap = MemapParser()
01234 
01235         # Parse and decode a map file
01236         if memap.parse(abspath(map), toolchain) is False:
01237             self.info("Unknown toolchain for memory statistics %s" % toolchain)
01238             return None
01239 
01240         # Store the memap instance for later use
01241         self.memap_instance = memap
01242 
01243         # Note: memory statistics are not returned.
01244         # Need call to generate_output later (depends on depth & output format)
01245 
01246         return None
01247 
01248     def add_regions(self):
01249         """Add regions to the build profile, if there are any.
01250         """
01251         print("Using regions in this build:")
01252         for region in self.config.regions:
01253             for define in [(region.name.upper() + "_ADDR", region.start),
01254                            (region.name.upper() + "_SIZE", region.size)]:
01255                 define_string = "-D%s=0x%x" %  define
01256                 self.cc.append(define_string)
01257                 self.cppc.append(define_string)
01258                 self.flags["common"].append(define_string)
01259             if region.active:
01260                 for define in [("MBED_APP_START", region.start),
01261                                ("MBED_APP_SIZE", region.size)]:
01262                     define_string = self.make_ld_define(*define)
01263                     self.ld.append(define_string)
01264                     self.flags["ld"].append(define_string)
01265             print("  Region %s size 0x%x, offset 0x%x"
01266                     % (region.name, region.size, region.start))
01267 
01268     # Set the configuration data
01269     def set_config_data(self, config_data):
01270         self.config_data = config_data
01271         if self.config.has_regions:
01272             self.add_regions()
01273 
01274     # Creates the configuration header if needed:
01275     # - if there is no configuration data, "mbed_config.h" is not create (or deleted if it exists).
01276     # - if there is configuration data and "mbed_config.h" does not exist, it is created.
01277     # - if there is configuration data similar to the previous configuration data,
01278     #   "mbed_config.h" is left untouched.
01279     # - if there is new configuration data, "mbed_config.h" is overriden.
01280     # The function needs to be called exactly once for the lifetime of this toolchain instance.
01281     # The "config_processed" variable (below) ensures this behaviour.
01282     # The function returns the location of the configuration file, or None if there is no
01283     # configuration data available (and thus no configuration file)
01284     def get_config_header(self):
01285         if self.config_processed: # this function was already called, return its result
01286             return self.config_file
01287         # The config file is located in the build directory
01288         self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
01289         # If the file exists, read its current content in prev_data
01290         if exists(self.config_file):
01291             with open(self.config_file, "r") as f:
01292                 prev_data = f.read()
01293         else:
01294             prev_data = None
01295         # Get the current configuration data
01296         crt_data = self.config.config_to_header(self.config_data) if self.config_data else None
01297         # "changed" indicates if a configuration change was detected
01298         changed = False
01299         if prev_data is not None: # a previous mbed_config.h exists
01300             if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
01301                 remove(self.config_file)
01302                 self.config_file = None # this means "config file not present"
01303                 changed = True
01304             elif crt_data != prev_data: # different content of config file
01305                 with open(self.config_file, "w") as f:
01306                     f.write(crt_data)
01307                 changed = True
01308         else: # a previous mbed_config.h does not exist
01309             if crt_data is not None: # there's configuration data available
01310                 with open(self.config_file, "w") as f:
01311                     f.write(crt_data)
01312                 changed = True
01313             else:
01314                 self.config_file = None # this means "config file not present"
01315         # If there was a change in configuration, rebuild everything
01316         self.build_all = changed
01317         # Make sure that this function will only return the location of the configuration
01318         # file for subsequent calls, without trying to manipulate its content in any way.
01319         self.config_processed = True
01320         return self.config_file
01321 
01322     def dump_build_profile(self):
01323         """Dump the current build profile and macros into the `.profile` file
01324         in the build directory"""
01325         for key in ["cxx", "c", "asm", "ld"]:
01326             to_dump = (str(self.flags[key]) + str(sorted(self.macros)))
01327             if key in ["cxx", "c"]:
01328                 to_dump += str(self.flags['common'])
01329             where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key)
01330             self._overwrite_when_not_equal(where, to_dump)
01331 
01332     @staticmethod
01333     def _overwrite_when_not_equal(filename, content):
01334         if not exists(filename) or content != open(filename).read():
01335             with open(filename, "w") as out:
01336                 out.write(content)
01337 
01338     @staticmethod
01339     def generic_check_executable(tool_key, executable_name, levels_up,
01340                                  nested_dir=None):
01341         """
01342         Positional args:
01343         tool_key: the key to index TOOLCHAIN_PATHS
01344         executable_name: the toolchain's named executable (ex. armcc)
01345         levels_up: each toolchain joins the toolchain_path, some
01346         variable directories (bin, include), and the executable name,
01347         so the TOOLCHAIN_PATH value must be appropriately distanced
01348 
01349         Keyword args:
01350         nested_dir: the directory within TOOLCHAIN_PATHS where the executable
01351           is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path
01352           that will be used by toolchain's compile)
01353 
01354         Returns True if the executable location specified by the user
01355         exists and is valid OR the executable can be found on the PATH.
01356         Returns False otherwise.
01357         """
01358         # Search PATH if user did not specify a path or specified path doesn't
01359         # exist.
01360         if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]):
01361             exe = find_executable(executable_name)
01362             if not exe:
01363                 return False
01364             for level in range(levels_up):
01365                 # move up the specified number of directories
01366                 exe = dirname(exe)
01367             TOOLCHAIN_PATHS[tool_key] = exe
01368         if nested_dir:
01369             subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir,
01370                           executable_name)
01371         else:
01372             subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name)
01373         # User could have specified a path that exists but does not contain exe
01374         return exists(subdir) or exists(subdir +'.exe')
01375 
01376     @abstractmethod
01377     def check_executable(self):
01378         """Returns True if the executable (armcc) location specified by the
01379          user exists OR the executable can be found on the PATH.
01380          Returns False otherwise."""
01381         raise NotImplemented
01382 
01383     @abstractmethod
01384     def get_config_option(self, config_header):
01385         """Generate the compiler option that forces the inclusion of the configuration
01386         header file.
01387 
01388         Positional arguments:
01389         config_header -- The configuration header that will be included within all source files
01390 
01391         Return value:
01392         A list of the command line arguments that will force the inclusion the specified header
01393 
01394         Side effects:
01395         None
01396         """
01397         raise NotImplemented
01398 
01399     @abstractmethod
01400     def get_compile_options(self, defines, includes, for_asm=False):
01401         """Generate the compiler options from the defines and includes
01402 
01403         Positional arguments:
01404         defines -- The preprocessor macros defined on the command line
01405         includes -- The include file search paths
01406 
01407         Keyword arguments:
01408         for_asm -- generate the assembler options instead of the compiler options
01409 
01410         Return value:
01411         A list of the command line arguments that will force the inclusion the specified header
01412 
01413         Side effects:
01414         None
01415         """
01416         raise NotImplemented
01417 
01418     @abstractmethod
01419     def assemble(self, source, object, includes):
01420         """Generate the command line that assembles.
01421 
01422         Positional arguments:
01423         source -- a file path that is the file to assemble
01424         object -- a file path that is the destination object
01425         includes -- a list of all directories where header files may be found
01426 
01427         Return value:
01428         The complete command line, as a list, that would invoke the assembler
01429         on the source file, include all the include paths, and generate
01430         the specified object file.
01431 
01432         Side effects:
01433         None
01434 
01435         Note:
01436         This method should be decorated with @hook_tool.
01437         """
01438         raise NotImplemented
01439 
01440     @abstractmethod
01441     def compile_c(self, source, object, includes):
01442         """Generate the command line that compiles a C source file.
01443 
01444         Positional arguments:
01445         source -- the C source file to compile
01446         object -- the destination object file
01447         includes -- a list of all the directories where header files may be found
01448 
01449         Return value:
01450         The complete command line, as a list, that would invoke the C compiler
01451         on the source file, include all the include paths, and generate the
01452         specified object file.
01453 
01454         Side effects:
01455         None
01456 
01457         Note:
01458         This method should be decorated with @hook_tool.
01459         """
01460         raise NotImplemented
01461 
01462     @abstractmethod
01463     def compile_cpp(self, source, object, includes):
01464         """Generate the command line that compiles a C++ source file.
01465 
01466         Positional arguments:
01467         source -- the C++ source file to compile
01468         object -- the destination object file
01469         includes -- a list of all the directories where header files may be found
01470 
01471         Return value:
01472         The complete command line, as a list, that would invoke the C++ compiler
01473         on the source file, include all the include paths, and generate the
01474         specified object file.
01475 
01476         Side effects:
01477         None
01478 
01479         Note:
01480         This method should be decorated with @hook_tool.
01481         """
01482         raise NotImplemented
01483 
01484     @abstractmethod
01485     def link(self, output, objects, libraries, lib_dirs, mem_map):
01486         """Run the linker to create an executable and memory map.
01487 
01488         Positional arguments:
01489         output -- the file name to place the executable in
01490         objects -- all of the object files to link
01491         libraries -- all of the required libraries
01492         lib_dirs -- where the required libraries are located
01493         mem_map -- the location where the memory map file should be stored
01494 
01495         Return value:
01496         None
01497 
01498         Side effect:
01499         Runs the linker to produce the executable.
01500 
01501         Note:
01502         This method should be decorated with @hook_tool.
01503         """
01504         raise NotImplemented
01505 
01506     @abstractmethod
01507     def archive(self, objects, lib_path):
01508         """Run the command line that creates an archive.
01509 
01510         Positional arguhments:
01511         objects -- a list of all the object files that should be archived
01512         lib_path -- the file name of the resulting library file
01513 
01514         Return value:
01515         None
01516 
01517         Side effect:
01518         Runs the archiving tool to produce the library file.
01519 
01520         Note:
01521         This method should be decorated with @hook_tool.
01522         """
01523         raise NotImplemented
01524 
01525     @abstractmethod
01526     def binary(self, resources, elf, bin):
01527         """Run the command line that will Extract a simplified binary file.
01528 
01529         Positional arguments:
01530         resources -- A resources object (Is not used in any of the toolchains)
01531         elf -- the executable file that is to be converted
01532         bin -- the file name of the to be created simplified binary file
01533 
01534         Return value:
01535         None
01536 
01537         Side effect:
01538         Runs the elf2bin tool to produce the simplified binary file.
01539 
01540         Note:
01541         This method should be decorated with @hook_tool.
01542         """
01543         raise NotImplemented
01544 
01545     @staticmethod
01546     @abstractmethod
01547     def name_mangle(name):
01548         """Mangle a name based on the conventional name mangling of this toolchain
01549 
01550         Positional arguments:
01551         name -- the name to mangle
01552 
01553         Return:
01554         the mangled name as a string
01555         """
01556         raise NotImplemented
01557 
01558     @staticmethod
01559     @abstractmethod
01560     def make_ld_define(name, value):
01561         """Create an argument to the linker that would define a symbol
01562 
01563         Positional arguments:
01564         name -- the symbol to define
01565         value -- the value to give the symbol
01566 
01567         Return:
01568         The linker flag as a string
01569         """
01570         raise NotImplemented
01571 
01572     @staticmethod
01573     @abstractmethod
01574     def redirect_symbol(source, sync, build_dir):
01575         """Redirect a symbol at link time to point at somewhere else
01576 
01577         Positional arguments:
01578         source -- the symbol doing the pointing
01579         sync -- the symbol being pointed to
01580         build_dir -- the directory to put "response files" if needed by the toolchain
01581 
01582         Side Effects:
01583         Possibly create a file in the build directory
01584 
01585         Return:
01586         The linker flag to redirect the symbol, as a string
01587         """
01588         raise NotImplemented
01589 
01590     # Return the list of macros geenrated by the build system
01591     def get_config_macros(self):
01592         return self.config.config_to_macros(self.config_data) if self.config_data else []
01593 
01594     @property
01595     def report(self):
01596         to_ret = {}
01597         to_ret['c_compiler'] = {'flags': copy(self.flags['c']),
01598                                 'symbols': self.get_symbols()}
01599         to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']),
01600                                   'symbols': self.get_symbols()}
01601         to_ret['assembler'] = {'flags': copy(self.flags['asm']),
01602                                'symbols': self.get_symbols(True)}
01603         to_ret['linker'] = {'flags': copy(self.flags['ld'])}
01604         to_ret.update(self.config.report)
01605         return to_ret
01606 
# NOTE(review): these imports sit at the bottom of the module, after the
# class definitions above — presumably to avoid a circular import with the
# tools.settings / tools.toolchains.* modules; confirm before moving them.
from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH

# Maps each supported toolchain name to its (user-configurable) install path.
# 'ARM' and 'uARM' share the same ARM Compiler 5 installation.
TOOLCHAIN_PATHS = {
    'ARM': ARM_PATH,
    'uARM': ARM_PATH,
    'ARMC6': ARMC6_PATH,
    'GCC_ARM': GCC_ARM_PATH,
    'IAR': IAR_PATH
}

from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6
from tools.toolchains.gcc import GCC_ARM
from tools.toolchains.iar import IAR

# Maps each toolchain name to the toolchain class that implements it.
# Keys are unicode literals so lookups succeed whether callers pass
# str or unicode names under Python 2; identical to plain str on Python 3.
TOOLCHAIN_CLASSES = {
    u'ARM': ARM_STD,
    u'uARM': ARM_MICRO,
    u'ARMC6': ARMC6,
    u'GCC_ARM': GCC_ARM,
    u'IAR': IAR
}

# The set of all supported toolchain names.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())