Marco Zecchini
/
Example_RTOS
Rtos API example
Embed:
(wiki syntax)
Show/hide line numbers
__init__.py
00001 """ 00002 mbed SDK 00003 Copyright (c) 2011-2013 ARM Limited 00004 00005 Licensed under the Apache License, Version 2.0 (the "License"); 00006 you may not use this file except in compliance with the License. 00007 You may obtain a copy of the License at 00008 00009 http://www.apache.org/licenses/LICENSE-2.0 00010 00011 Unless required by applicable law or agreed to in writing, software 00012 distributed under the License is distributed on an "AS IS" BASIS, 00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 See the License for the specific language governing permissions and 00015 limitations under the License. 00016 """ 00017 00018 import re 00019 import sys 00020 from os import stat, walk, getcwd, sep, remove 00021 from copy import copy 00022 from time import time, sleep 00023 from types import ListType 00024 from shutil import copyfile 00025 from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir, normcase 00026 from itertools import chain 00027 from inspect import getmro 00028 from copy import deepcopy 00029 from abc import ABCMeta, abstractmethod 00030 from distutils.spawn import find_executable 00031 00032 from multiprocessing import Pool, cpu_count 00033 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker 00034 from tools.settings import MBED_ORG_USER 00035 import tools.hooks as hooks 00036 from tools.memap import MemapParser 00037 from hashlib import md5 00038 import fnmatch 00039 00040 00041 #Disables multiprocessing if set to higher number than the host machine CPUs 00042 CPU_COUNT_MIN = 1 00043 CPU_COEF = 1 00044 00045 class LazyDict(dict): 00046 def __init__(self): 00047 self.eager = {} 00048 self.lazy = {} 00049 00050 def add_lazy(self, key, thunk): 00051 if key in self.eager: 00052 del self.eager[key] 00053 self.lazy[key] = thunk 00054 00055 def __getitem__(self, key): 00056 if (key not in self.eager 00057 and key in self.lazy): 00058 self.eager[key] = self.lazy[key]() 00059 del self.lazy[key] 00060 return self.eager[key] 00061 00062 def __setitem__(self, key, value): 00063 self.eager[key] = value 00064 00065 def __delitem__(self, key): 00066 if key in self.eager: 00067 del self.eager[key] 00068 else: 00069 del self.lazy[key] 00070 00071 def __contains__(self, key): 00072 return key in self.eager or key in self.lazy 00073 00074 def __iter__(self): 00075 return chain(iter(self.eager), iter(self.lazy)) 00076 00077 def __len__(self): 00078 return len(self.eager) + len(self.lazy) 00079 00080 def __str__(self): 00081 return "Lazy{%s}" % ( 00082 ", ".join("%r: %r" % (k, v) for k, v in 00083 chain(self.eager.iteritems(), ((k, "not evaluated") 00084 for k in self.lazy)))) 00085 00086 def update(self, other): 00087 if isinstance(other, LazyDict): 00088 self.eager.update(other.eager) 00089 self.lazy.update(other.lazy) 00090 else: 00091 self.eager.update(other) 00092 00093 def iteritems(self): 00094 """Warning: This forces the evaluation all of the items in this LazyDict 00095 that are iterated over.""" 00096 for k, v in self.eager.iteritems(): 00097 yield k, v 00098 for k in self.lazy.keys(): 00099 yield k, self[k] 00100 00101 def apply(self, fn): 00102 """Delay the application of a computation to all items of the lazy dict. 00103 Does no computation now. Instead the comuptation is performed when a 00104 consumer attempts to access a value in this LazyDict""" 00105 new_lazy = {} 00106 for k, f in self.lazy.iteritems(): 00107 def closure(f=f): 00108 return fn(f()) 00109 new_lazy[k] = closure 00110 for k, v in self.eager.iteritems(): 00111 def closure(v=v): 00112 return fn(v) 00113 new_lazy[k] = closure 00114 self.lazy = new_lazy 00115 self.eager = {} 00116 00117 class Resources: 00118 def __init__(self, base_path=None, collect_ignores=False): 00119 self.base_path = base_path 00120 self.collect_ignores = collect_ignores 00121 00122 self.file_basepath = {} 00123 00124 self.inc_dirs = [] 00125 self.headers = [] 00126 00127 self.s_sources = [] 00128 self.c_sources = [] 00129 self.cpp_sources = [] 00130 00131 self.lib_dirs = set([]) 00132 self.objects = [] 00133 self.libraries = [] 00134 00135 # mbed special files 00136 self.lib_builds = [] 00137 self.lib_refs = [] 00138 00139 self.repo_dirs = [] 00140 self.repo_files = [] 00141 00142 self.linker_script = None 00143 00144 # Other files 00145 self.hex_files = [] 00146 self.bin_files = [] 00147 self.json_files = [] 00148 00149 # Features 00150 self.features = LazyDict() 00151 self.ignored_dirs = [] 00152 00153 def __add__(self, resources): 00154 if resources is None: 00155 return self 00156 else: 00157 return self.add(resources) 00158 00159 def __radd__(self, resources): 00160 if resources is None: 00161 return self 00162 else: 00163 return self.add(resources) 00164 00165 def ignore_dir(self, directory): 00166 if self.collect_ignores: 00167 self.ignored_dirs.append(directory) 00168 00169 def add(self, resources): 00170 for f,p in resources.file_basepath.items(): 00171 self.file_basepath[f] = p 00172 00173 self.inc_dirs += resources.inc_dirs 00174 self.headers += resources.headers 00175 00176 self.s_sources += resources.s_sources 00177 self.c_sources += resources.c_sources 00178 self.cpp_sources += resources.cpp_sources 00179 00180 self.lib_dirs |= resources.lib_dirs 00181 self.objects += resources.objects 00182 self.libraries += resources.libraries 00183 00184 self.lib_builds += resources.lib_builds 00185 self.lib_refs += resources.lib_refs 00186 00187 self.repo_dirs += resources.repo_dirs 00188 self.repo_files += resources.repo_files 00189 00190 if resources.linker_script is not None: 00191 self.linker_script = resources.linker_script 00192 00193 self.hex_files += resources.hex_files 00194 self.bin_files += resources.bin_files 00195 self.json_files += resources.json_files 00196 00197 self.features.update(resources.features) 00198 self.ignored_dirs += resources.ignored_dirs 00199 00200 return self 00201 00202 def _collect_duplicates(self, dupe_dict, dupe_headers): 00203 for filename in self.s_sources + self.c_sources + self.cpp_sources: 00204 objname, _ = splitext(basename(filename)) 00205 dupe_dict.setdefault(objname, set()) 00206 dupe_dict[objname] |= set([filename]) 00207 for filename in self.headers: 00208 headername = basename(filename) 00209 dupe_headers.setdefault(headername, set()) 00210 dupe_headers[headername] |= set([headername]) 00211 for res in self.features.values(): 00212 res._collect_duplicates(dupe_dict, dupe_headers) 00213 return dupe_dict, dupe_headers 00214 00215 def detect_duplicates(self, toolchain): 00216 """Detect all potential ambiguities in filenames and report them with 00217 a toolchain notification 00218 00219 Positional Arguments: 00220 toolchain - used for notifications 00221 """ 00222 count = 0 00223 dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict()) 00224 for objname, filenames in dupe_dict.iteritems(): 00225 if len(filenames) > 1: 00226 count+=1 00227 toolchain.tool_error( 00228 "Object file %s.o is not unique! It could be made from: %s"\ 00229 % (objname, " ".join(filenames))) 00230 for headername, locations in dupe_headers.iteritems(): 00231 if len(locations) > 1: 00232 count+=1 00233 toolchain.tool_error( 00234 "Header file %s is not unique! It could be: %s" %\ 00235 (headername, " ".join(locations))) 00236 return count 00237 00238 00239 def relative_to(self, base, dot=False): 00240 for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', 00241 'cpp_sources', 'lib_dirs', 'objects', 'libraries', 00242 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 00243 'hex_files', 'bin_files', 'json_files']: 00244 v = [rel_path(f, base, dot) for f in getattr(self, field)] 00245 setattr(self, field, v) 00246 00247 def to_apply(feature, base=base, dot=dot): 00248 feature.relative_to(base, dot) 00249 self.features.apply(to_apply) 00250 00251 if self.linker_script is not None: 00252 self.linker_script = rel_path(self.linker_script, base, dot) 00253 00254 def win_to_unix(self): 00255 for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', 00256 'cpp_sources', 'lib_dirs', 'objects', 'libraries', 00257 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 00258 'hex_files', 'bin_files', 'json_files']: 00259 v = [f.replace('\\', '/') for f in getattr(self, field)] 00260 setattr(self, field, v) 00261 00262 def to_apply(feature): 00263 feature.win_to_unix() 00264 self.features.apply(to_apply) 00265 00266 if self.linker_script is not None: 00267 self.linker_script = self.linker_script.replace('\\', '/') 00268 00269 def __str__(self): 00270 s = [] 00271 00272 for (label, resources) in ( 00273 ('Include Directories', self.inc_dirs), 00274 ('Headers', self.headers), 00275 00276 ('Assembly sources', self.s_sources), 00277 ('C sources', self.c_sources), 00278 ('C++ sources', self.cpp_sources), 00279 00280 ('Library directories', self.lib_dirs), 00281 ('Objects', self.objects), 00282 ('Libraries', self.libraries), 00283 00284 ('Hex files', self.hex_files), 00285 ('Bin files', self.bin_files), 00286 00287 ('Features', self.features), 00288 ): 00289 if resources: 00290 s.append('%s:\n ' % label + '\n '.join(resources)) 00291 00292 if self.linker_script: 00293 s.append('Linker Script: ' + self.linker_script) 00294 00295 return '\n'.join(s) 00296 00297 # Support legacy build conventions: the original mbed build system did not have 00298 # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but 00299 # had the knowledge of a list of these directories to be ignored. 00300 LEGACY_IGNORE_DIRS = set([ 00301 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z', 00302 'ARM', 'uARM', 'IAR', 00303 'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB', 00304 'ARMC6' 00305 ]) 00306 LEGACY_TOOLCHAIN_NAMES = { 00307 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM', 00308 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR', 00309 'IAR': 'IAR', 00310 'ARMC6': 'ARMC6', 00311 } 00312 00313 00314 class mbedToolchain: 00315 # Verbose logging 00316 VERBOSE = True 00317 00318 # Compile C files as CPP 00319 COMPILE_C_AS_CPP = False 00320 00321 # Response files for compiling, includes, linking and archiving. 00322 # Not needed on posix systems where the typical arg limit is 2 megabytes 00323 RESPONSE_FILES = True 00324 00325 CORTEX_SYMBOLS = { 00326 "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00327 "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00328 "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00329 "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00330 "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00331 "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00332 "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00333 "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00334 "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00335 "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"], 00336 "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00337 "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00338 "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00339 "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00340 } 00341 00342 MBED_CONFIG_FILE_NAME="mbed_config.h" 00343 00344 PROFILE_FILE_NAME = ".profile" 00345 00346 __metaclass__ = ABCMeta 00347 00348 profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]} 00349 00350 def __init__(self, target, notify=None, macros=None, silent=False, 00351 extra_verbose=False, build_profile=None, build_dir=None): 00352 self.target = target 00353 self.name = self.__class__.__name__ 00354 00355 # compile/assemble/link/binary hooks 00356 self.hook = hooks.Hook(target, self) 00357 00358 # Toolchain flags 00359 self.flags = deepcopy(build_profile or self.profile_template) 00360 00361 # System libraries provided by the toolchain 00362 self.sys_libs = [] 00363 00364 # User-defined macros 00365 self.macros = macros or [] 00366 00367 # Macros generated from toolchain and target rules/features 00368 self.asm_symbols = None 00369 self.cxx_symbols = None 00370 00371 # Labels generated from toolchain and target rules/features (used for selective build) 00372 self.labels = None 00373 00374 # This will hold the initialized config object 00375 self.config = None 00376 00377 # This will hold the configuration data (as returned by Config.get_config_data()) 00378 self.config_data = None 00379 00380 # This will hold the location of the configuration file or None if there's no configuration available 00381 self.config_file = None 00382 00383 # Call guard for "get_config_data" (see the comments of get_config_data for details) 00384 self.config_processed = False 00385 00386 # Non-incremental compile 00387 self.build_all = False 00388 00389 # Build output dir 00390 self.build_dir = build_dir 00391 self.timestamp = time() 00392 00393 # Number of concurrent build jobs. 0 means auto (based on host system cores) 00394 self.jobs = 0 00395 00396 # Ignore patterns from .mbedignore files 00397 self.ignore_patterns = [] 00398 self._ignore_regex = re.compile("$^") 00399 00400 # Pre-mbed 2.0 ignore dirs 00401 self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]]) 00402 00403 # Output notify function 00404 # This function is passed all events, and expected to handle notification of the 00405 # user, emit the events to a log, etc. 00406 # The API for all notify methods passed into the notify parameter is as follows: 00407 # def notify(Event, Silent) 00408 # Where *Event* is a dict representing the toolchain event that was generated 00409 # e.g.: a compile succeeded, or a warning was emitted by the compiler 00410 # or an application was linked 00411 # *Silent* is a boolean 00412 if notify: 00413 self.notify_fun = notify 00414 elif extra_verbose: 00415 self.notify_fun = self.print_notify_verbose 00416 else: 00417 self.notify_fun = self.print_notify 00418 00419 # Silent builds (no output) 00420 self.silent = silent 00421 00422 # Print output buffer 00423 self.output = str() 00424 00425 # uVisor spepcific rules 00426 if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels: 00427 self.target.core = re.sub(r"F$", '', self.target.core) 00428 00429 # Stats cache is used to reduce the amount of IO requests to stat 00430 # header files during dependency change. See need_update() 00431 self.stat_cache = {} 00432 00433 # Used by the mbed Online Build System to build in chrooted environment 00434 self.CHROOT = None 00435 00436 # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over 00437 self.init() 00438 00439 # Used for post __init__() hooks 00440 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 00441 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00442 def init(self): 00443 return True 00444 00445 def get_output(self): 00446 return self.output 00447 00448 def print_notify(self, event, silent=False): 00449 """ Default command line notification 00450 """ 00451 msg = None 00452 00453 if not self.VERBOSE and event['type'] == 'tool_error': 00454 msg = event['message'] 00455 00456 elif event['type'] in ['info', 'debug']: 00457 msg = event['message'] 00458 00459 elif event['type'] == 'cc': 00460 event['severity'] = event['severity'].title() 00461 event['file'] = basename(event['file']) 00462 msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event 00463 00464 elif event['type'] == 'progress': 00465 if 'percent' in event: 00466 msg = '{} [{:>5.1f}%]: {}'.format(event['action'].title(), 00467 event['percent'], 00468 basename(event['file'])) 00469 else: 00470 msg = '{}: {}'.format(event['action'].title(), 00471 basename(event['file'])) 00472 00473 if msg: 00474 if not silent: 00475 print msg 00476 self.output += msg + "\n" 00477 00478 def print_notify_verbose(self, event, silent=False): 00479 """ Default command line notification with more verbose mode 00480 """ 00481 if event['type'] in ['info', 'debug']: 00482 self.print_notify(event, silent=silent) # standard handle 00483 00484 elif event['type'] == 'cc': 00485 event['severity'] = event['severity'].title() 00486 event['file'] = basename(event['file']) 00487 event['mcu_name'] = "None" 00488 event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown" 00489 event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown" 00490 msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event 00491 if not silent: 00492 print msg 00493 self.output += msg + "\n" 00494 00495 elif event['type'] == 'progress': 00496 self.print_notify(event) # standard handle 00497 00498 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 00499 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00500 def notify(self, event): 00501 """ Little closure for notify functions 00502 """ 00503 event['toolchain'] = self 00504 return self.notify_fun(event, self.silent) 00505 00506 def get_symbols(self, for_asm=False): 00507 if for_asm: 00508 if self.asm_symbols is None: 00509 self.asm_symbols = [] 00510 00511 # Cortex CPU symbols 00512 if self.target.core in mbedToolchain.CORTEX_SYMBOLS: 00513 self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core]) 00514 00515 # Add target's symbols 00516 self.asm_symbols += self.target.macros 00517 # Add extra symbols passed via 'macros' parameter 00518 self.asm_symbols += self.macros 00519 return list(set(self.asm_symbols)) # Return only unique symbols 00520 else: 00521 if self.cxx_symbols is None: 00522 # Target and Toolchain symbols 00523 labels = self.get_labels() 00524 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']] 00525 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']]) 00526 00527 # Cortex CPU symbols 00528 if self.target.core in mbedToolchain.CORTEX_SYMBOLS: 00529 self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core]) 00530 00531 # Symbols defined by the on-line build.system 00532 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1']) 00533 if MBED_ORG_USER: 00534 self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER) 00535 00536 # Add target's symbols 00537 self.cxx_symbols += self.target.macros 00538 # Add target's hardware 00539 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has] 00540 # Add target's features 00541 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features] 00542 # Add extra symbols passed via 'macros' parameter 00543 self.cxx_symbols += self.macros 00544 00545 # Form factor variables 00546 if hasattr(self.target, 'supported_form_factors'): 00547 self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors]) 00548 00549 return list(set(self.cxx_symbols)) # Return only unique symbols 00550 00551 # Extend the internal list of macros 00552 def add_macros(self, new_macros): 00553 self.macros.extend(new_macros) 00554 00555 def get_labels(self): 00556 if self.labels is None: 00557 toolchain_labels = [c.__name__ for c in getmro(self.__class__)] 00558 toolchain_labels.remove('mbedToolchain') 00559 self.labels = { 00560 'TARGET': self.target.labels, 00561 'FEATURE': self.target.features, 00562 'TOOLCHAIN': toolchain_labels 00563 } 00564 00565 # This is a policy decision and it should /really/ be in the config system 00566 # ATM it's here for backward compatibility 00567 if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and 00568 "-O0" in self.flags['common']) or 00569 ("-r" in self.flags['common'] and 00570 "-On" in self.flags['common'])): 00571 self.labels['TARGET'].append("DEBUG") 00572 else: 00573 self.labels['TARGET'].append("RELEASE") 00574 return self.labels 00575 00576 00577 # Determine whether a source file needs updating/compiling 00578 def need_update(self, target, dependencies): 00579 if self.build_all: 00580 return True 00581 00582 if not exists(target): 00583 return True 00584 00585 target_mod_time = stat(target).st_mtime 00586 00587 for d in dependencies: 00588 # Some objects are not provided with full path and here we do not have 00589 # information about the library paths. Safe option: assume an update 00590 if not d or not exists(d): 00591 return True 00592 00593 if not self.stat_cache.has_key(d): 00594 self.stat_cache[d] = stat(d).st_mtime 00595 00596 if self.stat_cache[d] >= target_mod_time: 00597 return True 00598 00599 return False 00600 00601 def is_ignored(self, file_path): 00602 """Check if file path is ignored by any .mbedignore thus far""" 00603 return self._ignore_regex.match(normcase(file_path)) 00604 00605 def add_ignore_patterns(self, root, base_path, patterns): 00606 """Add a series of patterns to the ignored paths 00607 00608 Positional arguments: 00609 root - the directory containing the ignore file 00610 base_path - the location that the scan started from 00611 patterns - the list of patterns we will ignore in the future 00612 """ 00613 real_base = relpath(root, base_path) 00614 if real_base == ".": 00615 self.ignore_patterns.extend(normcase(p) for p in patterns) 00616 else: 00617 self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns) 00618 if self.ignore_patterns: 00619 self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns)) 00620 00621 # Create a Resources object from the path pointed to by *path* by either traversing a 00622 # a directory structure, when *path* is a directory, or adding *path* to the resources, 00623 # when *path* is a file. 00624 # The parameter *base_path* is used to set the base_path attribute of the Resources 00625 # object and the parameter *exclude_paths* is used by the directory traversal to 00626 # exclude certain paths from the traversal. 00627 def scan_resources(self, path, exclude_paths=None, base_path=None, 00628 collect_ignores=False): 00629 self.progress("scan", path) 00630 00631 resources = Resources(path, collect_ignores=collect_ignores) 00632 if not base_path: 00633 if isfile(path): 00634 base_path = dirname(path) 00635 else: 00636 base_path = path 00637 resources.base_path = base_path 00638 00639 if isfile(path): 00640 self._add_file(path, resources, base_path, exclude_paths=exclude_paths) 00641 else: 00642 self._add_dir(path, resources, base_path, exclude_paths=exclude_paths) 00643 return resources 00644 00645 # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a 00646 # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file 00647 # on every file it considers adding to the resources object. 00648 def _add_dir(self, path, resources, base_path, exclude_paths=None): 00649 """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) 00650 When topdown is True, the caller can modify the dirnames list in-place 00651 (perhaps using del or slice assignment), and walk() will only recurse into 00652 the subdirectories whose names remain in dirnames; this can be used to prune 00653 the search, impose a specific order of visiting, or even to inform walk() 00654 about directories the caller creates or renames before it resumes walk() 00655 again. Modifying dirnames when topdown is False is ineffective, because in 00656 bottom-up mode the directories in dirnames are generated before dirpath 00657 itself is generated. 00658 """ 00659 labels = self.get_labels() 00660 for root, dirs, files in walk(path, followlinks=True): 00661 # Check if folder contains .mbedignore 00662 if ".mbedignore" in files: 00663 with open (join(root,".mbedignore"), "r") as f: 00664 lines=f.readlines() 00665 lines = [l.strip() for l in lines] # Strip whitespaces 00666 lines = [l for l in lines if l != ""] # Strip empty lines 00667 lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines 00668 # Append root path to glob patterns and append patterns to ignore_patterns 00669 self.add_ignore_patterns(root, base_path, lines) 00670 00671 # Skip the whole folder if ignored, e.g. .mbedignore containing '*' 00672 root_path =join(relpath(root, base_path)) 00673 if (self.is_ignored(join(root_path,"")) or 00674 self.build_dir == root_path): 00675 resources.ignore_dir(root_path) 00676 dirs[:] = [] 00677 continue 00678 00679 for d in copy(dirs): 00680 dir_path = join(root, d) 00681 # Add internal repo folders/files. This is needed for exporters 00682 if d == '.hg' or d == '.git': 00683 resources.repo_dirs.append(dir_path) 00684 00685 if ((d.startswith('.') or d in self.legacy_ignore_dirs) or 00686 # Ignore targets that do not match the TARGET in extra_labels list 00687 (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or 00688 # Ignore toolchain that do not match the current TOOLCHAIN 00689 (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or 00690 # Ignore .mbedignore files 00691 self.is_ignored(join(relpath(root, base_path), d,"")) or 00692 # Ignore TESTS dir 00693 (d == 'TESTS')): 00694 resources.ignore_dir(dir_path) 00695 dirs.remove(d) 00696 elif d.startswith('FEATURE_'): 00697 # Recursively scan features but ignore them in the current scan. 00698 # These are dynamically added by the config system if the conditions are matched 00699 def closure (dir_path=dir_path, base_path=base_path): 00700 return self.scan_resources(dir_path, base_path=base_path, 00701 collect_ignores=resources.collect_ignores) 00702 resources.features.add_lazy(d[8:], closure) 00703 resources.ignore_dir(dir_path) 00704 dirs.remove(d) 00705 elif exclude_paths: 00706 for exclude_path in exclude_paths: 00707 rel_path = relpath(dir_path, exclude_path) 00708 if not (rel_path.startswith('..')): 00709 resources.ignore_dir(dir_path) 00710 dirs.remove(d) 00711 break 00712 00713 # Add root to include paths 00714 root = root.rstrip("/") 00715 resources.inc_dirs.append(root) 00716 resources.file_basepath[root] = base_path 00717 00718 for file in files: 00719 file_path = join(root, file) 00720 self._add_file(file_path, resources, base_path) 00721 00722 # A helper function for both scan_resources and _add_dir. _add_file adds one file 00723 # (*file_path*) to the resources object based on the file type. 00724 def _add_file(self, file_path, resources, base_path, exclude_paths=None): 00725 resources.file_basepath[file_path] = base_path 00726 00727 if self.is_ignored(relpath(file_path, base_path)): 00728 return 00729 00730 _, ext = splitext(file_path) 00731 ext = ext.lower() 00732 00733 if ext == '.s': 00734 resources.s_sources.append(file_path) 00735 00736 elif ext == '.c': 00737 resources.c_sources.append(file_path) 00738 00739 elif ext == '.cpp': 00740 resources.cpp_sources.append(file_path) 00741 00742 elif ext == '.h' or ext == '.hpp': 00743 resources.headers.append(file_path) 00744 00745 elif ext == '.o': 00746 resources.objects.append(file_path) 00747 00748 elif ext == self.LIBRARY_EXT: 00749 resources.libraries.append(file_path) 00750 resources.lib_dirs.add(dirname(file_path)) 00751 00752 elif ext == self.LINKER_EXT: 00753 if resources.linker_script is not None: 00754 self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path)) 00755 resources.linker_script = file_path 00756 00757 elif ext == '.lib': 00758 resources.lib_refs.append(file_path) 00759 00760 elif ext == '.bld': 00761 resources.lib_builds.append(file_path) 00762 00763 elif basename(file_path) == '.hgignore': 00764 resources.repo_files.append(file_path) 00765 00766 elif basename(file_path) == '.gitignore': 00767 resources.repo_files.append(file_path) 00768 00769 elif ext == '.hex': 00770 resources.hex_files.append(file_path) 00771 00772 elif ext == '.bin': 00773 resources.bin_files.append(file_path) 00774 00775 elif ext == '.json': 00776 resources.json_files.append(file_path) 00777 00778 00779 def scan_repository(self, path): 00780 resources = [] 00781 00782 for root, dirs, files in walk(path): 00783 # Remove ignored directories 00784 for d in copy(dirs): 00785 if d == '.' or d == '..': 00786 dirs.remove(d) 00787 00788 for file in files: 00789 file_path = join(root, file) 00790 resources.append(file_path) 00791 00792 return resources 00793 00794 def copy_files(self, files_paths, trg_path, resources=None, rel_path=None): 00795 # Handle a single file 00796 if type(files_paths) != ListType: files_paths = [files_paths] 00797 00798 for source in files_paths: 00799 if source is None: 00800 files_paths.remove(source) 00801 00802 for source in files_paths: 00803 if resources is not None and resources.file_basepath.has_key(source): 00804 relative_path = relpath(source, resources.file_basepath[source]) 00805 elif rel_path is not None: 00806 relative_path = relpath(source, rel_path) 00807 else: 00808 _, relative_path = split(source) 00809 00810 target = join(trg_path, relative_path) 00811 00812 if (target != source) and (self.need_update(target, [source])): 00813 self.progress("copy", relative_path) 00814 mkdir(dirname(target)) 00815 copyfile(source, target) 00816 00817 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 00818 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00819 def relative_object_path(self, build_path, base_dir, source): 00820 source_dir, name, _ = split_path(source) 00821 00822 obj_dir = join(build_path, relpath(source_dir, base_dir)) 00823 if obj_dir is not self.prev_dir: 00824 self.prev_dir = obj_dir 00825 mkdir(obj_dir) 00826 return join(obj_dir, name + '.o') 00827 00828 # Generate response file for all includes. 00829 # ARM, GCC, IAR cross compatible 00830 def get_inc_file(self, includes): 00831 include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5) 00832 if not exists(include_file): 00833 with open(include_file, "wb") as f: 00834 cmd_list = [] 00835 for c in includes: 00836 if c: 00837 c = c.replace("\\", "/") 00838 if self.CHROOT: 00839 c = c.replace(self.CHROOT, '') 00840 cmd_list.append('"-I%s"' % c) 00841 string = " ".join(cmd_list) 00842 f.write(string) 00843 return include_file 00844 00845 # Generate response file for all objects when linking. 00846 # ARM, GCC, IAR cross compatible 00847 def get_link_file(self, cmd): 00848 link_file = join(self.build_dir, ".link_files.txt") 00849 with open(link_file, "wb") as f: 00850 cmd_list = [] 00851 for c in cmd: 00852 if c: 00853 c = c.replace("\\", "/") 00854 if self.CHROOT: 00855 c = c.replace(self.CHROOT, '') 00856 cmd_list.append(('"%s"' % c) if not c.startswith('-') else c) 00857 string = " ".join(cmd_list) 00858 f.write(string) 00859 return link_file 00860 00861 # Generate response file for all objects when archiving. 00862 # ARM, GCC, IAR cross compatible 00863 def get_arch_file(self, objects): 00864 archive_file = join(self.build_dir, ".archive_files.txt") 00865 with open(archive_file, "wb") as f: 00866 o_list = [] 00867 for o in objects: 00868 o_list.append('"%s"' % o) 00869 string = " ".join(o_list).replace("\\", "/") 00870 f.write(string) 00871 return archive_file 00872 00873 # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM 00874 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00875 def compile_sources(self, resources, inc_dirs=None): 00876 # Web IDE progress bar for project build 00877 files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources 00878 self.to_be_compiled = len(files_to_compile) 00879 self.compiled = 0 00880 00881 self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()])) 00882 00883 inc_paths = resources.inc_dirs 00884 if inc_dirs is not None: 00885 if isinstance(inc_dirs, list): 00886 inc_paths.extend(inc_dirs) 00887 else: 00888 inc_paths.append(inc_dirs) 00889 # De-duplicate include paths 00890 inc_paths = set(inc_paths) 00891 # Sort include paths for consistency 00892 inc_paths = sorted(set(inc_paths)) 00893 # Unique id of all include paths 00894 self.inc_md5 = md5(' '.join(inc_paths)).hexdigest() 00895 00896 objects = [] 00897 queue = [] 00898 work_dir = getcwd() 00899 self.prev_dir = None 00900 00901 # Generate configuration header (this will update self.build_all if needed) 00902 self.get_config_header() 00903 self.dump_build_profile() 00904 00905 # Sort compile queue for consistency 00906 files_to_compile.sort() 00907 for source in files_to_compile: 00908 object = self.relative_object_path( 00909 self.build_dir, resources.file_basepath[source], source) 00910 00911 # Queue mode (multiprocessing) 00912 commands = self.compile_command(source, object, inc_paths) 00913 if commands is not None: 00914 queue.append({ 00915 'source': source, 00916 'object': object, 00917 'commands': commands, 00918 'work_dir': work_dir, 00919 'chroot': self.CHROOT 00920 }) 00921 else: 00922 self.compiled += 1 00923 objects.append(object) 00924 00925 # Use queues/multiprocessing if cpu count is higher than setting 00926 jobs = self.jobs if self.jobs else cpu_count() 00927 if jobs > CPU_COUNT_MIN and len(queue) > jobs: 00928 return self.compile_queue(queue, objects) 00929 else: 00930 return self.compile_seq(queue, objects) 00931 00932 # Compile source files queue in sequential order 00933 def compile_seq(self, queue, objects): 00934 for item in queue: 00935 result = compile_worker(item) 00936 00937 self.compiled += 1 00938 self.progress("compile", item['source'], build_update=True) 00939 for res in result['results']: 00940 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source']) 00941 self.compile_output([ 00942 res['code'], 00943 res['output'], 00944 res['command'] 00945 ]) 00946 objects.append(result['object']) 00947 return objects 00948 00949 # Compile source files queue in parallel by creating pool of worker threads 00950 def compile_queue(self, queue, objects): 00951 jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF) 00952 p = Pool(processes=jobs_count) 00953 00954 results = [] 00955 for i in range(len(queue)): 00956 results.append(p.apply_async(compile_worker, [queue[i]])) 00957 p.close() 00958 00959 itr = 0 00960 while len(results): 00961 itr += 1 00962 if itr > 180000: 00963 p.terminate() 00964 p.join() 00965 raise ToolException("Compile did not finish in 5 minutes") 00966 00967 sleep(0.01) 00968 pending = 0 00969 for r in results: 00970 if r._ready is True: 00971 try: 00972 result = r.get() 00973 results.remove(r) 00974 00975 self.compiled += 1 00976 self.progress("compile", result['source'], build_update=True) 00977 for res in result['results']: 00978 self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source']) 00979 self.compile_output([ 00980 res['code'], 00981 res['output'], 00982 res['command'] 00983 ]) 00984 objects.append(result['object']) 00985 except ToolException, err: 00986 if p._taskqueue.queue: 00987 p._taskqueue.queue.clear() 00988 sleep(0.5) 00989 p.terminate() 00990 p.join() 00991 raise ToolException(err) 00992 else: 00993 pending += 1 00994 if pending >= jobs_count: 00995 break 00996 00997 results = None 00998 p.join() 00999 01000 return objects 01001 01002 # Determine the compile command based on type of source file 01003 def compile_command(self, source, object, includes): 01004 # Check dependencies 01005 _, ext = splitext(source) 01006 ext = ext.lower() 01007 01008 if ext == '.c' or ext == '.cpp': 01009 base, _ = splitext(object) 01010 dep_path = base + '.d' 01011 try: 01012 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else [] 01013 except IOError, IndexError: 01014 deps = [] 01015 config_file = ([self.config.app_config_location] 01016 if self.config.app_config_location else []) 01017 deps.extend(config_file) 01018 if ext == '.cpp' or self.COMPILE_C_AS_CPP: 01019 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx")) 01020 else: 01021 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c")) 01022 if len(deps) == 0 or self.need_update(object, deps): 01023 if ext == '.cpp' or self.COMPILE_C_AS_CPP: 01024 return self.compile_cpp(source, object, includes) 01025 else: 01026 return self.compile_c(source, object, includes) 01027 elif ext == '.s': 01028 deps = [source] 01029 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm")) 01030 if self.need_update(object, deps): 01031 return self.assemble(source, object, includes) 01032 else: 01033 return False 01034 01035 return None 01036 01037 def parse_dependencies(self, dep_path): 01038 """Parse the dependency information generated by the compiler. 01039 01040 Positional arguments: 01041 dep_path -- the path to a file generated by a previous run of the compiler 01042 01043 Return value: 01044 A list of all source files that the dependency file indicated were dependencies 01045 01046 Side effects: 01047 None 01048 01049 Note: A default implementation is provided for make-like file formats 01050 """ 01051 dependencies = [] 01052 buff = open(dep_path).readlines() 01053 if buff: 01054 buff[0] = re.sub('^(.*?)\: ', '', buff[0]) 01055 for line in buff: 01056 filename = line.replace('\\\n', '').strip() 01057 if file: 01058 filename = filename.replace('\\ ', '\a') 01059 dependencies.extend(((self.CHROOT if self.CHROOT else '') + 01060 f.replace('\a', ' ')) 01061 for f in filename.split(" ")) 01062 return list(filter(None, dependencies)) 01063 01064 def is_not_supported_error(self, output): 01065 return "#error directive: [NOT_SUPPORTED]" in output 01066 01067 @abstractmethod 01068 def parse_output(self, output): 01069 """Take in compiler output and extract sinlge line warnings and errors from it. 01070 01071 Positional arguments: 01072 output -- a string of all the messages emitted by a run of the compiler 01073 01074 Return value: 01075 None 01076 01077 Side effects: 01078 call self.cc_info or self.notify with a description of the event generated by the compiler 01079 """ 01080 raise NotImplemented 01081 01082 def compile_output(self, output=[]): 01083 _rc = output[0] 01084 _stderr = output[1].decode("utf-8") 01085 command = output[2] 01086 01087 # Parse output for Warnings and Errors 01088 self.parse_output(_stderr) 01089 self.debug("Return: %s"% _rc) 01090 for error_line in _stderr.splitlines(): 01091 self.debug("Output: %s"% error_line) 01092 01093 # Check return code 01094 if _rc != 0: 01095 if self.is_not_supported_error(_stderr): 01096 raise NotSupportedException(_stderr) 01097 else: 01098 raise ToolException(_stderr) 01099 01100 def build_library(self, objects, dir, name): 01101 needed_update = False 01102 lib = self.STD_LIB_NAME % name 01103 fout = join(dir, lib) 01104 if self.need_update(fout, objects): 01105 self.info("Library: %s" % lib) 01106 self.archive(objects, fout) 01107 needed_update = True 01108 01109 return needed_update 01110 01111 def link_program(self, r, tmp_path, name): 01112 needed_update = False 01113 ext = 'bin' 01114 if hasattr(self.target, 'OUTPUT_EXT'): 01115 ext = self.target.OUTPUT_EXT 01116 01117 if hasattr(self.target, 'OUTPUT_NAMING'): 01118 self.var("binary_naming", self.target.OUTPUT_NAMING) 01119 if self.target.OUTPUT_NAMING == "8.3": 01120 name = name[0:8] 01121 ext = ext[0:3] 01122 01123 # Create destination directory 01124 head, tail = split(name) 01125 new_path = join(tmp_path, head) 01126 mkdir(new_path) 01127 01128 filename = name+'.'+ext 01129 elf = join(tmp_path, name + '.elf') 01130 bin = None if ext is 'elf' else join(tmp_path, filename) 01131 map = join(tmp_path, name + '.map') 01132 01133 r.objects = sorted(set(r.objects)) 01134 config_file = ([self.config.app_config_location] 01135 if self.config.app_config_location else []) 01136 dependencies = r.objects + r.libraries + [r.linker_script] + config_file 01137 dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) 01138 if self.need_update(elf, dependencies): 01139 needed_update = True 01140 self.progress("link", name) 01141 self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script) 01142 01143 if bin and self.need_update(bin, [elf]): 01144 needed_update = True 01145 self.progress("elf2bin", name) 01146 self.binary(r, elf, bin) 01147 01148 # Initialize memap and process map file. This doesn't generate output. 01149 self.mem_stats(map) 01150 01151 self.var("compile_succeded", True) 01152 self.var("binary", filename) 01153 01154 return bin, needed_update 01155 01156 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01157 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01158 def default_cmd(self, command): 01159 _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT) 01160 self.debug("Return: %s"% _rc) 01161 01162 for output_line in _stdout.splitlines(): 01163 self.debug("Output: %s"% output_line) 01164 for error_line in _stderr.splitlines(): 01165 self.debug("Errors: %s"% error_line) 01166 01167 if _rc != 0: 01168 for line in _stderr.splitlines(): 01169 self.tool_error(line) 01170 raise ToolException(_stderr) 01171 01172 ### NOTIFICATIONS ### 01173 def info(self, message): 01174 self.notify({'type': 'info', 'message': message}) 01175 01176 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01177 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01178 def debug(self, message): 01179 if self.VERBOSE: 01180 if type(message) is ListType: 01181 message = ' '.join(message) 01182 message = "[DEBUG] " + message 01183 self.notify({'type': 'debug', 'message': message}) 01184 01185 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01186 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01187 def cc_info(self, info=None): 01188 if info is not None: 01189 info['type'] = 'cc' 01190 self.notify(info) 01191 01192 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01193 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01194 def cc_verbose(self, message, file=""): 01195 self.debug(message) 01196 01197 def progress(self, action, file, build_update=False): 01198 msg = {'type': 'progress', 'action': action, 'file': file} 01199 if build_update: 01200 msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled) 01201 self.notify(msg) 01202 01203 def tool_error(self, message): 01204 self.notify({'type': 'tool_error', 'message': message}) 01205 01206 def var(self, key, value): 01207 self.notify({'type': 'var', 'key': key, 'val': value}) 01208 01209 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01210 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01211 def mem_stats(self, map): 01212 """! Creates parser object 01213 @param map Path to linker map file to parse and decode 01214 @return None 01215 """ 01216 toolchain = self.__class__.__name__ 01217 01218 # Create memap object 01219 memap = MemapParser() 01220 01221 # Parse and decode a map file 01222 if memap.parse(abspath(map), toolchain) is False: 01223 self.info("Unknown toolchain for memory statistics %s" % toolchain) 01224 return None 01225 01226 # Store the memap instance for later use 01227 self.memap_instance = memap 01228 01229 # Note: memory statistics are not returned. 01230 # Need call to generate_output later (depends on depth & output format) 01231 01232 return None 01233 01234 # Set the configuration data 01235 def set_config_data(self, config_data): 01236 self.config_data = config_data 01237 01238 # Creates the configuration header if needed: 01239 # - if there is no configuration data, "mbed_config.h" is not create (or deleted if it exists). 01240 # - if there is configuration data and "mbed_config.h" does not exist, it is created. 01241 # - if there is configuration data similar to the previous configuration data, 01242 # "mbed_config.h" is left untouched. 01243 # - if there is new configuration data, "mbed_config.h" is overriden. 01244 # The function needs to be called exactly once for the lifetime of this toolchain instance. 01245 # The "config_processed" variable (below) ensures this behaviour. 01246 # The function returns the location of the configuration file, or None if there is no 01247 # configuration data available (and thus no configuration file) 01248 def get_config_header(self): 01249 if self.config_processed: # this function was already called, return its result 01250 return self.config_file 01251 # The config file is located in the build directory 01252 self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME) 01253 # If the file exists, read its current content in prev_data 01254 if exists(self.config_file): 01255 with open(self.config_file, "rt") as f: 01256 prev_data = f.read() 01257 else: 01258 prev_data = None 01259 # Get the current configuration data 01260 crt_data = self.config.config_to_header(self.config_data) if self.config_data else None 01261 # "changed" indicates if a configuration change was detected 01262 changed = False 01263 if prev_data is not None: # a previous mbed_config.h exists 01264 if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed 01265 remove(self.config_file) 01266 self.config_file = None # this means "config file not present" 01267 changed = True 01268 elif crt_data != prev_data: # different content of config file 01269 with open(self.config_file, "wt") as f: 01270 f.write(crt_data) 01271 changed = True 01272 else: # a previous mbed_config.h does not exist 01273 if crt_data is not None: # there's configuration data available 01274 with open(self.config_file, "wt") as f: 01275 f.write(crt_data) 01276 changed = True 01277 else: 01278 self.config_file = None # this means "config file not present" 01279 # If there was a change in configuration, rebuild everything 01280 self.build_all = changed 01281 # Make sure that this function will only return the location of the configuration 01282 # file for subsequent calls, without trying to manipulate its content in any way. 01283 self.config_processed = True 01284 return self.config_file 01285 01286 def dump_build_profile(self): 01287 """Dump the current build profile and macros into the `.profile` file 01288 in the build directory""" 01289 for key in ["cxx", "c", "asm", "ld"]: 01290 to_dump = (str(self.flags[key]) + str(sorted(self.macros))) 01291 if key in ["cxx", "c"]: 01292 to_dump += str(self.flags['common']) 01293 where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key) 01294 self._overwrite_when_not_equal(where, to_dump) 01295 01296 @staticmethod 01297 def _overwrite_when_not_equal(filename, content): 01298 if not exists(filename) or content != open(filename).read(): 01299 with open(filename, "wb") as out: 01300 out.write(content) 01301 01302 @staticmethod 01303 def generic_check_executable(tool_key, executable_name, levels_up, 01304 nested_dir=None): 01305 """ 01306 Positional args: 01307 tool_key: the key to index TOOLCHAIN_PATHS 01308 executable_name: the toolchain's named executable (ex. armcc) 01309 levels_up: each toolchain joins the toolchain_path, some 01310 variable directories (bin, include), and the executable name, 01311 so the TOOLCHAIN_PATH value must be appropriately distanced 01312 01313 Keyword args: 01314 nested_dir: the directory within TOOLCHAIN_PATHS where the executable 01315 is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path 01316 that will be used by toolchain's compile) 01317 01318 Returns True if the executable location specified by the user 01319 exists and is valid OR the executable can be found on the PATH. 01320 Returns False otherwise. 01321 """ 01322 # Search PATH if user did not specify a path or specified path doesn't 01323 # exist. 01324 if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]): 01325 exe = find_executable(executable_name) 01326 if not exe: 01327 return False 01328 for level in range(levels_up): 01329 # move up the specified number of directories 01330 exe = dirname(exe) 01331 TOOLCHAIN_PATHS[tool_key] = exe 01332 if nested_dir: 01333 subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir, 01334 executable_name) 01335 else: 01336 subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name) 01337 # User could have specified a path that exists but does not contain exe 01338 return exists(subdir) or exists(subdir +'.exe') 01339 01340 @abstractmethod 01341 def check_executable(self): 01342 """Returns True if the executable (armcc) location specified by the 01343 user exists OR the executable can be found on the PATH. 01344 Returns False otherwise.""" 01345 raise NotImplemented 01346 01347 @abstractmethod 01348 def get_config_option(self, config_header): 01349 """Generate the compiler option that forces the inclusion of the configuration 01350 header file. 01351 01352 Positional arguments: 01353 config_header -- The configuration header that will be included within all source files 01354 01355 Return value: 01356 A list of the command line arguments that will force the inclusion the specified header 01357 01358 Side effects: 01359 None 01360 """ 01361 raise NotImplemented 01362 01363 @abstractmethod 01364 def get_compile_options(self, defines, includes, for_asm=False): 01365 """Generate the compiler options from the defines and includes 01366 01367 Positional arguments: 01368 defines -- The preprocessor macros defined on the command line 01369 includes -- The include file search paths 01370 01371 Keyword arguments: 01372 for_asm -- generate the assembler options instead of the compiler options 01373 01374 Return value: 01375 A list of the command line arguments that will force the inclusion the specified header 01376 01377 Side effects: 01378 None 01379 """ 01380 raise NotImplemented 01381 01382 @abstractmethod 01383 def assemble(self, source, object, includes): 01384 """Generate the command line that assembles. 01385 01386 Positional arguments: 01387 source -- a file path that is the file to assemble 01388 object -- a file path that is the destination object 01389 includes -- a list of all directories where header files may be found 01390 01391 Return value: 01392 The complete command line, as a list, that would invoke the assembler 01393 on the source file, include all the include paths, and generate 01394 the specified object file. 01395 01396 Side effects: 01397 None 01398 01399 Note: 01400 This method should be decorated with @hook_tool. 01401 """ 01402 raise NotImplemented 01403 01404 @abstractmethod 01405 def compile_c(self, source, object, includes): 01406 """Generate the command line that compiles a C source file. 01407 01408 Positional arguments: 01409 source -- the C source file to compile 01410 object -- the destination object file 01411 includes -- a list of all the directories where header files may be found 01412 01413 Return value: 01414 The complete command line, as a list, that would invoke the C compiler 01415 on the source file, include all the include paths, and generate the 01416 specified object file. 01417 01418 Side effects: 01419 None 01420 01421 Note: 01422 This method should be decorated with @hook_tool. 01423 """ 01424 raise NotImplemented 01425 01426 @abstractmethod 01427 def compile_cpp(self, source, object, includes): 01428 """Generate the command line that compiles a C++ source file. 01429 01430 Positional arguments: 01431 source -- the C++ source file to compile 01432 object -- the destination object file 01433 includes -- a list of all the directories where header files may be found 01434 01435 Return value: 01436 The complete command line, as a list, that would invoke the C++ compiler 01437 on the source file, include all the include paths, and generate the 01438 specified object file. 01439 01440 Side effects: 01441 None 01442 01443 Note: 01444 This method should be decorated with @hook_tool. 01445 """ 01446 raise NotImplemented 01447 01448 @abstractmethod 01449 def link(self, output, objects, libraries, lib_dirs, mem_map): 01450 """Run the linker to create an executable and memory map. 01451 01452 Positional arguments: 01453 output -- the file name to place the executable in 01454 objects -- all of the object files to link 01455 libraries -- all of the required libraries 01456 lib_dirs -- where the required libraries are located 01457 mem_map -- the location where the memory map file should be stored 01458 01459 Return value: 01460 None 01461 01462 Side effect: 01463 Runs the linker to produce the executable. 01464 01465 Note: 01466 This method should be decorated with @hook_tool. 01467 """ 01468 raise NotImplemented 01469 01470 @abstractmethod 01471 def archive(self, objects, lib_path): 01472 """Run the command line that creates an archive. 01473 01474 Positional arguhments: 01475 objects -- a list of all the object files that should be archived 01476 lib_path -- the file name of the resulting library file 01477 01478 Return value: 01479 None 01480 01481 Side effect: 01482 Runs the archiving tool to produce the library file. 01483 01484 Note: 01485 This method should be decorated with @hook_tool. 01486 """ 01487 raise NotImplemented 01488 01489 @abstractmethod 01490 def binary(self, resources, elf, bin): 01491 """Run the command line that will Extract a simplified binary file. 01492 01493 Positional arguments: 01494 resources -- A resources object (Is not used in any of the toolchains) 01495 elf -- the executable file that is to be converted 01496 bin -- the file name of the to be created simplified binary file 01497 01498 Return value: 01499 None 01500 01501 Side effect: 01502 Runs the elf2bin tool to produce the simplified binary file. 01503 01504 Note: 01505 This method should be decorated with @hook_tool. 01506 """ 01507 raise NotImplemented 01508 01509 @staticmethod 01510 @abstractmethod 01511 def name_mangle(name): 01512 """Mangle a name based on the conventional name mangling of this toolchain 01513 01514 Positional arguments: 01515 name -- the name to mangle 01516 01517 Return: 01518 the mangled name as a string 01519 """ 01520 raise NotImplemented 01521 01522 @staticmethod 01523 @abstractmethod 01524 def make_ld_define(name, value): 01525 """Create an argument to the linker that would define a symbol 01526 01527 Positional arguments: 01528 name -- the symbol to define 01529 value -- the value to give the symbol 01530 01531 Return: 01532 The linker flag as a string 01533 """ 01534 raise NotImplemented 01535 01536 @staticmethod 01537 @abstractmethod 01538 def redirect_symbol(source, sync, build_dir): 01539 """Redirect a symbol at link time to point at somewhere else 01540 01541 Positional arguments: 01542 source -- the symbol doing the pointing 01543 sync -- the symbol being pointed to 01544 build_dir -- the directory to put "response files" if needed by the toolchain 01545 01546 Side Effects: 01547 Possibly create a file in the build directory 01548 01549 Return: 01550 The linker flag to redirect the symbol, as a string 01551 """ 01552 raise NotImplemented 01553 01554 # Return the list of macros geenrated by the build system 01555 def get_config_macros(self): 01556 return self.config.config_to_macros(self.config_data) if self.config_data else [] 01557 01558 @property 01559 def report(self): 01560 to_ret = {} 01561 to_ret['c_compiler'] = {'flags': copy(self.flags['c']), 01562 'symbols': self.get_symbols()} 01563 to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']), 01564 'symbols': self.get_symbols()} 01565 to_ret['assembler'] = {'flags': copy(self.flags['asm']), 01566 'symbols': self.get_symbols(True)} 01567 to_ret['linker'] = {'flags': copy(self.flags['ld'])} 01568 to_ret.update(self.config.report) 01569 return to_ret 01570 01571 from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH 01572 01573 TOOLCHAIN_PATHS = { 01574 'ARM': ARM_PATH, 01575 'uARM': ARM_PATH, 01576 'ARMC6': ARMC6_PATH, 01577 'GCC_ARM': GCC_ARM_PATH, 01578 'IAR': IAR_PATH 01579 } 01580 01581 from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6 01582 from tools.toolchains.gcc import GCC_ARM 01583 from tools.toolchains.iar import IAR 01584 01585 TOOLCHAIN_CLASSES = { 01586 'ARM': ARM_STD, 01587 'uARM': ARM_MICRO, 01588 'ARMC6': ARMC6, 01589 'GCC_ARM': GCC_ARM, 01590 'IAR': IAR 01591 } 01592 01593 TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
Generated on Sun Jul 17 2022 08:25:18 by 1.7.2