Important changes to repositories hosted on mbed.com
Mbed hosted mercurial repositories are deprecated and are due to be permanently deleted in July 2026.
To keep a copy of this software download the repository Zip archive or clone locally using Mercurial.
It is also possible to export all your personal repositories from the account settings page.
__init__.py
00001 """ 00002 mbed SDK 00003 Copyright (c) 2011-2013 ARM Limited 00004 00005 Licensed under the Apache License, Version 2.0 (the "License"); 00006 you may not use this file except in compliance with the License. 00007 You may obtain a copy of the License at 00008 00009 http://www.apache.org/licenses/LICENSE-2.0 00010 00011 Unless required by applicable law or agreed to in writing, software 00012 distributed under the License is distributed on an "AS IS" BASIS, 00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 See the License for the specific language governing permissions and 00015 limitations under the License. 00016 """ 00017 from __future__ import print_function, division, absolute_import 00018 00019 import re 00020 import sys 00021 from os import stat, walk, getcwd, sep, remove 00022 from copy import copy 00023 from time import time, sleep 00024 from shutil import copyfile 00025 from os.path import (join, splitext, exists, relpath, dirname, basename, split, 00026 abspath, isfile, isdir, normcase) 00027 from itertools import chain 00028 from inspect import getmro 00029 from copy import deepcopy 00030 from abc import ABCMeta, abstractmethod 00031 from distutils.spawn import find_executable 00032 from multiprocessing import Pool, cpu_count 00033 from hashlib import md5 00034 import fnmatch 00035 00036 from ..utils import (run_cmd, mkdir, rel_path, ToolException, 00037 NotSupportedException, split_path, compile_worker) 00038 from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK 00039 from .. import hooks 00040 from ..notifier.term import TerminalNotifier 00041 from ..memap import MemapParser 00042 00043 00044 #Disables multiprocessing if set to higher number than the host machine CPUs 00045 CPU_COUNT_MIN = 1 00046 CPU_COEF = 1 00047 00048 class LazyDict(object): 00049 def __init__(self): 00050 self.eager = {} 00051 self.lazy = {} 00052 00053 def add_lazy(self, key, thunk): 00054 if key in self.eager: 00055 del self.eager[key] 00056 self.lazy[key] = thunk 00057 00058 def __getitem__(self, key): 00059 if (key not in self.eager 00060 and key in self.lazy): 00061 self.eager[key] = self.lazy[key]() 00062 del self.lazy[key] 00063 return self.eager[key] 00064 00065 def __setitem__(self, key, value): 00066 self.eager[key] = value 00067 00068 def __delitem__(self, key): 00069 if key in self.eager: 00070 del self.eager[key] 00071 else: 00072 del self.lazy[key] 00073 00074 def __contains__(self, key): 00075 return key in self.eager or key in self.lazy 00076 00077 def __iter__(self): 00078 return chain(iter(self.eager), iter(self.lazy)) 00079 00080 def __len__(self): 00081 return len(self.eager) + len(self.lazy) 00082 00083 def __str__(self): 00084 return "Lazy{%s}" % ( 00085 ", ".join("%r: %r" % (k, v) for k, v in 00086 chain(self.eager.items(), ((k, "not evaluated") 00087 for k in self.lazy)))) 00088 00089 def update(self, other): 00090 if isinstance(other, LazyDict): 00091 self.eager.update(other.eager) 00092 self.lazy.update(other.lazy) 00093 else: 00094 self.eager.update(other) 00095 00096 def items(self): 00097 """Warning: This forces the evaluation all of the items in this LazyDict 00098 that are iterated over.""" 00099 for k, v in self.eager.items(): 00100 yield k, v 00101 for k in self.lazy.keys(): 00102 yield k, self[k] 00103 00104 def apply(self, fn): 00105 """Delay the application of a computation to all items of the lazy dict. 00106 Does no computation now. Instead the comuptation is performed when a 00107 consumer attempts to access a value in this LazyDict""" 00108 new_lazy = {} 00109 for k, f in self.lazy.items(): 00110 def closure(f=f): 00111 return fn(f()) 00112 new_lazy[k] = closure 00113 for k, v in self.eager.items(): 00114 def closure(v=v): 00115 return fn(v) 00116 new_lazy[k] = closure 00117 self.lazy = new_lazy 00118 self.eager = {} 00119 00120 class Resources: 00121 def __init__(self, base_path=None, collect_ignores=False): 00122 self.base_path = base_path 00123 self.collect_ignores = collect_ignores 00124 00125 self.file_basepath = {} 00126 00127 self.inc_dirs = [] 00128 self.headers = [] 00129 00130 self.s_sources = [] 00131 self.c_sources = [] 00132 self.cpp_sources = [] 00133 00134 self.lib_dirs = set([]) 00135 self.objects = [] 00136 self.libraries = [] 00137 00138 # mbed special files 00139 self.lib_builds = [] 00140 self.lib_refs = [] 00141 00142 self.repo_dirs = [] 00143 self.repo_files = [] 00144 00145 self.linker_script = None 00146 00147 # Other files 00148 self.hex_files = [] 00149 self.bin_files = [] 00150 self.json_files = [] 00151 00152 # Features 00153 self.features = LazyDict() 00154 self.ignored_dirs = [] 00155 00156 def __add__(self, resources): 00157 if resources is None: 00158 return self 00159 else: 00160 return self.add(resources) 00161 00162 def __radd__(self, resources): 00163 if resources is None: 00164 return self 00165 else: 00166 return self.add(resources) 00167 00168 def ignore_dir(self, directory): 00169 if self.collect_ignores: 00170 self.ignored_dirs.append(directory) 00171 00172 def add(self, resources): 00173 self.file_basepath.update(resources.file_basepath) 00174 00175 self.inc_dirs += resources.inc_dirs 00176 self.headers += resources.headers 00177 00178 self.s_sources += resources.s_sources 00179 self.c_sources += resources.c_sources 00180 self.cpp_sources += resources.cpp_sources 00181 00182 self.lib_dirs |= resources.lib_dirs 00183 self.objects += resources.objects 00184 self.libraries += resources.libraries 00185 00186 self.lib_builds += resources.lib_builds 00187 self.lib_refs += resources.lib_refs 00188 00189 self.repo_dirs += resources.repo_dirs 00190 self.repo_files += resources.repo_files 00191 00192 if resources.linker_script is not None: 00193 self.linker_script = resources.linker_script 00194 00195 self.hex_files += resources.hex_files 00196 self.bin_files += resources.bin_files 00197 self.json_files += resources.json_files 00198 00199 self.features.update(resources.features) 00200 self.ignored_dirs += resources.ignored_dirs 00201 00202 return self 00203 00204 def rewrite_basepath(self, file_name, export_path, loc): 00205 """ Replace the basepath of filename with export_path 00206 00207 Positional arguments: 00208 file_name - the absolute path to a file 00209 export_path - the final destination of the file after export 00210 """ 00211 new_f = join(loc, relpath(file_name, self.file_basepath[file_name])) 00212 self.file_basepath[new_f] = export_path 00213 return new_f 00214 00215 def subtract_basepath(self, export_path, loc=""): 00216 """ Rewrite all of the basepaths with the export_path 00217 00218 Positional arguments: 00219 export_path - the final destination of the resources with respect to the 00220 generated project files 00221 """ 00222 keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', 00223 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script', 00224 'lib_dirs'] 00225 for key in keys: 00226 vals = getattr(self, key) 00227 if isinstance(vals, set): 00228 vals = list(vals) 00229 if isinstance(vals, list): 00230 new_vals = [] 00231 for val in vals: 00232 new_vals.append(self.rewrite_basepath( 00233 val, export_path, loc)) 00234 if isinstance(getattr(self, key), set): 00235 setattr(self, key, set(new_vals)) 00236 else: 00237 setattr(self, key, new_vals) 00238 elif vals: 00239 setattr(self, key, self.rewrite_basepath( 00240 vals, export_path, loc)) 00241 def closure(res, export_path=export_path, loc=loc): 00242 res.subtract_basepath(export_path, loc) 00243 return res 00244 self.features.apply(closure) 00245 00246 def _collect_duplicates(self, dupe_dict, dupe_headers): 00247 for filename in self.s_sources + self.c_sources + self.cpp_sources: 00248 objname, _ = splitext(basename(filename)) 00249 dupe_dict.setdefault(objname, set()) 00250 dupe_dict[objname] |= set([filename]) 00251 for filename in self.headers: 00252 headername = basename(filename) 00253 dupe_headers.setdefault(headername, set()) 00254 dupe_headers[headername] |= set([headername]) 00255 return dupe_dict, dupe_headers 00256 00257 def detect_duplicates(self, toolchain): 00258 """Detect all potential ambiguities in filenames and report them with 00259 a toolchain notification 00260 00261 Positional Arguments: 00262 toolchain - used for notifications 00263 """ 00264 count = 0 00265 dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict()) 00266 for objname, filenames in dupe_dict.items(): 00267 if len(filenames) > 1: 00268 count+=1 00269 toolchain.notify.tool_error( 00270 "Object file %s.o is not unique! It could be made from: %s"\ 00271 % (objname, " ".join(filenames))) 00272 for headername, locations in dupe_headers.items(): 00273 if len(locations) > 1: 00274 count+=1 00275 toolchain.notify.tool_error( 00276 "Header file %s is not unique! It could be: %s" %\ 00277 (headername, " ".join(locations))) 00278 return count 00279 00280 00281 def relative_to(self, base, dot=False): 00282 for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', 00283 'cpp_sources', 'lib_dirs', 'objects', 'libraries', 00284 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 00285 'hex_files', 'bin_files', 'json_files']: 00286 v = [rel_path(f, base, dot) for f in getattr(self, field)] 00287 setattr(self, field, v) 00288 00289 def to_apply(feature, base=base, dot=dot): 00290 feature.relative_to(base, dot) 00291 self.features.apply(to_apply) 00292 00293 if self.linker_script is not None: 00294 self.linker_script = rel_path(self.linker_script, base, dot) 00295 00296 def win_to_unix(self): 00297 for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', 00298 'cpp_sources', 'lib_dirs', 'objects', 'libraries', 00299 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 00300 'hex_files', 'bin_files', 'json_files']: 00301 v = [f.replace('\\', '/') for f in getattr(self, field)] 00302 setattr(self, field, v) 00303 00304 def to_apply(feature): 00305 feature.win_to_unix() 00306 self.features.apply(to_apply) 00307 00308 if self.linker_script is not None: 00309 self.linker_script = self.linker_script.replace('\\', '/') 00310 00311 def __str__(self): 00312 s = [] 00313 00314 for (label, resources) in ( 00315 ('Include Directories', self.inc_dirs), 00316 ('Headers', self.headers), 00317 00318 ('Assembly sources', self.s_sources), 00319 ('C sources', self.c_sources), 00320 ('C++ sources', self.cpp_sources), 00321 00322 ('Library directories', self.lib_dirs), 00323 ('Objects', self.objects), 00324 ('Libraries', self.libraries), 00325 00326 ('Hex files', self.hex_files), 00327 ('Bin files', self.bin_files), 00328 00329 ('Features', self.features), 00330 ): 00331 if resources: 00332 s.append('%s:\n ' % label + '\n '.join(resources)) 00333 00334 if self.linker_script: 00335 s.append('Linker Script: ' + self.linker_script) 00336 00337 return '\n'.join(s) 00338 00339 # Support legacy build conventions: the original mbed build system did not have 00340 # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but 00341 # had the knowledge of a list of these directories to be ignored. 00342 LEGACY_IGNORE_DIRS = set([ 00343 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z', 00344 'ARM', 'uARM', 'IAR', 00345 'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB', 00346 'ARMC6' 00347 ]) 00348 LEGACY_TOOLCHAIN_NAMES = { 00349 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM', 00350 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR', 00351 'IAR': 'IAR', 00352 'ARMC6': 'ARMC6', 00353 } 00354 00355 00356 class mbedToolchain: 00357 # Verbose logging 00358 VERBOSE = True 00359 00360 # Compile C files as CPP 00361 COMPILE_C_AS_CPP = False 00362 00363 # Response files for compiling, includes, linking and archiving. 00364 # Not needed on posix systems where the typical arg limit is 2 megabytes 00365 RESPONSE_FILES = True 00366 00367 CORTEX_SYMBOLS = { 00368 "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00369 "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00370 "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00371 "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00372 "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00373 "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00374 "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00375 "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00376 "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00377 "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"], 00378 "Cortex-M23-NS": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00379 "Cortex-M23": ["__CORTEX_M23", "ARM_MATH_ARMV8MBL", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00380 "Cortex-M33-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "DOMAIN_NS=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00381 "Cortex-M33": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00382 "Cortex-M33F-NS": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "DOMAIN_NS=1", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00383 "Cortex-M33F": ["__CORTEX_M33", "ARM_MATH_ARMV8MML", "__FPU_PRESENT", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], 00384 } 00385 00386 MBED_CONFIG_FILE_NAME="mbed_config.h" 00387 00388 PROFILE_FILE_NAME = ".profile" 00389 00390 __metaclass__ = ABCMeta 00391 00392 profile_template = {'common':[], 'c':[], 'cxx':[], 'asm':[], 'ld':[]} 00393 00394 def __init__(self, target, notify=None, macros=None, build_profile=None, 00395 build_dir=None): 00396 self.target = target 00397 self.name = self.__class__.__name__ 00398 00399 # compile/assemble/link/binary hooks 00400 self.hook = hooks.Hook(target, self) 00401 00402 # Toolchain flags 00403 self.flags = deepcopy(build_profile or self.profile_template) 00404 00405 # System libraries provided by the toolchain 00406 self.sys_libs = [] 00407 00408 # User-defined macros 00409 self.macros = macros or [] 00410 00411 # Macros generated from toolchain and target rules/features 00412 self.asm_symbols = None 00413 self.cxx_symbols = None 00414 00415 # Labels generated from toolchain and target rules/features (used for selective build) 00416 self.labels = None 00417 00418 # This will hold the initialized config object 00419 self.config = None 00420 00421 # This will hold the configuration data (as returned by Config.get_config_data()) 00422 self.config_data = None 00423 00424 # This will hold the location of the configuration file or None if there's no configuration available 00425 self.config_file = None 00426 00427 # Call guard for "get_config_data" (see the comments of get_config_data for details) 00428 self.config_processed = False 00429 00430 # Non-incremental compile 00431 self.build_all = False 00432 00433 # Build output dir 00434 self.build_dir = abspath(build_dir) if PRINT_COMPILER_OUTPUT_AS_LINK else build_dir 00435 self.timestamp = time() 00436 00437 # Number of concurrent build jobs. 0 means auto (based on host system cores) 00438 self.jobs = 0 00439 00440 # Ignore patterns from .mbedignore files 00441 self.ignore_patterns = [] 00442 self._ignore_regex = re.compile("$^") 00443 00444 # Pre-mbed 2.0 ignore dirs 00445 self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]]) 00446 00447 # Output notify function 00448 # This function is passed all events, and expected to handle notification of the 00449 # user, emit the events to a log, etc. 00450 # The API for all notify methods passed into the notify parameter is as follows: 00451 # def notify(Event, Silent) 00452 # Where *Event* is a dict representing the toolchain event that was generated 00453 # e.g.: a compile succeeded, or a warning was emitted by the compiler 00454 # or an application was linked 00455 # *Silent* is a boolean 00456 if notify: 00457 self.notify = notify 00458 else: 00459 self.notify = TerminalNotifier() 00460 00461 # uVisor spepcific rules 00462 if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels: 00463 self.target.core = re.sub(r"F$", '', self.target.core) 00464 00465 # Stats cache is used to reduce the amount of IO requests to stat 00466 # header files during dependency change. See need_update() 00467 self.stat_cache = {} 00468 00469 # Used by the mbed Online Build System to build in chrooted environment 00470 self.CHROOT = None 00471 00472 # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over 00473 self.init() 00474 00475 # Used for post __init__() hooks 00476 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 00477 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00478 def init(self): 00479 return True 00480 00481 def get_output(self): 00482 return self.notifier.get_output() 00483 00484 def get_symbols(self, for_asm=False): 00485 if for_asm: 00486 if self.asm_symbols is None: 00487 self.asm_symbols = [] 00488 00489 # Cortex CPU symbols 00490 if self.target.core in mbedToolchain.CORTEX_SYMBOLS: 00491 self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core]) 00492 00493 # Add target's symbols 00494 self.asm_symbols += self.target.macros 00495 # Add extra symbols passed via 'macros' parameter 00496 self.asm_symbols += self.macros 00497 return list(set(self.asm_symbols)) # Return only unique symbols 00498 else: 00499 if self.cxx_symbols is None: 00500 # Target and Toolchain symbols 00501 labels = self.get_labels() 00502 self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']] 00503 self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']]) 00504 00505 # Cortex CPU symbols 00506 if self.target.core in mbedToolchain.CORTEX_SYMBOLS: 00507 self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core]) 00508 00509 # Symbols defined by the on-line build.system 00510 self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1']) 00511 if MBED_ORG_USER: 00512 self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER) 00513 00514 # Add target's symbols 00515 self.cxx_symbols += self.target.macros 00516 # Add target's hardware 00517 self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has] 00518 # Add target's features 00519 self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features] 00520 # Add extra symbols passed via 'macros' parameter 00521 self.cxx_symbols += self.macros 00522 00523 # Form factor variables 00524 if hasattr(self.target, 'supported_form_factors'): 00525 self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors]) 00526 00527 return list(set(self.cxx_symbols)) # Return only unique symbols 00528 00529 # Extend the internal list of macros 00530 def add_macros(self, new_macros): 00531 self.macros.extend(new_macros) 00532 00533 def get_labels(self): 00534 if self.labels is None: 00535 toolchain_labels = self._get_toolchain_labels() 00536 self.labels = { 00537 'TARGET': self.target.labels, 00538 'FEATURE': self.target.features, 00539 'TOOLCHAIN': toolchain_labels 00540 } 00541 00542 # This is a policy decision and it should /really/ be in the config system 00543 # ATM it's here for backward compatibility 00544 if ((("-g" in self.flags['common'] or "-g3" in self.flags['common']) and 00545 "-O0" in self.flags['common']) or 00546 ("-r" in self.flags['common'] and 00547 "-On" in self.flags['common'])): 00548 self.labels['TARGET'].append("DEBUG") 00549 else: 00550 self.labels['TARGET'].append("RELEASE") 00551 return self.labels 00552 00553 def _get_toolchain_labels(self): 00554 toolchain_labels = [c.__name__ for c in getmro(self.__class__)] 00555 toolchain_labels.remove('mbedToolchain') 00556 toolchain_labels.remove('object') 00557 return toolchain_labels 00558 00559 00560 # Determine whether a source file needs updating/compiling 00561 def need_update(self, target, dependencies): 00562 if self.build_all: 00563 return True 00564 00565 if not exists(target): 00566 return True 00567 00568 target_mod_time = stat(target).st_mtime 00569 00570 for d in dependencies: 00571 # Some objects are not provided with full path and here we do not have 00572 # information about the library paths. Safe option: assume an update 00573 if not d or not exists(d): 00574 return True 00575 00576 if d not in self.stat_cache: 00577 self.stat_cache[d] = stat(d).st_mtime 00578 00579 if self.stat_cache[d] >= target_mod_time: 00580 return True 00581 00582 return False 00583 00584 def is_ignored(self, file_path): 00585 """Check if file path is ignored by any .mbedignore thus far""" 00586 return self._ignore_regex.match(normcase(file_path)) 00587 00588 def add_ignore_patterns(self, root, base_path, patterns): 00589 """Add a series of patterns to the ignored paths 00590 00591 Positional arguments: 00592 root - the directory containing the ignore file 00593 base_path - the location that the scan started from 00594 patterns - the list of patterns we will ignore in the future 00595 """ 00596 real_base = relpath(root, base_path) 00597 if real_base == ".": 00598 self.ignore_patterns.extend(normcase(p) for p in patterns) 00599 else: 00600 self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns) 00601 if self.ignore_patterns: 00602 self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns)) 00603 00604 # Create a Resources object from the path pointed to by *path* by either traversing a 00605 # a directory structure, when *path* is a directory, or adding *path* to the resources, 00606 # when *path* is a file. 00607 # The parameter *base_path* is used to set the base_path attribute of the Resources 00608 # object and the parameter *exclude_paths* is used by the directory traversal to 00609 # exclude certain paths from the traversal. 00610 def scan_resources(self, path, exclude_paths=None, base_path=None, 00611 collect_ignores=False): 00612 self.progress("scan", path) 00613 00614 resources = Resources(path, collect_ignores=collect_ignores) 00615 if not base_path: 00616 if isfile(path): 00617 base_path = dirname(path) 00618 else: 00619 base_path = path 00620 resources.base_path = base_path 00621 00622 if isfile(path): 00623 self._add_file(path, resources, base_path, exclude_paths=exclude_paths) 00624 else: 00625 self._add_dir(path, resources, base_path, exclude_paths=exclude_paths) 00626 return resources 00627 00628 # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a 00629 # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file 00630 # on every file it considers adding to the resources object. 00631 def _add_dir(self, path, resources, base_path, exclude_paths=None): 00632 """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) 00633 When topdown is True, the caller can modify the dirnames list in-place 00634 (perhaps using del or slice assignment), and walk() will only recurse into 00635 the subdirectories whose names remain in dirnames; this can be used to prune 00636 the search, impose a specific order of visiting, or even to inform walk() 00637 about directories the caller creates or renames before it resumes walk() 00638 again. Modifying dirnames when topdown is False is ineffective, because in 00639 bottom-up mode the directories in dirnames are generated before dirpath 00640 itself is generated. 00641 """ 00642 labels = self.get_labels() 00643 for root, dirs, files in walk(path, followlinks=True): 00644 # Check if folder contains .mbedignore 00645 if ".mbedignore" in files: 00646 with open (join(root,".mbedignore"), "r") as f: 00647 lines=f.readlines() 00648 lines = [l.strip() for l in lines] # Strip whitespaces 00649 lines = [l for l in lines if l != ""] # Strip empty lines 00650 lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines 00651 # Append root path to glob patterns and append patterns to ignore_patterns 00652 self.add_ignore_patterns(root, base_path, lines) 00653 00654 # Skip the whole folder if ignored, e.g. .mbedignore containing '*' 00655 root_path =join(relpath(root, base_path)) 00656 if (self.is_ignored(join(root_path,"")) or 00657 self.build_dir == root_path): 00658 resources.ignore_dir(root_path) 00659 dirs[:] = [] 00660 continue 00661 00662 for d in copy(dirs): 00663 dir_path = join(root, d) 00664 # Add internal repo folders/files. This is needed for exporters 00665 if d == '.hg' or d == '.git': 00666 resources.repo_dirs.append(dir_path) 00667 00668 if ((d.startswith('.') or d in self.legacy_ignore_dirs) or 00669 # Ignore targets that do not match the TARGET in extra_labels list 00670 (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or 00671 # Ignore toolchain that do not match the current TOOLCHAIN 00672 (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or 00673 # Ignore .mbedignore files 00674 self.is_ignored(join(relpath(root, base_path), d,"")) or 00675 # Ignore TESTS dir 00676 (d == 'TESTS')): 00677 resources.ignore_dir(dir_path) 00678 dirs.remove(d) 00679 elif d.startswith('FEATURE_'): 00680 # Recursively scan features but ignore them in the current scan. 00681 # These are dynamically added by the config system if the conditions are matched 00682 def closure (dir_path=dir_path, base_path=base_path): 00683 return self.scan_resources(dir_path, base_path=base_path, 00684 collect_ignores=resources.collect_ignores) 00685 resources.features.add_lazy(d[8:], closure) 00686 resources.ignore_dir(dir_path) 00687 dirs.remove(d) 00688 elif exclude_paths: 00689 for exclude_path in exclude_paths: 00690 rel_path = relpath(dir_path, exclude_path) 00691 if not (rel_path.startswith('..')): 00692 resources.ignore_dir(dir_path) 00693 dirs.remove(d) 00694 break 00695 00696 # Add root to include paths 00697 root = root.rstrip("/") 00698 resources.inc_dirs.append(root) 00699 resources.file_basepath[root] = base_path 00700 00701 for file in files: 00702 file_path = join(root, file) 00703 self._add_file(file_path, resources, base_path) 00704 00705 # A helper function for both scan_resources and _add_dir. _add_file adds one file 00706 # (*file_path*) to the resources object based on the file type. 00707 def _add_file(self, file_path, resources, base_path, exclude_paths=None): 00708 00709 if (self.is_ignored(relpath(file_path, base_path)) or 00710 basename(file_path).startswith(".")): 00711 resources.ignore_dir(relpath(file_path, base_path)) 00712 return 00713 00714 resources.file_basepath[file_path] = base_path 00715 _, ext = splitext(file_path) 00716 ext = ext.lower() 00717 00718 if ext == '.s': 00719 resources.s_sources.append(file_path) 00720 00721 elif ext == '.c': 00722 resources.c_sources.append(file_path) 00723 00724 elif ext == '.cpp': 00725 resources.cpp_sources.append(file_path) 00726 00727 elif ext == '.h' or ext == '.hpp': 00728 resources.headers.append(file_path) 00729 00730 elif ext == '.o': 00731 resources.objects.append(file_path) 00732 00733 elif ext == self.LIBRARY_EXT: 00734 resources.libraries.append(file_path) 00735 resources.lib_dirs.add(dirname(file_path)) 00736 00737 elif ext == self.LINKER_EXT: 00738 if resources.linker_script is not None: 00739 self.notify.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path)) 00740 resources.linker_script = file_path 00741 00742 elif ext == '.lib': 00743 resources.lib_refs.append(file_path) 00744 00745 elif ext == '.bld': 00746 resources.lib_builds.append(file_path) 00747 00748 elif basename(file_path) == '.hgignore': 00749 resources.repo_files.append(file_path) 00750 00751 elif basename(file_path) == '.gitignore': 00752 resources.repo_files.append(file_path) 00753 00754 elif ext == '.hex': 00755 resources.hex_files.append(file_path) 00756 00757 elif ext == '.bin': 00758 resources.bin_files.append(file_path) 00759 00760 elif ext == '.json': 00761 resources.json_files.append(file_path) 00762 00763 00764 def scan_repository(self, path): 00765 resources = [] 00766 00767 for root, dirs, files in walk(path): 00768 # Remove ignored directories 00769 for d in copy(dirs): 00770 if d == '.' or d == '..': 00771 dirs.remove(d) 00772 00773 for file in files: 00774 file_path = join(root, file) 00775 resources.append(file_path) 00776 00777 return resources 00778 00779 def copy_files(self, files_paths, trg_path, resources=None, rel_path=None): 00780 # Handle a single file 00781 if not isinstance(files_paths, list): 00782 files_paths = [files_paths] 00783 00784 for source in files_paths: 00785 if source is None: 00786 files_paths.remove(source) 00787 00788 for source in files_paths: 00789 if resources is not None and source in resources.file_basepath: 00790 relative_path = relpath(source, resources.file_basepath[source]) 00791 elif rel_path is not None: 00792 relative_path = relpath(source, rel_path) 00793 else: 00794 _, relative_path = split(source) 00795 00796 target = join(trg_path, relative_path) 00797 00798 if (target != source) and (self.need_update(target, [source])): 00799 self.progress("copy", relative_path) 00800 mkdir(dirname(target)) 00801 copyfile(source, target) 00802 00803 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 00804 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00805 def relative_object_path(self, build_path, base_dir, source): 00806 source_dir, name, _ = split_path(source) 00807 00808 obj_dir = relpath(join(build_path, relpath(source_dir, base_dir))) 00809 if obj_dir is not self.prev_dir: 00810 self.prev_dir = obj_dir 00811 mkdir(obj_dir) 00812 return join(obj_dir, name + '.o') 00813 00814 # Generate response file for all includes. 00815 # ARM, GCC, IAR cross compatible 00816 def get_inc_file(self, includes): 00817 include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5) 00818 if not exists(include_file): 00819 with open(include_file, "w") as f: 00820 cmd_list = [] 00821 for c in includes: 00822 if c: 00823 c = c.replace("\\", "/") 00824 if self.CHROOT: 00825 c = c.replace(self.CHROOT, '') 00826 cmd_list.append('"-I%s"' % c) 00827 string = " ".join(cmd_list) 00828 f.write(string) 00829 return include_file 00830 00831 # Generate response file for all objects when linking. 00832 # ARM, GCC, IAR cross compatible 00833 def get_link_file(self, cmd): 00834 link_file = join(self.build_dir, ".link_files.txt") 00835 with open(link_file, "w") as f: 00836 cmd_list = [] 00837 for c in cmd: 00838 if c: 00839 c = c.replace("\\", "/") 00840 if self.CHROOT: 00841 c = c.replace(self.CHROOT, '') 00842 cmd_list.append(('"%s"' % c) if not c.startswith('-') else c) 00843 string = " ".join(cmd_list) 00844 f.write(string) 00845 return link_file 00846 00847 # Generate response file for all objects when archiving. 00848 # ARM, GCC, IAR cross compatible 00849 def get_arch_file(self, objects): 00850 archive_file = join(self.build_dir, ".archive_files.txt") 00851 with open(archive_file, "w") as f: 00852 o_list = [] 00853 for o in objects: 00854 o_list.append('"%s"' % o) 00855 string = " ".join(o_list).replace("\\", "/") 00856 f.write(string) 00857 return archive_file 00858 00859 # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM 00860 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 00861 def compile_sources(self, resources, inc_dirs=None): 00862 # Web IDE progress bar for project build 00863 files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources 00864 self.to_be_compiled = len(files_to_compile) 00865 self.compiled = 0 00866 00867 self.notify.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()])) 00868 00869 inc_paths = resources.inc_dirs 00870 if inc_dirs is not None: 00871 if isinstance(inc_dirs, list): 00872 inc_paths.extend(inc_dirs) 00873 else: 00874 inc_paths.append(inc_dirs) 00875 # De-duplicate include paths 00876 inc_paths = set(inc_paths) 00877 # Sort include paths for consistency 00878 inc_paths = sorted(set(inc_paths)) 00879 # Unique id of all include paths 00880 self.inc_md5 = md5(' '.join(inc_paths).encode('utf-8')).hexdigest() 00881 00882 objects = [] 00883 queue = [] 00884 work_dir = getcwd() 00885 self.prev_dir = None 00886 00887 # Generate configuration header (this will update self.build_all if needed) 00888 self.get_config_header() 00889 self.dump_build_profile() 00890 00891 # Sort compile queue for consistency 00892 files_to_compile.sort() 00893 for source in files_to_compile: 00894 object = self.relative_object_path( 00895 self.build_dir, resources.file_basepath[source], source) 00896 00897 # Queue mode (multiprocessing) 00898 commands = self.compile_command(source, object, inc_paths) 00899 if commands is not None: 00900 queue.append({ 00901 'source': source, 00902 'object': object, 00903 'commands': commands, 00904 'work_dir': work_dir, 00905 'chroot': self.CHROOT 00906 }) 00907 else: 00908 self.compiled += 1 00909 objects.append(object) 00910 00911 # Use queues/multiprocessing if cpu count is higher than setting 00912 jobs = self.jobs if self.jobs else cpu_count() 00913 if jobs > CPU_COUNT_MIN and len(queue) > jobs: 00914 return self.compile_queue(queue, objects) 00915 else: 00916 return self.compile_seq(queue, objects) 00917 00918 # Compile source files queue in sequential order 00919 def compile_seq(self, queue, objects): 00920 for item in queue: 00921 result = compile_worker(item) 00922 00923 self.compiled += 1 00924 self.progress("compile", item['source'], build_update=True) 00925 for res in result['results']: 00926 self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source']) 00927 self.compile_output([ 00928 res['code'], 00929 res['output'], 00930 res['command'] 00931 ]) 00932 objects.append(result['object']) 00933 return objects 00934 00935 # Compile source files queue in parallel by creating pool of worker threads 00936 def compile_queue(self, queue, objects): 00937 jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF) 00938 p = Pool(processes=jobs_count) 00939 00940 results = [] 00941 for i in range(len(queue)): 00942 results.append(p.apply_async(compile_worker, [queue[i]])) 00943 p.close() 00944 00945 itr = 0 00946 while len(results): 00947 itr += 1 00948 if itr > 180000: 00949 p.terminate() 00950 p.join() 00951 raise ToolException("Compile did not finish in 5 minutes") 00952 00953 sleep(0.01) 00954 pending = 0 00955 for r in results: 00956 if r.ready(): 00957 try: 00958 result = r.get() 00959 results.remove(r) 00960 00961 self.compiled += 1 00962 self.progress("compile", result['source'], build_update=True) 00963 for res in result['results']: 00964 self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source']) 00965 self.compile_output([ 00966 res['code'], 00967 res['output'], 00968 res['command'] 00969 ]) 00970 objects.append(result['object']) 00971 except ToolException as err: 00972 if p._taskqueue.queue: 00973 p._taskqueue.queue.clear() 00974 sleep(0.5) 00975 p.terminate() 00976 p.join() 00977 raise ToolException(err) 00978 else: 00979 pending += 1 00980 if pending >= jobs_count: 00981 break 00982 00983 results = None 00984 p.join() 00985 00986 return objects 00987 00988 # Determine the compile command based on type of source file 00989 def compile_command(self, source, object, includes): 00990 # Check dependencies 00991 _, ext = splitext(source) 00992 ext = ext.lower() 00993 00994 source = abspath(source) if PRINT_COMPILER_OUTPUT_AS_LINK else source 00995 00996 if ext == '.c' or ext == '.cpp': 00997 base, _ = splitext(object) 00998 dep_path = base + '.d' 00999 try: 01000 deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else [] 01001 except (IOError, IndexError): 01002 deps = [] 01003 config_file = ([self.config.app_config_location] 01004 if self.config.app_config_location else []) 01005 deps.extend(config_file) 01006 if ext == '.cpp' or self.COMPILE_C_AS_CPP: 01007 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-cxx")) 01008 else: 01009 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-c")) 01010 if len(deps) == 0 or self.need_update(object, deps): 01011 if ext == '.cpp' or self.COMPILE_C_AS_CPP: 01012 return self.compile_cpp(source, object, includes) 01013 else: 01014 return self.compile_c(source, object, includes) 01015 elif ext == '.s': 01016 deps = [source] 01017 deps.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-asm")) 01018 if self.need_update(object, deps): 01019 return self.assemble(source, object, includes) 01020 else: 01021 return False 01022 01023 return None 01024 01025 def parse_dependencies(self, dep_path): 01026 """Parse the dependency information generated by the compiler. 01027 01028 Positional arguments: 01029 dep_path -- the path to a file generated by a previous run of the compiler 01030 01031 Return value: 01032 A list of all source files that the dependency file indicated were dependencies 01033 01034 Side effects: 01035 None 01036 01037 Note: A default implementation is provided for make-like file formats 01038 """ 01039 dependencies = [] 01040 buff = open(dep_path).readlines() 01041 if buff: 01042 buff[0] = re.sub('^(.*?)\: ', '', buff[0]) 01043 for line in buff: 01044 filename = line.replace('\\\n', '').strip() 01045 if filename: 01046 filename = filename.replace('\\ ', '\a') 01047 dependencies.extend(((self.CHROOT if self.CHROOT else '') + 01048 f.replace('\a', ' ')) 01049 for f in filename.split(" ")) 01050 return list(filter(None, dependencies)) 01051 01052 def is_not_supported_error(self, output): 01053 return "#error directive: [NOT_SUPPORTED]" in output 01054 01055 @abstractmethod 01056 def parse_output(self, output): 01057 """Take in compiler output and extract sinlge line warnings and errors from it. 01058 01059 Positional arguments: 01060 output -- a string of all the messages emitted by a run of the compiler 01061 01062 Return value: 01063 None 01064 01065 Side effects: 01066 call self.cc_info or self.notify with a description of the event generated by the compiler 01067 """ 01068 raise NotImplemented 01069 01070 def compile_output(self, output=[]): 01071 _rc = output[0] 01072 _stderr = output[1].decode("utf-8") 01073 command = output[2] 01074 01075 # Parse output for Warnings and Errors 01076 self.parse_output(_stderr) 01077 self.notify.debug("Return: %s"% _rc) 01078 for error_line in _stderr.splitlines(): 01079 self.notify.debug("Output: %s"% error_line) 01080 01081 # Check return code 01082 if _rc != 0: 01083 if self.is_not_supported_error(_stderr): 01084 raise NotSupportedException(_stderr) 01085 else: 01086 raise ToolException(_stderr) 01087 01088 def build_library(self, objects, dir, name): 01089 needed_update = False 01090 lib = self.STD_LIB_NAME % name 01091 fout = join(dir, lib) 01092 if self.need_update(fout, objects): 01093 self.notify.info("Library: %s" % lib) 01094 self.archive(objects, fout) 01095 needed_update = True 01096 01097 return needed_update 01098 01099 def link_program(self, r, tmp_path, name): 01100 needed_update = False 01101 ext = 'bin' 01102 if hasattr(self.target, 'OUTPUT_EXT'): 01103 ext = self.target.OUTPUT_EXT 01104 01105 if hasattr(self.target, 'OUTPUT_NAMING'): 01106 self.notify.var("binary_naming", self.target.OUTPUT_NAMING) 01107 if self.target.OUTPUT_NAMING == "8.3": 01108 name = name[0:8] 01109 ext = ext[0:3] 01110 01111 # Create destination directory 01112 head, tail = split(name) 01113 new_path = join(tmp_path, head) 01114 mkdir(new_path) 01115 01116 filename = name+'.'+ext 01117 # Absolute path of the final linked file 01118 full_path = join(tmp_path, filename) 01119 elf = join(tmp_path, name + '.elf') 01120 bin = None if ext == 'elf' else full_path 01121 map = join(tmp_path, name + '.map') 01122 01123 r.objects = sorted(set(r.objects)) 01124 config_file = ([self.config.app_config_location] 01125 if self.config.app_config_location else []) 01126 dependencies = r.objects + r.libraries + [r.linker_script] + config_file 01127 dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) 01128 if self.need_update(elf, dependencies): 01129 needed_update = True 01130 self.progress("link", name) 01131 self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script) 01132 01133 if bin and self.need_update(bin, [elf]): 01134 needed_update = True 01135 self.progress("elf2bin", name) 01136 self.binary(r, elf, bin) 01137 01138 # Initialize memap and process map file. This doesn't generate output. 01139 self.mem_stats(map) 01140 01141 self.notify.var("compile_succeded", True) 01142 self.notify.var("binary", filename) 01143 01144 return full_path, needed_update 01145 01146 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01147 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01148 def default_cmd(self, command): 01149 _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT) 01150 self.notify.debug("Return: %s"% _rc) 01151 01152 for output_line in _stdout.splitlines(): 01153 self.notify.debug("Output: %s"% output_line) 01154 for error_line in _stderr.splitlines(): 01155 self.notify.debug("Errors: %s"% error_line) 01156 01157 if _rc != 0: 01158 for line in _stderr.splitlines(): 01159 self.notify.tool_error(line) 01160 raise ToolException(_stderr) 01161 01162 def progress(self, action, file, build_update=False): 01163 if build_update: 01164 percent = 100. * float(self.compiled) / float(self.to_be_compiled) 01165 else: 01166 percent = None 01167 self.notify.progress(action, file, percent) 01168 01169 # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM 01170 # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY 01171 def mem_stats(self, map): 01172 """! Creates parser object 01173 @param map Path to linker map file to parse and decode 01174 @return None 01175 """ 01176 toolchain = self.__class__.__name__ 01177 01178 # Create memap object 01179 memap = MemapParser() 01180 01181 # Parse and decode a map file 01182 if memap.parse(abspath(map), toolchain) is False: 01183 self.notify.info("Unknown toolchain for memory statistics %s" % toolchain) 01184 return None 01185 01186 # Store the memap instance for later use 01187 self.memap_instance = memap 01188 01189 # Note: memory statistics are not returned. 01190 # Need call to generate_output later (depends on depth & output format) 01191 01192 return None 01193 01194 def add_regions(self): 01195 """Add regions to the build profile, if there are any. 01196 """ 01197 regions = list(self.config.regions) 01198 self.notify.info("Using regions %s in this build." 01199 % ", ".join(region.name for region in regions)) 01200 for region in regions: 01201 for define in [(region.name.upper() + "_ADDR", region.start), 01202 (region.name.upper() + "_SIZE", region.size)]: 01203 define_string = "-D%s=0x%x" % define 01204 self.cc.append(define_string) 01205 self.cppc.append(define_string) 01206 self.flags["common"].append(define_string) 01207 if region.active: 01208 for define in [("MBED_APP_START", region.start), 01209 ("MBED_APP_SIZE", region.size)]: 01210 define_string = self.make_ld_define(*define) 01211 self.ld.append(define_string) 01212 self.flags["ld"].append(define_string) 01213 self.notify.info(" Region %s: size 0x%x, offset 0x%x" 01214 % (region.name, region.size, region.start)) 01215 01216 # Set the configuration data 01217 def set_config_data(self, config_data): 01218 self.config_data = config_data 01219 if self.config.has_regions: 01220 self.add_regions() 01221 01222 # Creates the configuration header if needed: 01223 # - if there is no configuration data, "mbed_config.h" is not create (or deleted if it exists). 01224 # - if there is configuration data and "mbed_config.h" does not exist, it is created. 01225 # - if there is configuration data similar to the previous configuration data, 01226 # "mbed_config.h" is left untouched. 01227 # - if there is new configuration data, "mbed_config.h" is overriden. 01228 # The function needs to be called exactly once for the lifetime of this toolchain instance. 01229 # The "config_processed" variable (below) ensures this behaviour. 01230 # The function returns the location of the configuration file, or None if there is no 01231 # configuration data available (and thus no configuration file) 01232 def get_config_header(self): 01233 if self.config_processed: # this function was already called, return its result 01234 return self.config_file 01235 # The config file is located in the build directory 01236 self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME) 01237 # If the file exists, read its current content in prev_data 01238 if exists(self.config_file): 01239 with open(self.config_file, "r") as f: 01240 prev_data = f.read() 01241 else: 01242 prev_data = None 01243 # Get the current configuration data 01244 crt_data = self.config.config_to_header(self.config_data) if self.config_data else None 01245 # "changed" indicates if a configuration change was detected 01246 changed = False 01247 if prev_data is not None: # a previous mbed_config.h exists 01248 if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed 01249 remove(self.config_file) 01250 self.config_file = None # this means "config file not present" 01251 changed = True 01252 elif crt_data != prev_data: # different content of config file 01253 with open(self.config_file, "w") as f: 01254 f.write(crt_data) 01255 changed = True 01256 else: # a previous mbed_config.h does not exist 01257 if crt_data is not None: # there's configuration data available 01258 with open(self.config_file, "w") as f: 01259 f.write(crt_data) 01260 changed = True 01261 else: 01262 self.config_file = None # this means "config file not present" 01263 # If there was a change in configuration, rebuild everything 01264 self.build_all = changed 01265 # Make sure that this function will only return the location of the configuration 01266 # file for subsequent calls, without trying to manipulate its content in any way. 01267 self.config_processed = True 01268 return self.config_file 01269 01270 def dump_build_profile(self): 01271 """Dump the current build profile and macros into the `.profile` file 01272 in the build directory""" 01273 for key in ["cxx", "c", "asm", "ld"]: 01274 to_dump = (str(self.flags[key]) + str(sorted(self.macros))) 01275 if key in ["cxx", "c"]: 01276 to_dump += str(self.flags['common']) 01277 where = join(self.build_dir, self.PROFILE_FILE_NAME + "-" + key) 01278 self._overwrite_when_not_equal(where, to_dump) 01279 01280 @staticmethod 01281 def _overwrite_when_not_equal(filename, content): 01282 if not exists(filename) or content != open(filename).read(): 01283 with open(filename, "w") as out: 01284 out.write(content) 01285 01286 @staticmethod 01287 def generic_check_executable(tool_key, executable_name, levels_up, 01288 nested_dir=None): 01289 """ 01290 Positional args: 01291 tool_key: the key to index TOOLCHAIN_PATHS 01292 executable_name: the toolchain's named executable (ex. armcc) 01293 levels_up: each toolchain joins the toolchain_path, some 01294 variable directories (bin, include), and the executable name, 01295 so the TOOLCHAIN_PATH value must be appropriately distanced 01296 01297 Keyword args: 01298 nested_dir: the directory within TOOLCHAIN_PATHS where the executable 01299 is found (ex: 'bin' for ARM\bin\armcc (necessary to check for path 01300 that will be used by toolchain's compile) 01301 01302 Returns True if the executable location specified by the user 01303 exists and is valid OR the executable can be found on the PATH. 01304 Returns False otherwise. 01305 """ 01306 # Search PATH if user did not specify a path or specified path doesn't 01307 # exist. 01308 if not TOOLCHAIN_PATHS[tool_key] or not exists(TOOLCHAIN_PATHS[tool_key]): 01309 exe = find_executable(executable_name) 01310 if not exe: 01311 return False 01312 for level in range(levels_up): 01313 # move up the specified number of directories 01314 exe = dirname(exe) 01315 TOOLCHAIN_PATHS[tool_key] = exe 01316 if nested_dir: 01317 subdir = join(TOOLCHAIN_PATHS[tool_key], nested_dir, 01318 executable_name) 01319 else: 01320 subdir = join(TOOLCHAIN_PATHS[tool_key],executable_name) 01321 # User could have specified a path that exists but does not contain exe 01322 return exists(subdir) or exists(subdir +'.exe') 01323 01324 @abstractmethod 01325 def check_executable(self): 01326 """Returns True if the executable (armcc) location specified by the 01327 user exists OR the executable can be found on the PATH. 01328 Returns False otherwise.""" 01329 raise NotImplemented 01330 01331 @abstractmethod 01332 def get_config_option(self, config_header): 01333 """Generate the compiler option that forces the inclusion of the configuration 01334 header file. 01335 01336 Positional arguments: 01337 config_header -- The configuration header that will be included within all source files 01338 01339 Return value: 01340 A list of the command line arguments that will force the inclusion the specified header 01341 01342 Side effects: 01343 None 01344 """ 01345 raise NotImplemented 01346 01347 @abstractmethod 01348 def get_compile_options(self, defines, includes, for_asm=False): 01349 """Generate the compiler options from the defines and includes 01350 01351 Positional arguments: 01352 defines -- The preprocessor macros defined on the command line 01353 includes -- The include file search paths 01354 01355 Keyword arguments: 01356 for_asm -- generate the assembler options instead of the compiler options 01357 01358 Return value: 01359 A list of the command line arguments that will force the inclusion the specified header 01360 01361 Side effects: 01362 None 01363 """ 01364 raise NotImplemented 01365 01366 @abstractmethod 01367 def assemble(self, source, object, includes): 01368 """Generate the command line that assembles. 01369 01370 Positional arguments: 01371 source -- a file path that is the file to assemble 01372 object -- a file path that is the destination object 01373 includes -- a list of all directories where header files may be found 01374 01375 Return value: 01376 The complete command line, as a list, that would invoke the assembler 01377 on the source file, include all the include paths, and generate 01378 the specified object file. 01379 01380 Side effects: 01381 None 01382 01383 Note: 01384 This method should be decorated with @hook_tool. 01385 """ 01386 raise NotImplemented 01387 01388 @abstractmethod 01389 def compile_c(self, source, object, includes): 01390 """Generate the command line that compiles a C source file. 01391 01392 Positional arguments: 01393 source -- the C source file to compile 01394 object -- the destination object file 01395 includes -- a list of all the directories where header files may be found 01396 01397 Return value: 01398 The complete command line, as a list, that would invoke the C compiler 01399 on the source file, include all the include paths, and generate the 01400 specified object file. 01401 01402 Side effects: 01403 None 01404 01405 Note: 01406 This method should be decorated with @hook_tool. 01407 """ 01408 raise NotImplemented 01409 01410 @abstractmethod 01411 def compile_cpp(self, source, object, includes): 01412 """Generate the command line that compiles a C++ source file. 01413 01414 Positional arguments: 01415 source -- the C++ source file to compile 01416 object -- the destination object file 01417 includes -- a list of all the directories where header files may be found 01418 01419 Return value: 01420 The complete command line, as a list, that would invoke the C++ compiler 01421 on the source file, include all the include paths, and generate the 01422 specified object file. 01423 01424 Side effects: 01425 None 01426 01427 Note: 01428 This method should be decorated with @hook_tool. 01429 """ 01430 raise NotImplemented 01431 01432 @abstractmethod 01433 def link(self, output, objects, libraries, lib_dirs, mem_map): 01434 """Run the linker to create an executable and memory map. 01435 01436 Positional arguments: 01437 output -- the file name to place the executable in 01438 objects -- all of the object files to link 01439 libraries -- all of the required libraries 01440 lib_dirs -- where the required libraries are located 01441 mem_map -- the location where the memory map file should be stored 01442 01443 Return value: 01444 None 01445 01446 Side effect: 01447 Runs the linker to produce the executable. 01448 01449 Note: 01450 This method should be decorated with @hook_tool. 01451 """ 01452 raise NotImplemented 01453 01454 @abstractmethod 01455 def archive(self, objects, lib_path): 01456 """Run the command line that creates an archive. 01457 01458 Positional arguhments: 01459 objects -- a list of all the object files that should be archived 01460 lib_path -- the file name of the resulting library file 01461 01462 Return value: 01463 None 01464 01465 Side effect: 01466 Runs the archiving tool to produce the library file. 01467 01468 Note: 01469 This method should be decorated with @hook_tool. 01470 """ 01471 raise NotImplemented 01472 01473 @abstractmethod 01474 def binary(self, resources, elf, bin): 01475 """Run the command line that will Extract a simplified binary file. 01476 01477 Positional arguments: 01478 resources -- A resources object (Is not used in any of the toolchains) 01479 elf -- the executable file that is to be converted 01480 bin -- the file name of the to be created simplified binary file 01481 01482 Return value: 01483 None 01484 01485 Side effect: 01486 Runs the elf2bin tool to produce the simplified binary file. 01487 01488 Note: 01489 This method should be decorated with @hook_tool. 01490 """ 01491 raise NotImplemented 01492 01493 @staticmethod 01494 @abstractmethod 01495 def name_mangle(name): 01496 """Mangle a name based on the conventional name mangling of this toolchain 01497 01498 Positional arguments: 01499 name -- the name to mangle 01500 01501 Return: 01502 the mangled name as a string 01503 """ 01504 raise NotImplemented 01505 01506 @staticmethod 01507 @abstractmethod 01508 def make_ld_define(name, value): 01509 """Create an argument to the linker that would define a symbol 01510 01511 Positional arguments: 01512 name -- the symbol to define 01513 value -- the value to give the symbol 01514 01515 Return: 01516 The linker flag as a string 01517 """ 01518 raise NotImplemented 01519 01520 @staticmethod 01521 @abstractmethod 01522 def redirect_symbol(source, sync, build_dir): 01523 """Redirect a symbol at link time to point at somewhere else 01524 01525 Positional arguments: 01526 source -- the symbol doing the pointing 01527 sync -- the symbol being pointed to 01528 build_dir -- the directory to put "response files" if needed by the toolchain 01529 01530 Side Effects: 01531 Possibly create a file in the build directory 01532 01533 Return: 01534 The linker flag to redirect the symbol, as a string 01535 """ 01536 raise NotImplemented 01537 01538 # Return the list of macros geenrated by the build system 01539 def get_config_macros(self): 01540 return self.config.config_to_macros(self.config_data) if self.config_data else [] 01541 01542 @property 01543 def report(self): 01544 to_ret = {} 01545 to_ret['c_compiler'] = {'flags': copy(self.flags['c']), 01546 'symbols': self.get_symbols()} 01547 to_ret['cxx_compiler'] = {'flags': copy(self.flags['cxx']), 01548 'symbols': self.get_symbols()} 01549 to_ret['assembler'] = {'flags': copy(self.flags['asm']), 01550 'symbols': self.get_symbols(True)} 01551 to_ret['linker'] = {'flags': copy(self.flags['ld'])} 01552 to_ret.update(self.config.report) 01553 return to_ret 01554 01555 from tools.settings import ARM_PATH, ARMC6_PATH, GCC_ARM_PATH, IAR_PATH 01556 01557 TOOLCHAIN_PATHS = { 01558 'ARM': ARM_PATH, 01559 'uARM': ARM_PATH, 01560 'ARMC6': ARMC6_PATH, 01561 'GCC_ARM': GCC_ARM_PATH, 01562 'IAR': IAR_PATH 01563 } 01564 01565 from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6 01566 from tools.toolchains.gcc import GCC_ARM 01567 from tools.toolchains.iar import IAR 01568 01569 TOOLCHAIN_CLASSES = { 01570 u'ARM': ARM_STD, 01571 u'uARM': ARM_MICRO, 01572 u'ARMC6': ARMC6, 01573 u'GCC_ARM': GCC_ARM, 01574 u'IAR': IAR 01575 } 01576 01577 TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
Generated on Tue Jul 12 2022 12:43:27 by
1.7.2