Dependencies: MAX44000 PWM_Tone_Library nexpaq_mdk
Fork of LED_Demo by
__init__.py
00001 """ 00002 mbed SDK 00003 Copyright (c) 2011-2013 ARM Limited 00004 00005 Licensed under the Apache License, Version 2.0 (the "License"); 00006 you may not use this file except in compliance with the License. 00007 You may obtain a copy of the License at 00008 00009 http://www.apache.org/licenses/LICENSE-2.0 00010 00011 Unless required by applicable law or agreed to in writing, software 00012 distributed under the License is distributed on an "AS IS" BASIS, 00013 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 00014 See the License for the specific language governing permissions and 00015 limitations under the License. 00016 """ 00017 00018 import re 00019 import sys 00020 from os import stat, walk, getcwd, sep, remove 00021 from copy import copy 00022 from time import time, sleep 00023 from types import ListType 00024 from shutil import copyfile 00025 from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir 00026 from inspect import getmro 00027 from copy import deepcopy 00028 from tools.config import Config 00029 from abc import ABCMeta, abstractmethod 00030 00031 from multiprocessing import Pool, cpu_count 00032 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker 00033 from tools.settings import BUILD_OPTIONS, MBED_ORG_USER 00034 import tools.hooks as hooks 00035 from tools.memap import MemapParser 00036 from hashlib import md5 00037 import fnmatch 00038 00039 00040 #Disables multiprocessing if set to higher number than the host machine CPUs 00041 CPU_COUNT_MIN = 1 00042 CPU_COEF = 1 00043 00044 class Resources: 00045 def __init__(self, base_path=None): 00046 self.base_path = base_path 00047 00048 self.file_basepath = {} 00049 00050 self.inc_dirs = [] 00051 self.headers = [] 00052 00053 self.s_sources = [] 00054 self.c_sources = [] 00055 self.cpp_sources = [] 00056 00057 self.lib_dirs = set([]) 00058 self.objects = [] 00059 self.libraries = [] 00060 00061 # mbed special files 00062 self.lib_builds = [] 00063 self.lib_refs = [] 00064 00065 self.repo_dirs = [] 00066 self.repo_files = [] 00067 00068 self.linker_script = None 00069 00070 # Other files 00071 self.hex_files = [] 00072 self.bin_files = [] 00073 self.json_files = [] 00074 00075 # Features 00076 self.features = {} 00077 00078 def __add__(self, resources): 00079 if resources is None: 00080 return self 00081 else: 00082 return self.add(resources) 00083 00084 def __radd__(self, resources): 00085 if resources is None: 00086 return self 00087 else: 00088 return self.add(resources) 00089 00090 def add(self, resources): 00091 for f,p in resources.file_basepath.items(): 00092 self.file_basepath[f] = p 00093 00094 self.inc_dirs += resources.inc_dirs 00095 self.headers += resources.headers 00096 00097 self.s_sources += resources.s_sources 00098 self.c_sources += resources.c_sources 00099 self.cpp_sources += resources.cpp_sources 00100 00101 self.lib_dirs |= resources.lib_dirs 00102 self.objects += resources.objects 00103 self.libraries += resources.libraries 00104 00105 self.lib_builds += resources.lib_builds 00106 self.lib_refs += resources.lib_refs 00107 00108 self.repo_dirs += resources.repo_dirs 00109 self.repo_files += resources.repo_files 00110 00111 if resources.linker_script is not None: 00112 self.linker_script = resources.linker_script 00113 00114 self.hex_files += resources.hex_files 00115 self.bin_files += resources.bin_files 00116 self.json_files += resources.json_files 00117 00118 
        self.features.update(resources.features)

        return self

    def relative_to(self, base, dot=False):
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)

        self.features = {k: f.relative_to(base, dot) for k, f in self.features.iteritems() if f}

        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
                      'hex_files', 'bin_files', 'json_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)

        self.features = {k: f.win_to_unix() for k, f in self.features.iteritems() if f}

        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),

                ('Features', self.features),
            ):
            if resources:
                s.append('%s:\n ' % label + '\n '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)

# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'uARM', 'IAR',
    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
])
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}


def check_toolchain_path(function):
    """Check if the path to toolchain is valid. Exit if not.
    Use this function as a decorator. Causes a system exit if the path does
    not exist. Execute the function as normal if the path does exist.

    Positional arguments:
    function -- the function to decorate
    """
    def perform_check(self, *args, **kwargs):
        if not exists(self.toolchain_path) and not exists(self.toolchain_path+'.exe'):
            error_string = 'Could not find executable for %s.\n Currently ' \
                           'set search path: %s'% (self.name, self.toolchain_path)
            raise Exception(error_string)
        return function(self, *args, **kwargs)
    return perform_check


class mbedToolchain:
    # Verbose logging
    VERBOSE = True

    # Compile C files as CPP
    COMPILE_C_AS_CPP = False

    # Response files for compiling, includes, linking and archiving.
    # Not needed on posix systems where the typical arg limit is 2 megabytes
    RESPONSE_FILES = True

    CORTEX_SYMBOLS = {
        "Cortex-M0"   : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+"  : ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1"   : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M3"   : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4"   : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F"  : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7"   : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F"  : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9"   : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
    }

    MBED_CONFIG_FILE_NAME="mbed_config.h"

    __metaclass__ = ABCMeta

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        self.target = target
        self.name = self.__class__.__name__

        # compile/assemble/link/binary hooks
        self.hook = hooks.Hook(target, self)

        # Toolchain flags
        self.flags = deepcopy(self.DEFAULT_FLAGS)

        # User-defined macros
        self.macros = macros or []

        # Macros generated from toolchain and target rules/features
        self.asm_symbols = None
        self.cxx_symbols = None

        # Labels generated from toolchain and target rules/features (used for selective build)
        self.labels = None

        # This will hold the initialized config object
        self.config = None

        # This will hold the configuration data (as returned by Config.get_config_data())
        self.config_data = None

        # This will hold the location of the configuration file or None if there's no configuration available
        self.config_file = None

        # Call guard for "get_config_data" (see the comments of get_config_data for details)
        self.config_processed = False

        # Non-incremental compile
        self.build_all = False

        # Build output dir
        self.build_dir = None
        self.timestamp = time()

        # Output build naming based on target+toolchain combo (mbed 2.0 builds)
        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)

        # Number of concurrent build jobs. 0 means auto (based on host system cores)
        self.jobs = 0

        # Ignore patterns from .mbedignore files
        self.ignore_patterns = []

        # Pre-mbed 2.0 ignore dirs
        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        # Output notify function
        # This function is passed all events, and expected to handle notification of the
        # user, emit the events to a log, etc.
        # The API for all notify methods passed into the notify parameter is as follows:
        # def notify(Event, Silent)
        # Where *Event* is a dict representing the toolchain event that was generated
        #       e.g.: a compile succeeded, or a warning was emitted by the compiler
        #       or an application was linked
        #       *Silent* is a boolean
        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        # Silent builds (no output)
        self.silent = silent

        # Print output buffer
        self.output = str()
        self.map_outputs = list()   # Place to store memmap scan results in JSON like data structures

        # Build options passed by -o flag
        self.options = options if options is not None else []

        # Build options passed by settings.py or mbed_settings.py
        self.options.extend(BUILD_OPTIONS)

        if self.options:
            self.info("Build Options: %s" % (', '.join(self.options)))

        # uVisor specific rules
        if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
            self.target.core = re.sub(r"F$", '', self.target.core)

        # Stats cache is used to reduce the amount of IO requests to stat
        # header files during dependency change. See need_update()
        self.stat_cache = {}

        # Used by the mbed Online Build System to build in chrooted environment
        self.CHROOT = None

        # Call post __init__() hooks before the ARM/GCC_ARM/IAR toolchain __init__() takes over
        self.init()

    # Used for post __init__() hooks
    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def init(self):
        return True

    def get_output(self):
        return self.output

    def print_notify(self, event, silent=False):
        """ Default command line notification
        """
        msg = None

        if not self.VERBOSE and event['type'] == 'tool_error':
            msg = event['message']

        elif event['type'] in ['info', 'debug']:
            msg = event['message']

        elif event['type'] == 'cc':
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            msg = '[%(severity)s] %(file)s@%(line)s,%(col)s: %(message)s' % event

        elif event['type'] == 'progress':
            if not silent:
                msg = '%s: %s' % (event['action'].title(), basename(event['file']))

        if msg:
            print msg
            self.output += msg + "\n"

    def print_notify_verbose(self, event, silent=False):
        """ Default command line notification with more verbose mode
        """
        if event['type'] in ['info', 'debug']:
            self.print_notify(event) # standard handle

        elif event['type'] == 'cc':
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            event['mcu_name'] = "None"
            event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
            event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
            msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
            print msg
            self.output += msg + "\n"

        elif event['type'] == 'progress':
            self.print_notify(event) # standard handle

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def notify(self, event):
        """ Little closure for notify functions
        """
        event['toolchain'] = self
        return self.notify_fun(event, self.silent)

    def get_symbols(self, for_asm=False):
        if for_asm:
            if self.asm_symbols is None:
                self.asm_symbols = []

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Add target's symbols
                self.asm_symbols += self.target.macros
                # Add extra symbols passed via 'macros' parameter
                self.asm_symbols += self.macros
            return list(set(self.asm_symbols))  # Return only unique symbols
        else:
            if self.cxx_symbols is None:
                # Target and Toolchain symbols
                labels = self.get_labels()
                self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
                self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

                # Cortex CPU symbols
                if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                    self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

                # Symbols defined by the on-line build system
                self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
                if MBED_ORG_USER:
                    self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

                # Add target's symbols
                self.cxx_symbols += self.target.macros
                # Add target's hardware
                self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
                # Add target's features
                self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
                # Add extra symbols passed via 'macros' parameter
                self.cxx_symbols += self.macros

                # Form factor variables
                if hasattr(self.target, 'supported_form_factors'):
                    self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

            return list(set(self.cxx_symbols))  # Return only unique symbols

    # Extend the internal list of macros
    def add_macros(self, new_macros):
        self.macros.extend(new_macros)

    def get_labels(self):
        if self.labels is None:
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            self.labels = {
                'TARGET': self.target.get_labels() + ["DEBUG" if "debug-info" in self.options else "RELEASE"],
                'FEATURE': self.target.features,
                'TOOLCHAIN': toolchain_labels
            }
        return self.labels


    # Determine whether a source file needs updating/compiling
    def need_update(self, target, dependencies):
        if self.build_all:
            return True

        if not exists(target):
            return True

        target_mod_time = stat(target).st_mtime

        for d in dependencies:
            # Some objects are not provided with full path and here we do not have
            # information about the library paths. Safe option: assume an update
            if not d or not exists(d):
                return True

            if not self.stat_cache.has_key(d):
                self.stat_cache[d] = stat(d).st_mtime

            if self.stat_cache[d] >= target_mod_time:
                return True

        return False

    def is_ignored(self, file_path):
        for pattern in self.ignore_patterns:
            if fnmatch.fnmatch(file_path, pattern):
                return True
        return False

    # Create a Resources object from the path pointed to by *path* by either traversing
    # a directory structure, when *path* is a directory, or adding *path* to the resources,
    # when *path* is a file.
    # The parameter *base_path* is used to set the base_path attribute of the Resources
    # object and the parameter *exclude_paths* is used by the directory traversal to
    # exclude certain paths from the traversal.
    def scan_resources(self, path, exclude_paths=None, base_path=None):
        self.progress("scan", path)

        resources = Resources(path)
        if not base_path:
            if isfile(path):
                base_path = dirname(path)
            else:
                base_path = path
        resources.base_path = base_path

        if isfile(path):
            self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
        else:
            self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
        return resources

    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
    # on every file it considers adding to the resources object.
    def _add_dir(self, path, resources, base_path, exclude_paths=None):
        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        labels = self.get_labels()
        for root, dirs, files in walk(path, followlinks=True):
            # Check if folder contains .mbedignore
            if ".mbedignore" in files:
                with open(join(root, ".mbedignore"), "r") as f:
                    lines = f.readlines()
                    lines = [l.strip() for l in lines]                    # Strip whitespaces
                    lines = [l for l in lines if l != ""]                 # Strip empty lines
                    lines = [l for l in lines if not re.match("^#", l)]   # Strip comment lines
                    # Append root path to glob patterns and append patterns to ignore_patterns
                    self.ignore_patterns.extend([join(root, line.strip()) for line in lines])

            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
            if self.is_ignored(join(root, "")):
                continue

            for d in copy(dirs):
                dir_path = join(root, d)
                # Add internal repo folders/files. This is needed for exporters
                if d == '.hg' or d == '.git':
                    resources.repo_dirs.append(dir_path)

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    # Ignore targets that do not match the TARGET in extra_labels list
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    # Ignore toolchains that do not match the current TOOLCHAIN
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
                    # Ignore .mbedignore files
                    self.is_ignored(join(dir_path, "")) or
                    # Ignore TESTS dir
                    (d == 'TESTS')):
                    dirs.remove(d)
                elif d.startswith('FEATURE_'):
                    # Recursively scan features but ignore them in the current scan.
                    # These are dynamically added by the config system if the conditions are matched
                    resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
                    dirs.remove(d)
                elif exclude_paths:
                    for exclude_path in exclude_paths:
                        rel_path = relpath(dir_path, exclude_path)
                        if not (rel_path.startswith('..')):
                            dirs.remove(d)
                            break

            # Add root to include paths
            resources.inc_dirs.append(root)
            resources.file_basepath[root] = base_path

            for file in files:
                file_path = join(root, file)
                self._add_file(file_path, resources, base_path)

    # A helper function for both scan_resources and _add_dir. _add_file adds one file
    # (*file_path*) to the resources object based on the file type.
    def _add_file(self, file_path, resources, base_path, exclude_paths=None):
        resources.file_basepath[file_path] = base_path

        if self.is_ignored(file_path):
            return

        _, ext = splitext(file_path)
        ext = ext.lower()

        if ext == '.s':
            resources.s_sources.append(file_path)

        elif ext == '.c':
            resources.c_sources.append(file_path)

        elif ext == '.cpp':
            resources.cpp_sources.append(file_path)

        elif ext == '.h' or ext == '.hpp':
            resources.headers.append(file_path)

        elif ext == '.o':
            resources.objects.append(file_path)

        elif ext == self.LIBRARY_EXT:
            resources.libraries.append(file_path)
            resources.lib_dirs.add(dirname(file_path))

        elif ext == self.LINKER_EXT:
            if resources.linker_script is not None:
                self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
            resources.linker_script = file_path

        elif ext == '.lib':
            resources.lib_refs.append(file_path)

        elif ext == '.bld':
            resources.lib_builds.append(file_path)

        elif file == '.hgignore':
            resources.repo_files.append(file_path)

        elif ext == '.hex':
            resources.hex_files.append(file_path)

        elif ext == '.bin':
            resources.bin_files.append(file_path)

        elif ext == '.json':
            resources.json_files.append(file_path)


    def scan_repository(self, path):
        resources = []

        for root, dirs, files in walk(path):
            # Remove ignored directories
            for d in copy(dirs):
                if d == '.' or d == '..':
                    dirs.remove(d)

            for file in files:
                file_path = join(root, file)
                resources.append(file_path)

        return resources

    def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
        # Handle a single file
        if type(files_paths) != ListType: files_paths = [files_paths]

        for source in files_paths:
            if source is None:
                files_paths.remove(source)

        for source in files_paths:
            if resources is not None and resources.file_basepath.has_key(source):
                relative_path = relpath(source, resources.file_basepath[source])
            elif rel_path is not None:
                relative_path = relpath(source, rel_path)
            else:
                _, relative_path = split(source)

            target = join(trg_path, relative_path)

            if (target != source) and (self.need_update(target, [source])):
                self.progress("copy", relative_path)
                mkdir(dirname(target))
                copyfile(source, target)

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def relative_object_path(self, build_path, base_dir, source):
        source_dir, name, _ = split_path(source)

        obj_dir = join(build_path, relpath(source_dir, base_dir))
        if obj_dir is not self.prev_dir:
            self.prev_dir = obj_dir
            mkdir(obj_dir)
        return join(obj_dir, name + '.o')

    # Generate response file for all includes.
    # ARM, GCC, IAR cross compatible
    def get_inc_file(self, includes):
        include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5)
        if not exists(include_file):
            with open(include_file, "wb") as f:
                cmd_list = []
                for c in includes:
                    if c:
                        c = c.replace("\\", "/")
                        if self.CHROOT:
                            c = c.replace(self.CHROOT, '')
                        cmd_list.append('-I%s' % c)
                string = " ".join(cmd_list)
                f.write(string)
        return include_file

    # Generate response file for all objects when linking.
    # ARM, GCC, IAR cross compatible
    def get_link_file(self, cmd):
        link_file = join(self.build_dir, ".link_files.txt")
        with open(link_file, "wb") as f:
            cmd_list = []
            for c in cmd:
                if c:
                    c = c.replace("\\", "/")
                    if self.CHROOT:
                        c = c.replace(self.CHROOT, '')
                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
            string = " ".join(cmd_list)
            f.write(string)
        return link_file

    # Generate response file for all objects when archiving.
    # ARM, GCC, IAR cross compatible
    def get_arch_file(self, objects):
        archive_file = join(self.build_dir, ".archive_files.txt")
        with open(archive_file, "wb") as f:
            o_list = []
            for o in objects:
                o_list.append('"%s"' % o)
            string = " ".join(o_list).replace("\\", "/")
            f.write(string)
        return archive_file

    # THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    @check_toolchain_path
    def compile_sources(self, resources, build_path, inc_dirs=None):
        # Web IDE progress bar for project build
        files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
        self.to_be_compiled = len(files_to_compile)
        self.compiled = 0

        self.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))

        inc_paths = resources.inc_dirs
        if inc_dirs is not None:
            inc_paths.extend(inc_dirs)
        # De-duplicate include paths
        inc_paths = set(inc_paths)
        # Sort include paths for consistency
        inc_paths = sorted(set(inc_paths))
        # Unique id of all include paths
        self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()
        # Where to store response files
        self.build_dir = build_path

        objects = []
        queue = []
        work_dir = getcwd()
        self.prev_dir = None

        # Generate configuration header (this will update self.build_all if needed)
        self.get_config_header()

        # Sort compile queue for consistency
        files_to_compile.sort()
        for source in files_to_compile:
            object = self.relative_object_path(build_path, resources.file_basepath[source], source)

            # Queue mode (multiprocessing)
            commands = self.compile_command(source, object, inc_paths)
            if commands is not None:
                queue.append({
                    'source': source,
                    'object': object,
                    'commands': commands,
                    'work_dir': work_dir,
                    'chroot': self.CHROOT
                })
            else:
                objects.append(object)

        # Use queues/multiprocessing if cpu count is higher than setting
        jobs = self.jobs if self.jobs else cpu_count()
        if jobs > CPU_COUNT_MIN and len(queue) > jobs:
            return self.compile_queue(queue, objects)
        else:
            return self.compile_seq(queue, objects)

    # Compile source files queue in sequential order
    def compile_seq(self, queue, objects):
        for item in queue:
            result = compile_worker(item)

            self.compiled += 1
            self.progress("compile", item['source'], build_update=True)
            for res in result['results']:
                self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                self.compile_output([
                    res['code'],
                    res['output'],
                    res['command']
                ])
            objects.append(result['object'])
        return objects

    # Compile source files queue in parallel by creating pool of worker threads
    def compile_queue(self, queue, objects):
        jobs_count = int(self.jobs if self.jobs else cpu_count() * CPU_COEF)
        p = Pool(processes=jobs_count)

        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))
        p.close()

        itr = 0
        while len(results):
            itr += 1
            if itr > 180000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            sleep(0.01)
            pending = 0
            for r in results:
                if r._ready is True:
                    try:
                        result = r.get()
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException, err:
                        if p._taskqueue.queue:
                            p._taskqueue.queue.clear()
                            sleep(0.5)
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    pending += 1
                    if pending >= jobs_count:
                        break

        results = None
        p.join()

        return objects

    # Determine the compile command based on type of source file
    def compile_command(self, source, object, includes):
        # Check dependencies
        _, ext = splitext(source)
        ext = ext.lower()

        if ext == '.c' or ext == '.cpp':
            base, _ = splitext(object)
            dep_path = base + '.d'
            deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
            if len(deps) == 0 or self.need_update(object, deps):
                if ext == '.cpp' or self.COMPILE_C_AS_CPP:
                    return self.compile_cpp(source, object, includes)
                else:
                    return self.compile_c(source, object, includes)
        elif ext == '.s':
            deps = [source]
            if self.need_update(object, deps):
                return self.assemble(source, object, includes)
        else:
            return False

        return None

    @abstractmethod
    def parse_dependencies(self, dep_path):
        """Parse the dependency information generated by the compiler.

        Positional arguments:
        dep_path -- the path to a file generated by a previous run of the compiler

        Return value:
        A list of all source files that the dependency file indicated were dependencies

        Side effects:
        None
        """
        raise NotImplemented

    def is_not_supported_error(self, output):
        return "#error directive: [NOT_SUPPORTED]" in output

    @abstractmethod
    def parse_output(self, output):
        """Take in compiler output and extract single line warnings and errors from it.

        Positional arguments:
        output -- a string of all the messages emitted by a run of the compiler

        Return value:
        None

        Side effects:
        call self.cc_info or self.notify with a description of the event generated by the compiler
        """
        raise NotImplemented

    def compile_output(self, output=[]):
        _rc = output[0]
        _stderr = output[1]
        command = output[2]

        # Parse output for Warnings and Errors
        self.parse_output(_stderr)
        self.debug("Return: %s"% _rc)
        for error_line in _stderr.splitlines():
            self.debug("Output: %s"% error_line)

        # Check return code
        if _rc != 0:
            if self.is_not_supported_error(_stderr):
                raise NotSupportedException(_stderr)
            else:
                raise ToolException(_stderr)

    @check_toolchain_path
    def build_library(self, objects, dir, name):
        needed_update = False
        lib = self.STD_LIB_NAME % name
        fout = join(dir, lib)
        if self.need_update(fout, objects):
            self.info("Library: %s" % lib)
            self.archive(objects, fout)
            needed_update = True

        return needed_update

    @check_toolchain_path
    def link_program(self, r, tmp_path, name):
        needed_update = False
        ext = 'bin'
        if hasattr(self.target, 'OUTPUT_EXT'):
            ext = self.target.OUTPUT_EXT

        if hasattr(self.target, 'OUTPUT_NAMING'):
            self.var("binary_naming", self.target.OUTPUT_NAMING)
            if self.target.OUTPUT_NAMING == "8.3":
                name = name[0:8]
                ext = ext[0:3]

        # Create destination directory
        head, tail = split(name)
        new_path = join(tmp_path, head)
        mkdir(new_path)

        filename = name+'.'+ext
        elf = join(tmp_path, name + '.elf')
        bin = join(tmp_path, filename)
        map = join(tmp_path, name + '.map')

        r.objects = sorted(set(r.objects))
        if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
            needed_update = True
            self.progress("link", name)
            self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

        if self.need_update(bin, [elf]):
            needed_update = True
            self.progress("elf2bin", name)
            self.binary(r, elf, bin)

        self.map_outputs = self.mem_stats(map)

        self.var("compile_succeded", True)
        self.var("binary", filename)

        return bin, needed_update

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def default_cmd(self, command):
        _stdout, _stderr, _rc = run_cmd(command, work_dir=getcwd(), chroot=self.CHROOT)
        self.debug("Return: %s"% _rc)

        for output_line in _stdout.splitlines():
            self.debug("Output: %s"% output_line)
        for error_line in _stderr.splitlines():
            self.debug("Errors: %s"% error_line)

        if _rc != 0:
            for line in _stderr.splitlines():
                self.tool_error(line)
            raise ToolException(_stderr)

    ### NOTIFICATIONS ###
    def info(self, message):
        self.notify({'type': 'info', 'message': message})

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def debug(self, message):
        if self.VERBOSE:
            if type(message) is ListType:
                message = ' '.join(message)
            message = "[DEBUG] " + message
            self.notify({'type': 'debug', 'message': message})

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def cc_info(self, info=None):
        if info is not None:
            info['type'] = 'cc'
            self.notify(info)

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def cc_verbose(self, message, file=""):
        self.debug(message)

    def progress(self, action, file, build_update=False):
        msg = {'type': 'progress', 'action': action, 'file': file}
        if build_update:
            msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
        self.notify(msg)

    def tool_error(self, message):
        self.notify({'type': 'tool_error', 'message': message})

    def var(self, key, value):
        self.notify({'type': 'var', 'key': key, 'val': value})

    # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
    # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
    def mem_stats(self, map):
        """! Creates parser object
        @param map Path to linker map file to parse and decode
        @return Memory summary structure with memory usage statistics
                None if map file can't be opened and processed
        """
        toolchain = self.__class__.__name__

        # Create memap object
        memap = MemapParser()

        # Parse and decode a map file
        if memap.parse(abspath(map), toolchain) is False:
            self.info("Unknown toolchain for memory statistics %s" % toolchain)
            return None

        # Write output to stdout in text (pretty table) format
        memap.generate_output('table')

        # Write output to file in JSON format
        map_out = splitext(map)[0] + "_map.json"
        memap.generate_output('json', map_out)

        # Write output to file in CSV format for the CI
        map_csv = splitext(map)[0] + "_map.csv"
        memap.generate_output('csv-ci', map_csv)

        # Here we return memory statistics structure (constructed after
        # call to generate_output) which contains raw data in bytes
        # about sections + summary
        return memap.mem_summary

    # Set the configuration data
    def set_config_data(self, config_data):
        self.config_data = config_data

    # Creates the configuration header if needed:
    # - if there is no configuration data, "mbed_config.h" is not created (or deleted if it exists).
    # - if there is configuration data and "mbed_config.h" does not exist, it is created.
    # - if the configuration data is identical to the previous configuration data,
    #   "mbed_config.h" is left untouched.
    # - if there is new configuration data, "mbed_config.h" is overridden.
    # The function needs to be called exactly once for the lifetime of this toolchain instance.
    # The "config_processed" variable (below) ensures this behaviour.
    # The function returns the location of the configuration file, or None if there is no
    # configuration data available (and thus no configuration file)
    def get_config_header(self):
        if self.config_processed: # this function was already called, return its result
            return self.config_file
        # The config file is located in the build directory
        self.config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
        # If the file exists, read its current content in prev_data
        if exists(self.config_file):
            with open(self.config_file, "rt") as f:
                prev_data = f.read()
        else:
            prev_data = None
        # Get the current configuration data
        crt_data = Config.config_to_header(self.config_data) if self.config_data else None
        # "changed" indicates if a configuration change was detected
        changed = False
        if prev_data is not None: # a previous mbed_config.h exists
            if crt_data is None: # no configuration data, so "mbed_config.h" needs to be removed
                remove(self.config_file)
                self.config_file = None # this means "config file not present"
                changed = True
            elif crt_data != prev_data: # different content of config file
                with open(self.config_file, "wt") as f:
                    f.write(crt_data)
                changed = True
        else: # a previous mbed_config.h does not exist
            if crt_data is not None: # there's configuration data available
                with open(self.config_file, "wt") as f:
                    f.write(crt_data)
                changed = True
            else:
                self.config_file = None # this means "config file not present"
        # If there was a change in configuration, rebuild everything
        self.build_all = changed
        # Make sure that this function will only return the location of the configuration
        # file for subsequent calls, without trying to manipulate its content in any way.
        self.config_processed = True
        return self.config_file

    @abstractmethod
    def get_config_option(self, config_header):
        """Generate the compiler option that forces the inclusion of the configuration
        header file.

        Positional arguments:
        config_header -- The configuration header that will be included within all source files

        Return value:
        A list of the command line arguments that will force the inclusion of the specified header

        Side effects:
        None
        """
        raise NotImplemented

    @abstractmethod
    def assemble(self, source, object, includes):
        """Generate the command line that assembles.

        Positional arguments:
        source -- a file path that is the file to assemble
        object -- a file path that is the destination object
        includes -- a list of all directories where header files may be found

        Return value:
        The complete command line, as a list, that would invoke the assembler
        on the source file, include all the include paths, and generate
        the specified object file.

        Side effects:
        None

        Note:
        This method should be decorated with @hook_tool.
        """
        raise NotImplemented

    @abstractmethod
    def compile_c(self, source, object, includes):
        """Generate the command line that compiles a C source file.

        Positional arguments:
        source -- the C source file to compile
        object -- the destination object file
        includes -- a list of all the directories where header files may be found

        Return value:
        The complete command line, as a list, that would invoke the C compiler
        on the source file, include all the include paths, and generate the
        specified object file.

        Side effects:
        None

        Note:
        This method should be decorated with @hook_tool.
        """
        raise NotImplemented

    @abstractmethod
    def compile_cpp(self, source, object, includes):
        """Generate the command line that compiles a C++ source file.

        Positional arguments:
        source -- the C++ source file to compile
        object -- the destination object file
        includes -- a list of all the directories where header files may be found

        Return value:
        The complete command line, as a list, that would invoke the C++ compiler
        on the source file, include all the include paths, and generate the
        specified object file.

        Side effects:
        None

        Note:
        This method should be decorated with @hook_tool.
        """
        raise NotImplemented

    @abstractmethod
    def link(self, output, objects, libraries, lib_dirs, mem_map):
        """Run the linker to create an executable and memory map.

        Positional arguments:
        output -- the file name to place the executable in
        objects -- all of the object files to link
        libraries -- all of the required libraries
        lib_dirs -- where the required libraries are located
        mem_map -- the location where the memory map file should be stored

        Return value:
        None

        Side effect:
        Runs the linker to produce the executable.

        Note:
        This method should be decorated with @hook_tool.
        """
        raise NotImplemented

    @abstractmethod
    def archive(self, objects, lib_path):
        """Run the command line that creates an archive.

        Positional arguments:
        objects -- a list of all the object files that should be archived
        lib_path -- the file name of the resulting library file

        Return value:
        None

        Side effect:
        Runs the archiving tool to produce the library file.

        Note:
        This method should be decorated with @hook_tool.
        """
        raise NotImplemented

    @abstractmethod
    def binary(self, resources, elf, bin):
        """Run the command line that will extract a simplified binary file.

        Positional arguments:
        resources -- A resources object (Is not used in any of the toolchains)
        elf -- the executable file that is to be converted
        bin -- the file name of the to-be-created simplified binary file

        Return value:
        None

        Side effect:
        Runs the elf2bin tool to produce the simplified binary file.

        Note:
        This method should be decorated with @hook_tool.
01254 """ 01255 raise NotImplemented 01256 01257 # Return the list of macros geenrated by the build system 01258 def get_config_macros(self): 01259 return Config.config_to_macros(self.config_data) if self.config_data else [] 01260 01261 from tools.settings import ARM_PATH 01262 from tools.settings import GCC_ARM_PATH, GCC_CR_PATH 01263 from tools.settings import IAR_PATH 01264 01265 TOOLCHAIN_PATHS = { 01266 'ARM': ARM_PATH, 01267 'uARM': ARM_PATH, 01268 'GCC_ARM': GCC_ARM_PATH, 01269 'GCC_CR': GCC_CR_PATH, 01270 'IAR': IAR_PATH 01271 } 01272 01273 from tools.toolchains.arm import ARM_STD, ARM_MICRO 01274 from tools.toolchains.gcc import GCC_ARM, GCC_CR 01275 from tools.toolchains.iar import IAR 01276 01277 TOOLCHAIN_CLASSES = { 01278 'ARM': ARM_STD, 01279 'uARM': ARM_MICRO, 01280 'GCC_ARM': GCC_ARM, 01281 'GCC_CR': GCC_CR, 01282 'IAR': IAR 01283 } 01284 01285 TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
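The constructor's notify parameter, described in the comments inside __init__, receives every toolchain event as a dict plus the silent flag. A small hedged sketch of a custom callback, continuing the example above (the event fields shown follow the 'cc' and 'progress' events emitted by print_notify; anything else is an assumption):

# Hypothetical notify callback sketch (Python 2, reuses `target` from above)
def my_notify(event, silent):
    if event['type'] == 'cc':
        # compiler diagnostic: severity, file, line, message
        print "%(severity)s %(file)s:%(line)s %(message)s" % event
    elif event['type'] == 'progress' and not silent:
        print "%s %s" % (event['action'], event['file'])

toolchain = TOOLCHAIN_CLASSES['GCC_ARM'](target, notify=my_notify)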
