__init__.py
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import re
import sys
from os import stat, walk
from copy import copy
from time import time, sleep
from types import ListType
from shutil import copyfile
from os.path import join, splitext, exists, relpath, dirname, basename, split
from inspect import getmro

from multiprocessing import Pool, cpu_count
from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
import tools.hooks as hooks


#Disables multiprocessing if set to higher number than the host machine CPUs
CPU_COUNT_MIN = 1

def compile_worker(job):
    results = []
    for command in job['commands']:
        _, _stderr, _rc = run_cmd(command, job['work_dir'])
        results.append({
            'code': _rc,
            'output': _stderr,
            'command': command
        })

    return {
        'source': job['source'],
        'object': job['object'],
        'commands': job['commands'],
        'results': results
    }

class Resources:
    def __init__(self, base_path=None):
        self.base_path = base_path

        self.inc_dirs = []
        self.headers = []

        self.s_sources = []
        self.c_sources = []
        self.cpp_sources = []

        self.lib_dirs = set([])
        self.objects = []
        self.libraries = []

        # mbed special files
        self.lib_builds = []
        self.lib_refs = []

        self.repo_dirs = []
        self.repo_files = []

        self.linker_script = None

        # Other files
        self.hex_files = []
        self.bin_files = []

    def add(self, resources):
        self.inc_dirs += resources.inc_dirs
        self.headers += resources.headers

        self.s_sources += resources.s_sources
        self.c_sources += resources.c_sources
        self.cpp_sources += resources.cpp_sources

        self.lib_dirs |= resources.lib_dirs
        self.objects += resources.objects
        self.libraries += resources.libraries

        self.lib_builds += resources.lib_builds
        self.lib_refs += resources.lib_refs

        self.repo_dirs += resources.repo_dirs
        self.repo_files += resources.repo_files

        if resources.linker_script is not None:
            self.linker_script = resources.linker_script

        self.hex_files += resources.hex_files
        self.bin_files += resources.bin_files

    def relative_to(self, base, dot=False):
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
            v = [rel_path(f, base, dot) for f in getattr(self, field)]
            setattr(self, field, v)
        if self.linker_script is not None:
            self.linker_script = rel_path(self.linker_script, base, dot)

    def win_to_unix(self):
        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
            v = [f.replace('\\', '/') for f in getattr(self, field)]
            setattr(self, field, v)
        if self.linker_script is not None:
            self.linker_script = self.linker_script.replace('\\', '/')

    def __str__(self):
        s = []

        for (label, resources) in (
                ('Include Directories', self.inc_dirs),
                ('Headers', self.headers),

                ('Assembly sources', self.s_sources),
                ('C sources', self.c_sources),
                ('C++ sources', self.cpp_sources),

                ('Library directories', self.lib_dirs),
                ('Objects', self.objects),
                ('Libraries', self.libraries),

                ('Hex files', self.hex_files),
                ('Bin files', self.bin_files),
            ):
            if resources:
                s.append('%s:\n ' % label + '\n '.join(resources))

        if self.linker_script:
            s.append('Linker Script: ' + self.linker_script)

        return '\n'.join(s)


# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
    'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM'
])
LEGACY_TOOLCHAIN_NAMES = {
    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
    'IAR': 'IAR',
}


class mbedToolchain:
    VERBOSE = True

    CORTEX_SYMBOLS = {
        "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
        "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
        "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
    }

    GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
    GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        self.target = target
        self.name = self.__class__.__name__
        self.hook = hooks.Hook(target, self)
        self.silent = silent
        self.output = ""

        self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

        if notify:
            self.notify_fun = notify
        elif extra_verbose:
            self.notify_fun = self.print_notify_verbose
        else:
            self.notify_fun = self.print_notify

        self.options = options if options is not None else []

        self.macros = macros or []
        self.options.extend(BUILD_OPTIONS)
        if self.options:
            self.info("Build Options: %s" % (', '.join(self.options)))

        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)

        self.symbols = None
        self.labels = None
        self.has_config = False

        self.build_all = False
        self.timestamp = time()
        self.jobs = 1

        self.CHROOT = None

        self.mp_pool = None

    def get_output(self):
        return self.output

    def print_notify(self, event, silent=False):
        """ Default command line notification
        """
        msg = None

        if event['type'] in ['info', 'debug']:
            msg = event['message']

        elif event['type'] == 'cc':
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event

        elif event['type'] == 'progress':
            if not silent:
                msg = '%s: %s' % (event['action'].title(), basename(event['file']))

        if msg:
            print msg
            self.output += msg + "\n"

    def print_notify_verbose(self, event, silent=False):
        """ Default command line notification with more verbose mode
        """
        if event['type'] in ['info', 'debug']:
            self.print_notify(event) # standard handle

        elif event['type'] == 'cc':
            event['severity'] = event['severity'].title()
            event['file'] = basename(event['file'])
            event['mcu_name'] = "None"
            event['toolchain'] = "None"
            event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
            event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
            msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
            print msg
            self.output += msg + "\n"

        elif event['type'] == 'progress':
            self.print_notify(event) # standard handle

    def notify(self, event):
        """ Little closure for notify functions
        """
        return self.notify_fun(event, self.silent)

    def __exit__(self):
        if self.mp_pool is not None:
            self.mp_pool.terminate()

    def goanna_parse_line(self, line):
        if "analyze" in self.options:
            return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
        else:
            return None

    def get_symbols(self):
        if self.symbols is None:
            # Target and Toolchain symbols
            labels = self.get_labels()
            self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
            self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])

            # Config support
            if self.has_config:
                self.symbols.append('HAVE_MBED_CONFIG_H')

            # Cortex CPU symbols
            if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])

            # Symbols defined by the on-line build.system
            self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
            if MBED_ORG_USER:
                self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)

            # Add target's symbols
            self.symbols += self.target.macros
            # Add extra symbols passed via 'macros' parameter
            self.symbols += self.macros

            # Form factor variables
            if hasattr(self.target, 'supported_form_factors'):
                self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])

            # uVisor-specific symbols
            if "UVISOR_PRESENT=1" in self.symbols and "__FPU_PRESENT=1" in self.symbols:
                self.symbols.remove("__FPU_PRESENT=1")

        return list(set(self.symbols)) # Return only unique symbols

    def get_labels(self):
        if self.labels is None:
            toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
            toolchain_labels.remove('mbedToolchain')
            self.labels = {
                'TARGET': self.target.get_labels() + ["DEBUG" if "debug-info" in self.options else "RELEASE"],
                'TOOLCHAIN': toolchain_labels
            }
        return self.labels

    def need_update(self, target, dependencies):
        if self.build_all:
            return True

        if not exists(target):
            return True

        target_mod_time = stat(target).st_mtime

        for d in dependencies:

            # Some objects are not provided with full path and here we do not have
            # information about the library paths. Safe option: assume an update
            if not d or not exists(d):
                return True

            if stat(d).st_mtime >= target_mod_time:
                return True

        return False

    def scan_resources(self, path):
        labels = self.get_labels()
        resources = Resources(path)
        self.has_config = False

        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
        When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
        the subdirectories whose names remain in dirnames; this can be used to prune
        the search, impose a specific order of visiting, or even to inform walk()
        about directories the caller creates or renames before it resumes walk()
        again. Modifying dirnames when topdown is False is ineffective, because in
        bottom-up mode the directories in dirnames are generated before dirpath
        itself is generated.
        """
        for root, dirs, files in walk(path):
            # Remove ignored directories
            for d in copy(dirs):
                if d == '.hg':
                    dir_path = join(root, d)
                    resources.repo_dirs.append(dir_path)
                    resources.repo_files.extend(self.scan_repository(dir_path))

                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])):
                    dirs.remove(d)

            # Add root to include paths
            resources.inc_dirs.append(root)

            for file in files:
                file_path = join(root, file)
                _, ext = splitext(file)
                ext = ext.lower()

                if ext == '.s':
                    resources.s_sources.append(file_path)

                elif ext == '.c':
                    resources.c_sources.append(file_path)

                elif ext == '.cpp':
                    resources.cpp_sources.append(file_path)

                elif ext == '.h' or ext == '.hpp':
                    if basename(file_path) == "mbed_config.h":
                        self.has_config = True
                    resources.headers.append(file_path)

                elif ext == '.o':
                    resources.objects.append(file_path)

                elif ext == self.LIBRARY_EXT:
                    resources.libraries.append(file_path)
                    resources.lib_dirs.add(root)

                elif ext == self.LINKER_EXT:
                    if resources.linker_script is not None:
                        self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
                    resources.linker_script = file_path

                elif ext == '.lib':
                    resources.lib_refs.append(file_path)

                elif ext == '.bld':
                    resources.lib_builds.append(file_path)

                elif file == '.hgignore':
                    resources.repo_files.append(file_path)

                elif ext == '.hex':
                    resources.hex_files.append(file_path)

                elif ext == '.bin':
                    resources.bin_files.append(file_path)

        return resources

    def scan_repository(self, path):
        resources = []

        for root, dirs, files in walk(path):
            # Remove ignored directories
            for d in copy(dirs):
                if d == '.' or d == '..':
                    dirs.remove(d)

            for file in files:
                file_path = join(root, file)
                resources.append(file_path)

        return resources

    def copy_files(self, files_paths, trg_path, rel_path=None):

        # Handle a single file
        if type(files_paths) != ListType: files_paths = [files_paths]

        for source in files_paths:
            if source is None:
                files_paths.remove(source)

        for source in files_paths:
            if rel_path is not None:
                relative_path = relpath(source, rel_path)
            else:
                _, relative_path = split(source)

            target = join(trg_path, relative_path)

            if (target != source) and (self.need_update(target, [source])):
                self.progress("copy", relative_path)
                mkdir(dirname(target))
                copyfile(source, target)

    def relative_object_path(self, build_path, base_dir, source):
        source_dir, name, _ = split_path(source)
        obj_dir = join(build_path, relpath(source_dir, base_dir))
        mkdir(obj_dir)
        return join(obj_dir, name + '.o')

    def compile_sources(self, resources, build_path, inc_dirs=None):
        # Web IDE progress bar for project build
        files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
        self.to_be_compiled = len(files_to_compile)
        self.compiled = 0

        #for i in self.build_params:
        #    self.debug(i)
        #    self.debug("%s" % self.build_params[i])

        inc_paths = resources.inc_dirs
        if inc_dirs is not None:
            inc_paths.extend(inc_dirs)

        objects = []
        queue = []
        prev_dir = None

        # The dependency checking for C/C++ is delegated to the compiler
        base_path = resources.base_path
        files_to_compile.sort()
        for source in files_to_compile:
            _, name, _ = split_path(source)
            object = self.relative_object_path(build_path, base_path, source)

            # Avoid multiple mkdir() calls on same work directory
            work_dir = dirname(object)
            if work_dir is not prev_dir:
                prev_dir = work_dir
                mkdir(work_dir)

            # Queue mode (multiprocessing)
            commands = self.compile_command(source, object, inc_paths)
            if commands is not None:
                queue.append({
                    'source': source,
                    'object': object,
                    'commands': commands,
                    'work_dir': work_dir,
                    'chroot': self.CHROOT
                })
            else:
                objects.append(object)

        # Use queues/multiprocessing if cpu count is higher than setting
        jobs = self.jobs if self.jobs else cpu_count()
        if jobs > CPU_COUNT_MIN and len(queue) > jobs:
            return self.compile_queue(queue, objects)
        else:
            return self.compile_seq(queue, objects)

    def compile_seq(self, queue, objects):
        for item in queue:
            result = compile_worker(item)

            self.compiled += 1
            self.progress("compile", item['source'], build_update=True)
            for res in result['results']:
                self.debug("Command: %s" % ' '.join(res['command']))
                self.compile_output([
                    res['code'],
                    res['output'],
                    res['command']
                ])
            objects.append(result['object'])
        return objects

    def compile_queue(self, queue, objects):
        jobs_count = int(self.jobs if self.jobs else cpu_count())
        p = Pool(processes=jobs_count)

        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))

        itr = 0
        while True:
            itr += 1
            if itr > 180000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            pending = 0
            for r in results:
                if r._ready is True:
                    try:
                        result = r.get()
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.debug("Command: %s" % ' '.join(res['command']))
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException, err:
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    pending += 1
                    if pending > jobs_count:
                        break


            if len(results) == 0:
                break

            sleep(0.01)

        results = None
        p.terminate()
        p.join()

        return objects

    def compile_command(self, source, object, includes):
        # Check dependencies
        _, ext = splitext(source)
        ext = ext.lower()

        if ext == '.c' or ext == '.cpp':
            base, _ = splitext(object)
            dep_path = base + '.d'
            deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
            if len(deps) == 0 or self.need_update(object, deps):
                if ext == '.c':
                    return self.compile_c(source, object, includes)
                else:
                    return self.compile_cpp(source, object, includes)
        elif ext == '.s':
            deps = [source]
            if self.need_update(object, deps):
                return self.assemble(source, object, includes)
        else:
            return False

        return None

    def is_not_supported_error(self, output):
        return "#error directive: [NOT_SUPPORTED]" in output

    def compile_output(self, output=[]):
        _rc = output[0]
        _stderr = output[1]
        command = output[2]

        # Parse output for Warnings and Errors
        self.parse_output(_stderr)
        self.debug("Return: %s"% _rc)
        for error_line in _stderr.splitlines():
            self.debug("Output: %s"% error_line)


        # Check return code
        if _rc != 0:
            for line in _stderr.splitlines():
                self.tool_error(line)

            if self.is_not_supported_error(_stderr):
                raise NotSupportedException(_stderr)
            else:
                raise ToolException(_stderr)

    def compile(self, cc, source, object, includes):
        _, ext = splitext(source)
        ext = ext.lower()

        command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source]

        if hasattr(self, "get_dep_opt"):
            base, _ = splitext(object)
            dep_path = base + '.d'
            command.extend(self.get_dep_opt(dep_path))

        if hasattr(self, "cc_extra"):
            command.extend(self.cc_extra(base))

        return [command]

    def compile_c(self, source, object, includes):
        return self.compile(self.cc, source, object, includes)

    def compile_cpp(self, source, object, includes):
        return self.compile(self.cppc, source, object, includes)

    def build_library(self, objects, dir, name):
        needed_update = False
        lib = self.STD_LIB_NAME % name
        fout = join(dir, lib)
        if self.need_update(fout, objects):
            self.info("Library: %s" % lib)
            self.archive(objects, fout)
            needed_update = True

        return needed_update

    def link_program(self, r, tmp_path, name):
        needed_update = False
        ext = 'bin'
        if hasattr(self.target, 'OUTPUT_EXT'):
            ext = self.target.OUTPUT_EXT

        if hasattr(self.target, 'OUTPUT_NAMING'):
            self.var("binary_naming", self.target.OUTPUT_NAMING)
            if self.target.OUTPUT_NAMING == "8.3":
                name = name[0:8]
                ext = ext[0:3]

        filename = name+'.'+ext
        elf = join(tmp_path, name + '.elf')
        bin = join(tmp_path, filename)

        if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
            needed_update = True
            self.progress("link", name)
            self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

        if self.need_update(bin, [elf]):
            needed_update = True
            self.progress("elf2bin", name)

            self.binary(r, elf, bin)

        self.var("compile_succeded", True)
        self.var("binary", filename)

        return bin, needed_update

    def default_cmd(self, command):
        _stdout, _stderr, _rc = run_cmd(command)
        # Print all warning / erros from stderr to console output
        for error_line in _stderr.splitlines():
            print error_line

        self.debug("Command: %s"% ' '.join(command))
        self.debug("Return: %s"% _rc)

        for output_line in _stdout.splitlines():
            self.debug("Output: %s"% output_line)
        for error_line in _stderr.splitlines():
            self.debug("Errors: %s"% error_line)

        if _rc != 0:
            for line in _stderr.splitlines():
                self.tool_error(line)
            raise ToolException(_stderr)

    ### NOTIFICATIONS ###
    def info(self, message):
        self.notify({'type': 'info', 'message': message})

    def debug(self, message):
        if self.VERBOSE:
            if type(message) is ListType:
                message = ' '.join(message)
            message = "[DEBUG] " + message
            self.notify({'type': 'debug', 'message': message})

    def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
        self.notify({'type': 'cc',
                     'severity': severity,
                     'file': file,
                     'line': line,
                     'message': message,
                     'target_name': target_name,
                     'toolchain_name': toolchain_name})

    def progress(self, action, file, build_update=False):
        msg = {'type': 'progress', 'action': action, 'file': file}
        if build_update:
            msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
        self.notify(msg)

    def tool_error(self, message):
        self.notify({'type': 'tool_error', 'message': message})

    def var(self, key, value):
        self.notify({'type': 'var', 'key': key, 'val': value})

from tools.settings import ARM_BIN
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from tools.settings import IAR_PATH

TOOLCHAIN_BIN_PATH = {
    'ARM': ARM_BIN,
    'uARM': ARM_BIN,
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}

from tools.toolchains.arm import ARM_STD, ARM_MICRO
from tools.toolchains.gcc import GCC_ARM, GCC_CR
from tools.toolchains.iar import IAR

TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}

TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
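This module is normally driven by the mbed SDK's higher-level build scripts rather than used directly. As a rough orientation, the minimal sketch below (not part of the file above) chains the main entry points together; it assumes a valid target description object is available from elsewhere in the SDK, and the build_sketch helper name is purely illustrative.

# Hypothetical usage sketch: only TOOLCHAIN_CLASSES, scan_resources(),
# compile_sources() and link_program() are taken from the listing above;
# `target` is assumed to be a valid mbed target description object.
from tools.toolchains import TOOLCHAIN_CLASSES

def build_sketch(target, src_path, build_path):
    toolchain = TOOLCHAIN_CLASSES['GCC_ARM'](target)   # pick a concrete toolchain class
    toolchain.jobs = 4                                  # with enough queued files, compile_queue() is used instead of compile_seq()
    resources = toolchain.scan_resources(src_path)      # collect sources, headers, libraries and the linker script
    objects = toolchain.compile_sources(resources, build_path)
    return toolchain.link_program(resources, build_path, "firmware")  # returns (binary_path, needed_update)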
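Similarly, the notify constructor parameter accepts any callable with the same (event, silent) signature as print_notify(); events are plain dicts whose 'type' field is one of 'info', 'debug', 'cc', 'progress', 'tool_error' or 'var'. A minimal sketch of a custom notifier (the quiet_notify name is hypothetical):

import sys

# Hypothetical notifier: forward only compiler errors, drop info/debug/progress
# events. It would be passed as the `notify` argument of a toolchain class, e.g.
#     toolchain = TOOLCHAIN_CLASSES['ARM'](target, notify=quiet_notify)
def quiet_notify(event, silent=False):
    if event['type'] == 'cc' and event['severity'].lower() == 'error':
        sys.stderr.write("%s:%s: %s\n" % (event['file'], event['line'], event['message']))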
