Clone of official tools
Diff: toolchains/__init__.py
- Revision: 13:ab47a20b66f0
- Parent: 12:f2e8a005c7d3
- Child: 14:ee1b877e6839
--- a/toolchains/__init__.py Tue Jun 14 11:33:06 2016 +0100
+++ b/toolchains/__init__.py Thu Jul 14 20:21:19 2016 +0100
@@ -22,8 +22,10 @@
 from time import time, sleep
 from types import ListType
 from shutil import copyfile
-from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath
+from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir
 from inspect import getmro
+from copy import deepcopy
+from tools.config import Config
 from multiprocessing import Pool, cpu_count
 from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
@@ -40,7 +42,11 @@
 def compile_worker(job):
     results = []
     for command in job['commands']:
-        _, _stderr, _rc = run_cmd(command, job['work_dir'])
+        try:
+            _, _stderr, _rc = run_cmd(command, job['work_dir'])
+        except KeyboardInterrupt as e:
+            raise ToolException
+
         results.append({
             'code': _rc,
             'output': _stderr,
@@ -59,6 +65,8 @@
     def __init__(self, base_path=None):
         self.base_path = base_path
+        self.file_basepath = {}
+
         self.inc_dirs = []
         self.headers = []
@@ -84,6 +92,9 @@
         self.bin_files = []
         self.json_files = []
 
+        # Features
+        self.features = {}
+
     def __add__(self, resources):
         if resources is None:
             return self
@@ -97,6 +108,9 @@
             return self.add(resources)
 
     def add(self, resources):
+        for f,p in resources.file_basepath.items():
+            self.file_basepath[f] = p
+
         self.inc_dirs += resources.inc_dirs
         self.headers += resources.headers
@@ -121,6 +135,8 @@
         self.bin_files += resources.bin_files
         self.json_files += resources.json_files
 
+        self.features.update(resources.features)
+
         return self
 
     def relative_to(self, base, dot=False):
@@ -130,6 +146,9 @@
                       'hex_files', 'bin_files', 'json_files']:
             v = [rel_path(f, base, dot) for f in getattr(self, field)]
             setattr(self, field, v)
+
+        self.features = {k: f.relative_to(base, dot) for k, f in self.features.iteritems() if f}
+
         if self.linker_script is not None:
             self.linker_script = rel_path(self.linker_script, base, dot)
@@ -140,6 +159,9 @@
                       'hex_files', 'bin_files', 'json_files']:
             v = [f.replace('\\', '/') for f in getattr(self, field)]
             setattr(self, field, v)
+
+        self.features = {k: f.win_to_unix() for k, f in self.features.iteritems() if f}
+
         if self.linker_script is not None:
             self.linker_script = self.linker_script.replace('\\', '/')
@@ -160,6 +182,8 @@
             ('Hex files', self.hex_files),
             ('Bin files', self.bin_files),
+
+            ('Features', self.features),
         ):
             if resources:
                 s.append('%s:\n ' % label + '\n '.join(resources))
@@ -174,79 +198,104 @@
 # had the knowledge of a list of these directories to be ignored.
 LEGACY_IGNORE_DIRS = set([
     'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
-    'ARM', 'uARM', 'IAR',
-    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
+    'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM'
 ])
 
 LEGACY_TOOLCHAIN_NAMES = {
     'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
-    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CS',
+    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
     'IAR': 'IAR',
 }
 
 class mbedToolchain:
-    PROFILE = None
     VERBOSE = True
-    ignorepatterns = []
 
     CORTEX_SYMBOLS = {
         "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
-        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
+        "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
+        "Cortex-M7FD" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"],
         "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
     }
 
+
     GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
     GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')
 
-    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False, profile=None):
+    MBED_CONFIG_FILE_NAME="mbed_config.h"
+
+    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
         self.target = target
         self.name = self.__class__.__name__
+
+        # compile/assemble/link/binary hooks
         self.hook = hooks.Hook(target, self)
-        self.silent = silent
-        self.output = ""
+
+        # Toolchain flags
+        self.flags = deepcopy(self.DEFAULT_FLAGS)
+
+        # User-defined macros
+        self.macros = macros or []
+
+        # Macros generated from toolchain and target rules/features
+        self.symbols = None
+
+        # Labels generated from toolchain and target rules/features (used for selective build)
+        self.labels = None
+
+        # This will hold the configuration data (as returned by Config.get_config_data())
+        self.config_data = None
+
+        # Non-incremental compile
+        self.build_all = False
+
+        # Build output dir
+        self.build_dir = None
+        self.timestamp = time()
+
+        # Output build naming based on target+toolchain combo (mbed 2.0 builds)
+        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
+
+        # Number of concurrent build jobs. 0 means auto (based on host system cores)
+        self.jobs = 0
+
+        self.CHROOT = None
+
+        # Ignore patterns from .mbedignore files
+        self.ignore_patterns = []
+
+        # Pre-mbed 2.0 ignore dirs
         self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
 
+        # Output notify function
         if notify:
             self.notify_fun = notify
         elif extra_verbose:
             self.notify_fun = self.print_notify_verbose
         else:
             self.notify_fun = self.print_notify
+
+        # Silent builds (no output)
+        self.silent = silent
+
+        # Print output buffer
+        self.output = ""
 
+        # Build options passed by -o flag
         self.options = options if options is not None else []
-        self.macros = macros or []
+
+        # Build options passed by settings.py or mbed_settings.py
         self.options.extend(BUILD_OPTIONS)
+
         if self.options:
             self.info("Build Options: %s" % (', '.join(self.options)))
-
-        self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
-
-        self.symbols = None
-        self.labels = None
-        self.has_config = False
-
-        self.build_all = False
-        self.build_dir = None
-        self.timestamp = time()
-        self.jobs = 1
-
-        self.CHROOT = None
-
-        self.mp_pool = None
-
-        if profile and (TOOLCHAIN_PROFILES.get(self.name, None) and
-                        TOOLCHAIN_PROFILES[self.name].get(profile)):
-            self.PROFILE = TOOLCHAIN_PROFILES[self.name].get(profile)
-            self.info("Using toolchain %s profile %s" % (self.name, profile))
-
+
+        # uVisor spepcific rules
         if 'UVISOR' in self.target.features and 'UVISOR_SUPPORTED' in self.target.extra_labels:
             self.target.core = re.sub(r"F$", '', self.target.core)
@@ -263,7 +312,7 @@
 
         elif event['type'] in ['info', 'debug']:
             msg = event['message']
-
+
         elif event['type'] == 'cc':
             event['severity'] = event['severity'].title()
             event['file'] = basename(event['file'])
@@ -302,10 +351,6 @@
         """
         return self.notify_fun(event, self.silent)
 
-    def __exit__(self):
-        if self.mp_pool is not None:
-            self.mp_pool.terminate()
-
     def goanna_parse_line(self, line):
         if "analyze" in self.options:
             return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
@@ -317,13 +362,8 @@
             # Target and Toolchain symbols
             labels = self.get_labels()
             self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
-            self.symbols.extend(["FEATURE_%s" % t for t in labels['FEATURE']])
             self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
 
-            # Config support
-            if self.has_config:
-                self.symbols.append('HAVE_MBED_CONFIG_H')
-
             # Cortex CPU symbols
             if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
                 self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
@@ -385,16 +425,36 @@
         return False
 
     def is_ignored(self, file_path):
-        for pattern in self.ignorepatterns:
+        for pattern in self.ignore_patterns:
             if fnmatch.fnmatch(file_path, pattern):
                 return True
         return False
 
-    def scan_resources(self, path, exclude_paths=None):
-        labels = self.get_labels()
+    # Create a Resources object from the path pointed to by *path* by either traversing a
+    # a directory structure, when *path* is a directory, or adding *path* to the resources,
+    # when *path* is a file.
+    # The parameter *base_path* is used to set the base_path attribute of the Resources
+    # object and the parameter *exclude_paths* is used by the directory traversal to
+    # exclude certain paths from the traversal.
+    def scan_resources(self, path, exclude_paths=None, base_path=None):
         resources = Resources(path)
-        self.has_config = False
+        if not base_path:
+            if isfile(path):
+                base_path = dirname(path)
+            else:
+                base_path = path
+        resources.base_path = base_path
 
+        if isfile(path):
+            self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
+        else:
+            self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
+        return resources
+
+    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
+    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
+    # on every file it considers adding to the resources object.
+    def _add_dir(self, path, resources, base_path, exclude_paths=None):
         """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
         When topdown is True, the caller can modify the dirnames list in-place
        (perhaps using del or slice assignment), and walk() will only recurse into
@@ -405,40 +465,45 @@
         bottom-up mode the directories in dirnames are generated before dirpath
         itself is generated.
         """
+        labels = self.get_labels()
         for root, dirs, files in walk(path, followlinks=True):
-            # Remove ignored directories
             # Check if folder contains .mbedignore
-            if ".mbedignore" in files :
+            if ".mbedignore" in files:
                 with open (join(root,".mbedignore"), "r") as f:
                     lines=f.readlines()
                     lines = [l.strip() for l in lines] # Strip whitespaces
                     lines = [l for l in lines if l != ""] # Strip empty lines
                     lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
-                    # Append root path to glob patterns
-                    # and append patterns to ignorepatterns
-                    self.ignorepatterns.extend([join(root,line.strip()) for line in lines])
+                    # Append root path to glob patterns and append patterns to ignore_patterns
+                    self.ignore_patterns.extend([join(root,line.strip()) for line in lines])
+
+            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
+            if self.is_ignored(join(root,"")):
+                continue
 
             for d in copy(dirs):
                 dir_path = join(root, d)
+                # Add internal repo folders/files. This is needed for exporters
                 if d == '.hg':
                     resources.repo_dirs.append(dir_path)
                     resources.repo_files.extend(self.scan_repository(dir_path))
 
                 if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
+                    # Ignore targets that do not match the TARGET in extra_labels list
                     (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
-                    (d.startswith('FEATURE_') and d[8:] not in labels['FEATURE']) or
+                    # Ignore toolchain that do not match the current TOOLCHAIN
                     (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
+                    # Ignore .mbedignore files
+                    self.is_ignored(join(dir_path,"")) or
+                    # Ignore TESTS dir
                     (d == 'TESTS')):
+                    dirs.remove(d)
+                elif d.startswith('FEATURE_'):
+                    # Recursively scan features but ignore them in the current scan.
+                    # These are dynamically added by the config system if the conditions are matched
+                    resources.features[d[8:]] = self.scan_resources(dir_path, base_path=base_path)
                     dirs.remove(d)
-
-
-                # Remove dirs that already match the ignorepatterns
-                # to avoid travelling into them and to prevent them
-                # on appearing in include path.
-                if self.is_ignored(join(dir_path,"")):
-                    dirs.remove(d)
-
-                if exclude_paths:
+                elif exclude_paths:
                     for exclude_path in exclude_paths:
                         rel_path = relpath(dir_path, exclude_path)
                         if not (rel_path.startswith('..')):
@@ -450,58 +515,61 @@
 
             for file in files:
                 file_path = join(root, file)
-
-                if self.is_ignored(file_path):
-                    continue
+                self._add_file(file_path, resources, base_path)
 
-                _, ext = splitext(file)
-                ext = ext.lower()
+    # A helper function for both scan_resources and _add_dir. _add_file adds one file
+    # (*file_path*) to the resources object based on the file type.
+    def _add_file(self, file_path, resources, base_path, exclude_paths=None):
+        resources.file_basepath[file_path] = base_path
 
-                if ext == '.s':
-                    resources.s_sources.append(file_path)
+        if self.is_ignored(file_path):
+            return
+
+        _, ext = splitext(file_path)
+        ext = ext.lower()
 
-                elif ext == '.c':
-                    resources.c_sources.append(file_path)
+        if ext == '.s':
+            resources.s_sources.append(file_path)
 
-                elif ext == '.cpp':
-                    resources.cpp_sources.append(file_path)
+        elif ext == '.c':
+            resources.c_sources.append(file_path)
 
-                elif ext == '.h' or ext == '.hpp':
-                    if basename(file_path) == "mbed_config.h":
-                        self.has_config = True
-                    resources.headers.append(file_path)
+        elif ext == '.cpp':
+            resources.cpp_sources.append(file_path)
 
-                elif ext == '.o':
-                    resources.objects.append(file_path)
+        elif ext == '.h' or ext == '.hpp':
+            resources.headers.append(file_path)
+
+        elif ext == '.o':
+            resources.objects.append(file_path)
 
-                elif ext == self.LIBRARY_EXT:
-                    resources.libraries.append(file_path)
-                    resources.lib_dirs.add(root)
+        elif ext == self.LIBRARY_EXT:
+            resources.libraries.append(file_path)
+            resources.lib_dirs.add(dirname(file_path))
 
-                elif ext == self.LINKER_EXT:
-                    if resources.linker_script is not None:
-                        self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
-                    resources.linker_script = file_path
+        elif ext == self.LINKER_EXT:
+            if resources.linker_script is not None:
+                self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
+            resources.linker_script = file_path
 
-                elif ext == '.lib':
-                    resources.lib_refs.append(file_path)
+        elif ext == '.lib':
+            resources.lib_refs.append(file_path)
 
-                elif ext == '.bld':
-                    resources.lib_builds.append(file_path)
+        elif ext == '.bld':
+            resources.lib_builds.append(file_path)
 
-                elif file == '.hgignore':
-                    resources.repo_files.append(file_path)
+        elif file == '.hgignore':
+            resources.repo_files.append(file_path)
 
-                elif ext == '.hex':
-                    resources.hex_files.append(file_path)
+        elif ext == '.hex':
+            resources.hex_files.append(file_path)
 
-                elif ext == '.bin':
-                    resources.bin_files.append(file_path)
+        elif ext == '.bin':
+            resources.bin_files.append(file_path)
 
-                elif ext == '.json':
-                    resources.json_files.append(file_path)
+        elif ext == '.json':
+            resources.json_files.append(file_path)
 
-        return resources
 
     def scan_repository(self, path):
         resources = []
@@ -518,7 +586,7 @@
 
         return resources
 
-    def copy_files(self, files_paths, trg_path, rel_path=None):
+    def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
         # Handle a single file
         if type(files_paths) != ListType:
             files_paths = [files_paths]
@@ -528,7 +596,9 @@
                 files_paths.remove(source)
 
         for source in files_paths:
-            if rel_path is not None:
+            if resources is not None and resources.file_basepath.has_key(source):
+                relative_path = relpath(source, resources.file_basepath[source])
+            elif rel_path is not None:
                 relative_path = relpath(source, rel_path)
             else:
                 _, relative_path = split(source)
@@ -581,15 +651,13 @@
         queue = []
         prev_dir = None
 
-        # The dependency checking for C/C++ is delegated to the compiler
-        base_path = resources.base_path
         # Sort compile queue for consistency
         files_to_compile.sort()
         work_dir = getcwd()
 
         for source in files_to_compile:
             _, name, _ = split_path(source)
-            object = self.relative_object_path(build_path, base_path, source)
+            object = self.relative_object_path(build_path, resources.file_basepath[source], source)
 
             # Queue mode (multiprocessing)
             commands = self.compile_command(source, object, inc_paths)
@@ -783,6 +851,7 @@
     def default_cmd(self, command):
         self.debug("Command: %s"% ' '.join(command))
         _stdout, _stderr, _rc = run_cmd(command)
+        self.debug("Return: %s"% _rc)
 
         for output_line in _stdout.splitlines():
@@ -795,7 +864,6 @@
                 self.tool_error(line)
             raise ToolException(_stderr)
 
-
     ### NOTIFICATIONS ###
     def info(self, message):
         self.notify({'type': 'info', 'message': message})
@@ -850,8 +918,29 @@
 
         # Write output to file in CSV format for the CI
         map_csv = splitext(map)[0] + "_map.csv"
        memap.generate_output('csv-ci', map_csv)
-
-
+
+    # Set the configuration data
+    def set_config_data(self, config_data):
+        self.config_data = config_data
+
+    # Return the location of the config header. This function will create the config
+    # header first if needed. The header will be written in a file called "mbed_conf.h"
+    # located in the project's build directory.
+    # If config headers are not used (self.config_header_content is None), the function
+    # returns None
+    def get_config_header(self):
+        if self.config_data is None:
+            return None
+        config_file = join(self.build_dir, self.MBED_CONFIG_FILE_NAME)
+        if not exists(config_file):
+            with open(config_file, "wt") as f:
+                f.write(Config.config_to_header(self.config_data))
+        return config_file
+
+    # Return the list of macros geenrated by the build system
+    def get_config_macros(self):
+        return Config.config_to_macros(self.config_data) if self.config_data else []
+
 from tools.settings import ARM_BIN
 from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
 from tools.settings import IAR_PATH
@@ -877,81 +966,3 @@
 }
 
 TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
-
-# Toolchain profiles for backward compatibility with old mbed SDK library releases
-TOOLCHAIN_PROFILES = {
-    'ARM_STD' : {
-        'v5': {
-            'ARMCC_VERSION': '5.06',
-            'COMMON_FLAGS': ['-c', '--gnu', '-O3', '-Otime', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-            'COMPILE_C_AS_CPP': False,
-        },
-        'v4': {
-            'ARMCC_VERSION': '5.03',
-            'COMMON_FLAGS': ['-c', '--gnu', '-O3', '-Otime', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-            'COMPILE_C_AS_CPP': False,
-        },
-        'v3': {
-            'ARMCC_VERSION': '5.01',
-            'COMMON_FLAGS': ['-c', '--gnu', '-Ospace', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-            'COMPILE_C_AS_CPP': False,
-        },
-        'v2': {
-            'ARMCC_VERSION': '5.01',
-            'COMMON_FLAGS': ['-c', '--gnu', '-Ospace', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-            'COMPILE_C_AS_CPP': False,
-        },
-        'v1': {
-            'ARMCC_VERSION': '4',
-            'COMMON_FLAGS': ['-c', '--gnu', '-Otime', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp'],
-            'COMPILE_C_AS_CPP': True,
-        }
-    },
-    'ARM_MICRO' : {
-        'v5': {
-            'ARMCC_VERSION': '5.06',
-            'COMMON_FLAGS': ['-c', '--gnu', '-O3', '-Otime', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-        },
-        'v4': {
-            'ARMCC_VERSION': '5.03',
-            'COMMON_FLAGS': ['-c', '--gnu', '-O3', '-Otime', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-        },
-        'v3': {
-            'ARMCC_VERSION': '5.01',
-            'COMMON_FLAGS': ['-c', '--gnu', '-Ospace', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-        },
-        'v2': {
-            'ARMCC_VERSION': '4',
-            'COMMON_FLAGS': ['-c', '--gnu', '-Ospace', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp', '--no_rtti'],
-            'PATCHED_LIBRARY' : True,
-        },
-        'v1': {
-            'ARMCC_VERSION': '4',
-            'COMMON_FLAGS': ['-c', '--gnu', '-Otime', '--split_sections', '--apcs=interwork'],
-            'COMMON_CPP_FLAGS': ['--cpp'],
-            'COMPILE_C_AS_CPP': True,
-            'PATCHED_LIBRARY' : True,
-        }
-    },
-    'GCC_ARM' : {
-        'v2': {
-            'COMMON_FLAGS': ["-c", "-O2", "-Wall", "-fmessage-length=0", "-fno-exceptions", "-fno-builtin", "-ffunction-sections", "-fdata-sections"],
-            'COMMON_CPP_FLAGS': ['-std=gnu++98'],
-            'COMMON_LD_FLAGS': ['-Wl,--gc-sections'],
-        },
-        'v1': {
-            'COMMON_FLAGS': ["-c", "-O2", "-Wall", "-fmessage-length=0", "-fno-exceptions", "-fno-builtin", "-ffunction-sections", "-fdata-sections"],
-            'COMMON_CPP_FLAGS': ['-std=gnu++98'],
-            'COMMON_LD_FLAGS': ['-Wl,--gc-sections'],
-        }
-    }
-}
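
Editor's note on the FEATURE_ handling introduced above: a plain scan no longer folds feature sub-trees into the main source lists; they are parked in Resources.features keyed by feature name and only merged back when a feature is enabled. A minimal sketch of how a caller might consume this; the K64F/GCC_ARM combination and the ./source path are made-up examples, and TARGET_MAP is assumed to come from tools.targets as elsewhere in the tools tree:

    from tools.targets import TARGET_MAP              # assumed helper from the same tools tree
    from tools.toolchains import TOOLCHAIN_CLASSES

    target = TARGET_MAP['K64F']                       # hypothetical target
    toolchain = TOOLCHAIN_CLASSES['GCC_ARM'](target)
    resources = toolchain.scan_resources('./source')  # hypothetical project tree

    # FEATURE_<NAME> directories are scanned recursively but kept out of the main
    # lists; resources.features['<NAME>'] holds their own Resources object.
    for name, feature_res in resources.features.items():
        if name in target.features:                   # only features enabled for this target
            resources.add(feature_res)                # merges sources, headers and base paths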
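The .mbedignore support in _add_dir boils down to fnmatch over full paths: each pattern read from a .mbedignore file is prefixed with the directory that contains the file and later tested by is_ignored(). A self-contained illustration of that matching, with invented paths and patterns:

    import fnmatch
    from os.path import join

    ignore_patterns = []

    # Pretend source/experimental/.mbedignore contained these two lines
    mbedignore_dir = 'source/experimental'
    lines = ['*.bak', 'third_party/*']
    ignore_patterns.extend([join(mbedignore_dir, line) for line in lines])

    def is_ignored(file_path):
        # Same check the toolchain performs for every file and directory it visits
        return any(fnmatch.fnmatch(file_path, pattern) for pattern in ignore_patterns)

    print(is_ignored('source/experimental/third_party/lib.c'))  # True
    print(is_ignored('source/main.cpp'))                        # False

A pattern of '*' therefore covers the whole folder, which is why _add_dir now also tests the directory itself (join(root, "")) before descending into it.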
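Recording file_basepath per file is what keeps copy_files() and relative_object_path() correct once several scan roots have been merged: every file stays relative to the tree it was found in rather than to a single shared base_path. Continuing the sketch above (the ./mbed-os and BUILD paths are invented):

    # A second, independent scan root; its base paths travel with it on add()
    lib_res = toolchain.scan_resources('./mbed-os')
    resources.add(lib_res)

    # With resources= passed, each header is laid out under BUILD relative to its
    # own scan root, e.g. ./mbed-os/hal/ticker_api.h -> BUILD/hal/ticker_api.h
    toolchain.copy_files(resources.headers, 'BUILD', resources=resources)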
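The new config hooks replace the old has_config / HAVE_MBED_CONFIG_H mechanism: the caller hands the toolchain the data produced by the config system, and the toolchain either writes mbed_config.h into its build directory or exposes the same settings as a macro list. A hedged sketch of that flow, continuing the sketch above; the Config constructor arguments and the Config.get_config_data() call are assumptions based on the tools.config import added in this changeset:

    from tools.config import Config

    config = Config('K64F', ['./source'])     # assumed constructor: target name + source roots
    toolchain.build_dir = 'BUILD'             # must be set before the header can be written
    toolchain.set_config_data(config.get_config_data())

    header = toolchain.get_config_header()    # writes BUILD/mbed_config.h once, or returns None
    macros = toolchain.get_config_macros()    # same configuration as a list of macro definitions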