Clone of official tools
Diff: export/exporters.py
- Revision: 13:ab47a20b66f0
- Parent: 0:66f3b5499f7f
- Child: 23:fbae331171fa
--- a/export/exporters.py	Tue Jun 14 11:33:06 2016 +0100
+++ b/export/exporters.py	Thu Jul 14 20:21:19 2016 +0100
@@ -7,6 +7,7 @@
 from jinja2.environment import Environment
 from contextlib import closing
 from zipfile import ZipFile, ZIP_DEFLATED
+from operator import add
 
 from tools.utils import mkdir
 from tools.toolchains import TOOLCHAIN_CLASSES
@@ -16,13 +17,15 @@
 from project_generator.project import Project
 from project_generator.settings import ProjectSettings
 
+from tools.config import Config
+
 class OldLibrariesException(Exception): pass
 
 class Exporter(object):
     TEMPLATE_DIR = dirname(__file__)
     DOT_IN_RELATIVE_PATH = False
 
-    def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None):
+    def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None, sources_relative=True):
         self.inputDir = inputDir
         self.target = target
         self.program_name = program_name
@@ -31,19 +34,35 @@
         jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
         self.jinja_environment = Environment(loader=jinja_loader)
         self.extra_symbols = extra_symbols
+        self.config_macros = []
+        self.sources_relative = sources_relative
+        self.config_header = None
 
     def get_toolchain(self):
         return self.TOOLCHAIN
 
+    @property
+    def flags(self):
+        return self.toolchain.flags
+
+    @property
+    def progen_flags(self):
+        if not hasattr(self, "_progen_flag_cache") :
+            self._progen_flag_cache = dict([(key + "_flags", value) for key,value in self.flags.iteritems()])
+            if self.config_header:
+                self._progen_flag_cache['c_flags'] += self.toolchain.get_config_option(self.config_header)
+                self._progen_flag_cache['cxx_flags'] += self.toolchain.get_config_option(self.config_header)
+        return self._progen_flag_cache
+
     def __scan_and_copy(self, src_path, trg_path):
         resources = self.toolchain.scan_resources(src_path)
 
         for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
             'objects', 'libraries', 'linker_script',
-            'lib_builds', 'lib_refs', 'repo_files', 'hex_files', 'bin_files']:
+            'lib_builds', 'lib_refs', 'hex_files', 'bin_files']:
             r = getattr(resources, r_type)
             if r:
-                self.toolchain.copy_files(r, trg_path, rel_path=src_path)
+                self.toolchain.copy_files(r, trg_path, resources=resources)
         return resources
 
     @staticmethod
@@ -89,13 +108,13 @@
         return project_data
 
     def progen_gen_file(self, tool_name, project_data):
-        """" Generate project using ProGen Project API """
+        """ Generate project using ProGen Project API """
        settings = ProjectSettings()
         project = Project(self.program_name, [project_data], settings)
         # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen
         # thinks it is not dict but a file, and adds them to workspace.
         project.project['common']['include_paths'] = self.resources.inc_dirs
-        project.generate(tool_name, copied=True)
+        project.generate(tool_name, copied=not self.sources_relative)
 
     def __scan_all(self, path):
         resources = []
@@ -111,17 +130,25 @@
 
         return resources
 
-    def scan_and_copy_resources(self, prj_path, trg_path, relative=False):
+    def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
         # Copy only the file for the required target and toolchain
         lib_builds = []
+        # Create the configuration object
+        if isinstance(prj_paths, basestring):
+            prj_paths = [prj_paths]
+        config = Config(self.target, prj_paths)
         for src in ['lib', 'src']:
-            resources = self.__scan_and_copy(join(prj_path, src), trg_path)
+            resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path)
+            for path in prj_paths[1:]:
+                resources.add(self.__scan_and_copy(join(path, src), trg_path))
+
             lib_builds.extend(resources.lib_builds)
 
             # The repository files
-            for repo_dir in resources.repo_dirs:
-                repo_files = self.__scan_all(repo_dir)
-                self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))
+            #for repo_dir in resources.repo_dirs:
+            #    repo_files = self.__scan_all(repo_dir)
+            #    for path in prj_paths:
+            #        self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))
 
             # The libraries builds
             for bld in lib_builds:
@@ -138,15 +165,25 @@
 
         if not relative:
             # Final scan of the actual exported resources
-            self.resources = self.toolchain.scan_resources(trg_path)
-            self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
+            resources = self.toolchain.scan_resources(trg_path)
+            resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
         else:
             # use the prj_dir (source, not destination)
-            self.resources = self.toolchain.scan_resources(prj_path)
-        # Check the existence of a binary build of the mbed library for the desired target
-        # This prevents exporting the mbed libraries from source
-        # if not self.toolchain.mbed_libs:
-        #    raise OldLibrariesException()
+            resources = self.toolchain.scan_resources(prj_paths[0])
+            for path in prj_paths[1:]:
+                resources.add(toolchain.scan_resources(path))
+
+        # Loads the resources into the config system which might expand/modify resources based on config data
+        self.resources = config.load_resources(resources)
+
+        if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED :
+            # Add the configuration file to the target directory
+            self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
+            config.get_config_data_header(join(trg_path, self.config_header))
+            self.config_macros = []
+        else:
+            # And add the configuration macros to the toolchain
+            self.config_macros = config.get_config_data_macros()
 
     def gen_file(self, template_file, data, target_file):
         template_path = join(Exporter.TEMPLATE_DIR, template_file)
@@ -161,7 +198,7 @@
         """ This function returns symbols which must be exported.
             Please add / overwrite symbols in each exporter separately
         """
-        symbols = self.toolchain.get_symbols()
+        symbols = self.toolchain.get_symbols() + self.config_macros
        # We have extra symbols from e.g. libraries, we want to have them also added to export
         if add_extra_symbols:
             if self.extra_symbols is not None:
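Note on the new progen_flags property: it builds the per-category flag dictionary once and reuses it on later accesses, appending the config-header pre-include option to the C and C++ flags when a header is present. Below is a minimal, runnable sketch of that caching pattern; FakeToolchain, its flags dict, and get_config_option() are hypothetical stand-ins for the real mbed toolchain object, not the exporter's actual code.

class FakeToolchain(object):
    # Hypothetical flag categories; the real toolchain exposes a similar dict.
    flags = {"c": ["-O2"], "cxx": ["-O2"], "asm": [], "ld": []}

    def get_config_option(self, config_header):
        # Stand-in for the real pre-include option generation.
        return ["--preinclude=" + config_header]


class CachedFlagsSketch(object):
    def __init__(self, toolchain, config_header=None):
        self.toolchain = toolchain
        self.config_header = config_header

    @property
    def progen_flags(self):
        # Build the dict on first access, reuse the cached copy afterwards.
        if not hasattr(self, "_progen_flag_cache"):
            self._progen_flag_cache = dict(
                (key + "_flags", list(value))
                for key, value in self.toolchain.flags.items())
            if self.config_header:
                opt = self.toolchain.get_config_option(self.config_header)
                self._progen_flag_cache["c_flags"] += opt
                self._progen_flag_cache["cxx_flags"] += opt
        return self._progen_flag_cache


exporter = CachedFlagsSketch(FakeToolchain(), config_header="mbed_config.h")
print(exporter.progen_flags["c_flags"])  # ['-O2', '--preinclude=mbed_config.h']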
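scan_and_copy_resources now accepts either a single project path or a list of paths: a lone string is wrapped in a list, the first path seeds the resource set, and the remaining paths are merged into it via resources.add(). The standalone sketch below mirrors that normalize-then-merge flow with hypothetical Resources and scan() stand-ins for the tools' Resources type and toolchain.scan_resources().

class Resources(object):
    # Hypothetical, much-reduced stand-in for the tools' Resources type.
    def __init__(self, c_sources=None, inc_dirs=None):
        self.c_sources = list(c_sources or [])
        self.inc_dirs = list(inc_dirs or [])

    def add(self, other):
        # Merge another scan result, as resources.add() does in the diff.
        self.c_sources += other.c_sources
        self.inc_dirs += other.inc_dirs


def scan(path):
    # Stand-in for toolchain.scan_resources(path).
    return Resources(c_sources=[path + "/main.c"], inc_dirs=[path])


def collect_resources(prj_paths):
    # A lone string is wrapped in a list (the diff checks basestring, Python 2).
    if isinstance(prj_paths, str):
        prj_paths = [prj_paths]
    resources = scan(prj_paths[0])
    for path in prj_paths[1:]:
        resources.add(scan(path))
    return resources


print(collect_resources(["project", "mbed-os"]).inc_dirs)  # ['project', 'mbed-os']
print(collect_resources("project").c_sources)              # ['project/main.c']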
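The last hunks also split configuration delivery in two: exporters that declare MBED_CONFIG_HEADER_SUPPORTED get the configuration written out as a header (and clear config_macros), while the rest receive it as macros that get_symbols() appends to the toolchain symbols. A hedged sketch of that branch follows; FakeConfig and the exporter classes here are hypothetical stand-ins, not tools.config.Config or real exporters.

from os.path import join


class FakeConfig(object):
    # Hypothetical stand-in for tools.config.Config.
    def get_config_data_macros(self):
        return ["MBED_CONF_EXAMPLE_ENABLED=1"]

    def get_config_data_header(self, path):
        with open(path, "w") as out:
            out.write("#define MBED_CONF_EXAMPLE_ENABLED 1\n")


def apply_config(exporter, config, trg_path, header_name="mbed_config.h"):
    # Mirrors the branch in scan_and_copy_resources: emit a header when the
    # tool supports pre-includes, otherwise fall back to plain macros.
    if getattr(exporter, "MBED_CONFIG_HEADER_SUPPORTED", False):
        exporter.config_header = header_name
        config.get_config_data_header(join(trg_path, header_name))
        exporter.config_macros = []
    else:
        exporter.config_macros = config.get_config_data_macros()


mkfile = type("MakefileLike", (object,), {})()  # hypothetical exporter without header support
apply_config(mkfile, FakeConfig(), ".")
print(mkfile.config_macros)                     # ['MBED_CONF_EXAMPLE_ENABLED=1']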