Clone of official tools

Revision: 0:66f3b5499f7f
Child: 13:ab47a20b66f0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/export/exporters.py	Thu May 19 19:44:41 2016 +0100
@@ -0,0 +1,193 @@
+"""Base exporter class, intended as a template for tool-specific exporter subclasses"""
+import uuid, shutil, os, logging, fnmatch
+from os import walk, remove
+from os.path import join, dirname, isdir, split
+from copy import copy
+from jinja2 import Template, FileSystemLoader
+from jinja2.environment import Environment
+from contextlib import closing
+from zipfile import ZipFile, ZIP_DEFLATED
+
+from tools.utils import mkdir
+from tools.toolchains import TOOLCHAIN_CLASSES
+from tools.targets import TARGET_MAP
+
+from project_generator.generate import Generator
+from project_generator.project import Project
+from project_generator.settings import ProjectSettings
+
+class OldLibrariesException(Exception): pass
+
+class Exporter(object):
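+    """ Base class for IDE project exporters; subclasses set TOOLCHAIN and provide tool-specific templates or ProGen settings """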
+    TEMPLATE_DIR = dirname(__file__)
+    DOT_IN_RELATIVE_PATH = False
+
+    def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None):
+        self.inputDir = inputDir
+        self.target = target
+        self.program_name = program_name
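+        # Build the toolchain instance declared by the subclass (TOOLCHAIN) for the requested target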
+        self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target])
+        self.build_url_resolver = build_url_resolver
+        jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
+        self.jinja_environment = Environment(loader=jinja_loader)
+        self.extra_symbols = extra_symbols
+
+    def get_toolchain(self):
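+        """ Return the toolchain name; subclasses are expected to define TOOLCHAIN """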
+        return self.TOOLCHAIN
+
+    def __scan_and_copy(self, src_path, trg_path):
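+        """ Scan src_path with the toolchain and copy each discovered resource group into trg_path """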
+        resources = self.toolchain.scan_resources(src_path)
+
+        for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
+            'objects', 'libraries', 'linker_script',
+            'lib_builds', 'lib_refs', 'repo_files', 'hex_files', 'bin_files']:
+            r = getattr(resources, r_type)
+            if r:
+                self.toolchain.copy_files(r, trg_path, rel_path=src_path)
+        return resources
+
+    @staticmethod
+    def _get_dir_grouped_files(files):
+        """ Get grouped files based on the dirname """
+        files_grouped = {}
+        for file in files:
+            rel_path = os.path.relpath(file, os.getcwd())
+            dir_path = os.path.dirname(rel_path)
+            if dir_path == '':
+                # all files within the current dir go into Source_Files
+                dir_path = 'Source_Files'
+            if dir_path not in files_grouped:
+                files_grouped[dir_path] = []
+            files_grouped[dir_path].append(file)
+        return files_grouped
+
+    def progen_get_project_data(self):
+        """ Get ProGen project data  """
+        # provide default data, some tools don't require any additional
+        # tool specific settings
+        code_files = []
+        for r_type in ['c_sources', 'cpp_sources', 's_sources']:
+            for file in getattr(self.resources, r_type):
+                code_files.append(file)
+
+        sources_files = code_files + self.resources.hex_files + self.resources.objects + \
+            self.resources.libraries
+        sources_grouped = Exporter._get_dir_grouped_files(sources_files)
+        headers_grouped = Exporter._get_dir_grouped_files(self.resources.headers)
+
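+        # Minimal common project description consumed by ProGen; tool-specific
+        # exporters may extend this with their own settings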
+        project_data = {
+            'common': {
+                'sources': sources_grouped,
+                'includes': headers_grouped,
+                'build_dir': '.build',
+                'target': [TARGET_MAP[self.target].progen['target']],
+                'macros': self.get_symbols(),
+                'export_dir': [self.inputDir],
+                'linker_file': [self.resources.linker_script],
+            }
+        }
+        return project_data
+
+    def progen_gen_file(self, tool_name, project_data):
+        """ Generate the project using the ProGen Project API """
+        settings = ProjectSettings()
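+        # Build a single ProGen project from the generated data and default settings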
+        project = Project(self.program_name, [project_data], settings)
+        # TODO: Fix this. The inc_dirs are not valid here (our scripts copy the files),
+        # so progen treats them as files rather than a dict and adds them to the workspace.
+        project.project['common']['include_paths'] = self.resources.inc_dirs
+        project.generate(tool_name, copied=True)
+
+    def __scan_all(self, path):
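+        """ Recursively collect every file path below path (used for repository dirs) """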
+        resources = []
+
+        for root, dirs, files in walk(path):
+            for d in copy(dirs):
+                if d == '.' or d == '..':
+                    dirs.remove(d)
+
+            for file in files:
+                file_path = join(root, file)
+                resources.append(file_path)
+
+        return resources
+
+    def scan_and_copy_resources(self, prj_path, trg_path, relative=False):
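+        """ Scan lib/ and src/ under prj_path, copy the matching resources into trg_path,
+            then populate self.resources from trg_path (or from prj_path when relative=True).
+        """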
+        # Copy only the files for the required target and toolchain
+        lib_builds = []
+        for src in ['lib', 'src']:
+            resources = self.__scan_and_copy(join(prj_path, src), trg_path)
+            lib_builds.extend(resources.lib_builds)
+
+            # The repository files
+            for repo_dir in resources.repo_dirs:
+                repo_files = self.__scan_all(repo_dir)
+                self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))
+
+        # The library builds: resolve each referenced build URL and copy the library in
+        for bld in lib_builds:
+            build_url = open(bld).read().strip()
+            lib_data = self.build_url_resolver(build_url)
+            lib_path = lib_data['path'].rstrip('\\/')
+            self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))
+
+            # Create .hg dir in mbed build dir so it's ignored when versioning
+            hgdir = join(trg_path, lib_data['name'], '.hg')
+            mkdir(hgdir)
+            with open(join(hgdir, 'keep.me'), 'a'):
+                pass
+
+        if not relative:
+            # Final scan of the actual exported resources
+            self.resources = self.toolchain.scan_resources(trg_path)
+            self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
+        else:
+            # use prj_path (the source, not the destination)
+            self.resources = self.toolchain.scan_resources(prj_path)
+        # Check the existence of a binary build of the mbed library for the desired target
+        # This prevents exporting the mbed libraries from source
+        # if not self.toolchain.mbed_libs:
+        #    raise OldLibrariesException()
+
+    def gen_file(self, template_file, data, target_file):
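+        """ Render template_file with data and write the output to target_file under self.inputDir """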
+        template_path = join(Exporter.TEMPLATE_DIR, template_file)
+        template = self.jinja_environment.get_template(template_file)
+        target_text = template.render(data)
+
+        target_path = join(self.inputDir, target_file)
+        logging.debug("Generating: %s", target_path)
+        with open(target_path, "w") as f:
+            f.write(target_text)
+
+    def get_symbols(self, add_extra_symbols=True):
+        """ Return the symbols that must be exported.
+            Add or override symbols in each exporter subclass as needed.
+        """
+        symbols = self.toolchain.get_symbols()
+        # Extra symbols (e.g. from libraries) are also added to the export
+        if add_extra_symbols:
+            if self.extra_symbols is not None:
+                symbols.extend(self.extra_symbols)
+        return symbols
+
+def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
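+    """ Zip tempdirectory into destination/<uuid>.zip, rooted at program_name/,
+        and optionally delete tempdirectory afterwards; returns the zip path.
+    """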
+    uid = str(uuid.uuid4())
+    zipfilename = '%s.zip' % uid
+
+    logging.debug("Zipping up %s to %s", tempdirectory, join(destination, zipfilename))
+    # make zip
+    def zipdir(basedir, archivename):
+        assert isdir(basedir)
+        fakeroot = program_name + '/'
+        with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
+            for root, _, files in os.walk(basedir):
+                # NOTE: ignore empty directories
+                for fn in files:
+                    absfn = join(root, fn)
+                    zfn = fakeroot + absfn[len(basedir) + len(os.sep):]
+                    z.write(absfn, zfn)
+
+    zipdir(tempdirectory, join(destination, zipfilename))
+
+    if clean:
+        shutil.rmtree(tempdirectory)
+
+    return join(destination, zipfilename)