Important changes to repositories hosted on mbed.com
Mbed-hosted Mercurial repositories are deprecated and are due to be permanently deleted in July 2026.
To keep a copy of this software, download the repository Zip archive or clone it locally using Mercurial.
It is also possible to export all your personal repositories from the account settings page.
Dependents: mbed-TFT-example-NCS36510 mbed-Accelerometer-example-NCS36510
tools/export/iar/__init__.py@1:f30bdcd2b33b, 2017-02-27 (annotated)
- Committer: jacobjohnson
- Date: Mon Feb 27 17:45:05 2017 +0000
- Revision: 1:f30bdcd2b33b
- Parent: 0:098463de4c5d
Commit message: Changed the inputscale from 1 to 7 in analogin_api.c. This will need to be changed later and accessed from the main level, but for now it allows the ADC to read values from 0 to 3.7 V instead of only up to 1 V.
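That note implies the input-scale setting selects the ADC's full-scale voltage. The sketch below is illustrative only: it assumes the scale-to-voltage mapping the commit message describes (setting 1 ≈ 1.0 V full scale, setting 7 ≈ 3.7 V) and a hypothetical 10-bit reading; the actual register semantics live in analogin_api.c and the NCS36510 documentation, not in the file shown on this page.

```python
# Illustrative only: convert a raw ADC count to volts for a given full-scale
# voltage. The inputscale -> full-scale mapping simply restates the commit
# message (1 -> ~1.0 V, 7 -> ~3.7 V); the resolution is an assumption.
ADC_RESOLUTION_BITS = 10                       # assumed, not confirmed by this repo
FULL_SCALE_BY_INPUTSCALE = {1: 1.0, 7: 3.7}    # volts, per the commit message

def raw_to_volts(raw, inputscale=7):
    """Scale a raw ADC reading into volts for the selected input scale."""
    full_scale = FULL_SCALE_BY_INPUTSCALE[inputscale]
    return raw * full_scale / ((1 << ADC_RESOLUTION_BITS) - 1)

# Example: a mid-range reading of 511 maps to roughly 1.85 V with inputscale=7.
print(raw_to_volts(511))
```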
Who changed what in which revision?
User | Revision | Line number | New contents of line |
---|---|---|---|
group-onsemi | 0:098463de4c5d | 1 | import os |
group-onsemi | 0:098463de4c5d | 2 | from os.path import sep, join, exists |
group-onsemi | 0:098463de4c5d | 3 | from collections import namedtuple |
group-onsemi | 0:098463de4c5d | 4 | from subprocess import Popen, PIPE |
group-onsemi | 0:098463de4c5d | 5 | import shutil |
group-onsemi | 0:098463de4c5d | 6 | import re |
group-onsemi | 0:098463de4c5d | 7 | import sys |
group-onsemi | 0:098463de4c5d | 8 | |
group-onsemi | 0:098463de4c5d | 9 | from tools.targets import TARGET_MAP |
group-onsemi | 0:098463de4c5d | 10 | from tools.export.exporters import Exporter, TargetNotSupportedException |
group-onsemi | 0:098463de4c5d | 11 | import json |
group-onsemi | 0:098463de4c5d | 12 | from tools.export.cmsis import DeviceCMSIS |
group-onsemi | 0:098463de4c5d | 13 | from multiprocessing import cpu_count |
group-onsemi | 0:098463de4c5d | 14 | |
group-onsemi | 0:098463de4c5d | 15 | class IAR(Exporter): |
group-onsemi | 0:098463de4c5d | 16 | NAME = 'iar' |
group-onsemi | 0:098463de4c5d | 17 | TOOLCHAIN = 'IAR' |
group-onsemi | 0:098463de4c5d | 18 | |
group-onsemi | 0:098463de4c5d | 19 | #iar_definitions.json location |
group-onsemi | 0:098463de4c5d | 20 | def_loc = os.path.join( |
group-onsemi | 0:098463de4c5d | 21 | os.path.dirname(os.path.abspath(__file__)), '..', '..', '..', |
group-onsemi | 0:098463de4c5d | 22 | 'tools','export', 'iar', 'iar_definitions.json') |
group-onsemi | 0:098463de4c5d | 23 | |
group-onsemi | 0:098463de4c5d | 24 | #create a dictionary of the definitions |
group-onsemi | 0:098463de4c5d | 25 | with open(def_loc, 'r') as f: |
group-onsemi | 0:098463de4c5d | 26 | IAR_DEFS = json.load(f) |
group-onsemi | 0:098463de4c5d | 27 | |
group-onsemi | 0:098463de4c5d | 28 | #supported targets have a device name and corresponding definition in |
group-onsemi | 0:098463de4c5d | 29 | #iar_definitions.json |
group-onsemi | 0:098463de4c5d | 30 | TARGETS = [target for target, obj in TARGET_MAP.iteritems() |
group-onsemi | 0:098463de4c5d | 31 | if hasattr(obj, 'device_name') and |
group-onsemi | 0:098463de4c5d | 32 | obj.device_name in IAR_DEFS.keys() and "IAR" in obj.supported_toolchains] |
group-onsemi | 0:098463de4c5d | 33 | |
group-onsemi | 0:098463de4c5d | 34 | def iar_groups(self, grouped_src): |
group-onsemi | 0:098463de4c5d | 35 | """Return a namedtuple of group info |
group-onsemi | 0:098463de4c5d | 36 | Positional Arguments: |
group-onsemi | 0:098463de4c5d | 37 | grouped_src: dictionary mapping a group(str) to sources |
group-onsemi | 0:098463de4c5d | 38 | within it (list of file names) |
group-onsemi | 0:098463de4c5d | 39 | Relevant part of IAR template |
group-onsemi | 0:098463de4c5d | 40 | {% for group in groups %} |
group-onsemi | 0:098463de4c5d | 41 | <group> |
group-onsemi | 0:098463de4c5d | 42 | <name>group.name</name> |
group-onsemi | 0:098463de4c5d | 43 | {% for file in group.files %} |
group-onsemi | 0:098463de4c5d | 44 | <file> |
group-onsemi | 0:098463de4c5d | 45 | <name>$PROJ_DIR${{file}}</name> |
group-onsemi | 0:098463de4c5d | 46 | </file> |
group-onsemi | 0:098463de4c5d | 47 | {% endfor %} |
group-onsemi | 0:098463de4c5d | 48 | </group> |
group-onsemi | 0:098463de4c5d | 49 | {% endfor %} |
group-onsemi | 0:098463de4c5d | 50 | """ |
group-onsemi | 0:098463de4c5d | 51 | IARgroup = namedtuple('IARgroup', ['name','files']) |
group-onsemi | 0:098463de4c5d | 52 | groups = [] |
group-onsemi | 0:098463de4c5d | 53 | for name, files in grouped_src.items(): |
group-onsemi | 0:098463de4c5d | 54 | groups.append(IARgroup(name,files)) |
group-onsemi | 0:098463de4c5d | 55 | return groups |
group-onsemi | 0:098463de4c5d | 56 | |
group-onsemi | 0:098463de4c5d | 57 | def iar_device(self): |
group-onsemi | 0:098463de4c5d | 58 | """Retrieve info from iar_definitions.json""" |
group-onsemi | 0:098463de4c5d | 59 | device_name = TARGET_MAP[self.target].device_name |
group-onsemi | 0:098463de4c5d | 60 | device_info = self.IAR_DEFS[device_name] |
group-onsemi | 0:098463de4c5d | 61 | iar_defaults ={ |
group-onsemi | 0:098463de4c5d | 62 | "OGChipSelectEditMenu": "", |
group-onsemi | 0:098463de4c5d | 63 | "CoreVariant": '', |
group-onsemi | 0:098463de4c5d | 64 | "GFPUCoreSlave": '', |
group-onsemi | 0:098463de4c5d | 65 | "GFPUCoreSlave2": 40, |
group-onsemi | 0:098463de4c5d | 66 | "GBECoreSlave": 35, |
group-onsemi | 0:098463de4c5d | 67 | "FPU2": 0, |
group-onsemi | 0:098463de4c5d | 68 | "NrRegs": 0, |
group-onsemi | 0:098463de4c5d | 69 | } |
group-onsemi | 0:098463de4c5d | 70 | |
group-onsemi | 0:098463de4c5d | 71 | iar_defaults.update(device_info) |
group-onsemi | 0:098463de4c5d | 72 | IARdevice = namedtuple('IARdevice', iar_defaults.keys()) |
group-onsemi | 0:098463de4c5d | 73 | return IARdevice(**iar_defaults) |
group-onsemi | 0:098463de4c5d | 74 | |
group-onsemi | 0:098463de4c5d | 75 | def format_file(self, file): |
group-onsemi | 0:098463de4c5d | 76 | """Make IAR compatible path""" |
group-onsemi | 0:098463de4c5d | 77 | return join('$PROJ_DIR$',file) |
group-onsemi | 0:098463de4c5d | 78 | |
group-onsemi | 0:098463de4c5d | 79 | def format_src(self, srcs): |
group-onsemi | 0:098463de4c5d | 80 | """Group source files""" |
group-onsemi | 0:098463de4c5d | 81 | grouped = self.group_project_files(srcs) |
group-onsemi | 0:098463de4c5d | 82 | for group, files in grouped.items(): |
group-onsemi | 0:098463de4c5d | 83 | grouped[group] = [self.format_file(src) for src in files] |
group-onsemi | 0:098463de4c5d | 84 | return grouped |
group-onsemi | 0:098463de4c5d | 85 | |
group-onsemi | 0:098463de4c5d | 86 | def generate(self): |
group-onsemi | 0:098463de4c5d | 87 | """Generate the .eww, .ewd, and .ewp files""" |
group-onsemi | 0:098463de4c5d | 88 | srcs = self.resources.headers + self.resources.s_sources + \ |
group-onsemi | 0:098463de4c5d | 89 | self.resources.c_sources + self.resources.cpp_sources + \ |
group-onsemi | 0:098463de4c5d | 90 | self.resources.objects + self.resources.libraries |
group-onsemi | 0:098463de4c5d | 91 | flags = self.flags |
group-onsemi | 0:098463de4c5d | 92 | c_flags = list(set(flags['common_flags'] |
group-onsemi | 0:098463de4c5d | 93 | + flags['c_flags'] |
group-onsemi | 0:098463de4c5d | 94 | + flags['cxx_flags'])) |
group-onsemi | 0:098463de4c5d | 95 | # Flags set in template to be set by user in IDE |
group-onsemi | 0:098463de4c5d | 96 | template = ["--vla", "--no_static_destruction"] |
group-onsemi | 0:098463de4c5d | 97 | # Flag invalid if set in template |
group-onsemi | 0:098463de4c5d | 98 | # Optimizations are also set in template |
group-onsemi | 0:098463de4c5d | 99 | invalid_flag = lambda x: x in template or re.match("-O(\d|time|n)", x) |
group-onsemi | 0:098463de4c5d | 100 | flags['c_flags'] = [flag for flag in c_flags if not invalid_flag(flag)] |
group-onsemi | 0:098463de4c5d | 101 | |
group-onsemi | 0:098463de4c5d | 102 | try: |
group-onsemi | 0:098463de4c5d | 103 | debugger = DeviceCMSIS(self.target).debug.replace('-','').upper() |
group-onsemi | 0:098463de4c5d | 104 | except TargetNotSupportedException: |
group-onsemi | 0:098463de4c5d | 105 | debugger = "CMSISDAP" |
group-onsemi | 0:098463de4c5d | 106 | |
group-onsemi | 0:098463de4c5d | 107 | ctx = { |
group-onsemi | 0:098463de4c5d | 108 | 'name': self.project_name, |
group-onsemi | 0:098463de4c5d | 109 | 'groups': self.iar_groups(self.format_src(srcs)), |
group-onsemi | 0:098463de4c5d | 110 | 'linker_script': self.format_file(self.resources.linker_script), |
group-onsemi | 0:098463de4c5d | 111 | 'include_paths': [self.format_file(src) for src in self.resources.inc_dirs], |
group-onsemi | 0:098463de4c5d | 112 | 'device': self.iar_device(), |
group-onsemi | 0:098463de4c5d | 113 | 'ewp': sep+self.project_name + ".ewp", |
group-onsemi | 0:098463de4c5d | 114 | 'debugger': debugger |
group-onsemi | 0:098463de4c5d | 115 | } |
group-onsemi | 0:098463de4c5d | 116 | ctx.update(flags) |
group-onsemi | 0:098463de4c5d | 117 | |
group-onsemi | 0:098463de4c5d | 118 | self.gen_file('iar/eww.tmpl', ctx, self.project_name + ".eww") |
group-onsemi | 0:098463de4c5d | 119 | self.gen_file('iar/ewd.tmpl', ctx, self.project_name + ".ewd") |
group-onsemi | 0:098463de4c5d | 120 | self.gen_file('iar/ewp.tmpl', ctx, self.project_name + ".ewp") |
group-onsemi | 0:098463de4c5d | 121 | |
group-onsemi | 0:098463de4c5d | 122 | @staticmethod |
group-onsemi | 0:098463de4c5d | 123 | def build(project_name, log_name="build_log.txt", cleanup=True): |
group-onsemi | 0:098463de4c5d | 124 | """ Build IAR project """ |
group-onsemi | 0:098463de4c5d | 125 | # > IarBuild [project_path] -build [project_name] |
group-onsemi | 0:098463de4c5d | 126 | proj_file = project_name + ".ewp" |
group-onsemi | 0:098463de4c5d | 127 | cmd = ["IarBuild", proj_file, '-build', project_name] |
group-onsemi | 0:098463de4c5d | 128 | |
group-onsemi | 0:098463de4c5d | 129 | # IAR does not support a '0' option to automatically use all |
group-onsemi | 0:098463de4c5d | 130 | # available CPUs, so we use Python's multiprocessing library |
group-onsemi | 0:098463de4c5d | 131 | # to detect the number of CPUs available |
group-onsemi | 0:098463de4c5d | 132 | cpus_available = cpu_count() |
group-onsemi | 0:098463de4c5d | 133 | jobs = cpus_available if cpus_available else None |
group-onsemi | 0:098463de4c5d | 134 | |
group-onsemi | 0:098463de4c5d | 135 | # Only add the parallel flag if we're using more than one CPU |
group-onsemi | 0:098463de4c5d | 136 | if jobs: |
group-onsemi | 0:098463de4c5d | 137 | cmd += ['-parallel', str(jobs)] |
group-onsemi | 0:098463de4c5d | 138 | |
group-onsemi | 0:098463de4c5d | 139 | # Build the project |
group-onsemi | 0:098463de4c5d | 140 | p = Popen(cmd, stdout=PIPE, stderr=PIPE) |
group-onsemi | 0:098463de4c5d | 141 | out, err = p.communicate() |
group-onsemi | 0:098463de4c5d | 142 | ret_code = p.returncode |
group-onsemi | 0:098463de4c5d | 143 | |
group-onsemi | 0:098463de4c5d | 144 | out_string = "=" * 10 + "STDOUT" + "=" * 10 + "\n" |
group-onsemi | 0:098463de4c5d | 145 | out_string += out |
group-onsemi | 0:098463de4c5d | 146 | out_string += "=" * 10 + "STDERR" + "=" * 10 + "\n" |
group-onsemi | 0:098463de4c5d | 147 | out_string += err |
group-onsemi | 0:098463de4c5d | 148 | |
group-onsemi | 0:098463de4c5d | 149 | if ret_code == 0: |
group-onsemi | 0:098463de4c5d | 150 | out_string += "SUCCESS" |
group-onsemi | 0:098463de4c5d | 151 | else: |
group-onsemi | 0:098463de4c5d | 152 | out_string += "FAILURE" |
group-onsemi | 0:098463de4c5d | 153 | |
group-onsemi | 0:098463de4c5d | 154 | print out_string |
group-onsemi | 0:098463de4c5d | 155 | |
group-onsemi | 0:098463de4c5d | 156 | if log_name: |
group-onsemi | 0:098463de4c5d | 157 | # Write the output to the log file |
group-onsemi | 0:098463de4c5d | 158 | with open(log_name, 'w+') as f: |
group-onsemi | 0:098463de4c5d | 159 | f.write(out_string) |
group-onsemi | 0:098463de4c5d | 160 | |
group-onsemi | 0:098463de4c5d | 161 | # Cleanup the exported and built files |
group-onsemi | 0:098463de4c5d | 162 | if cleanup: |
group-onsemi | 0:098463de4c5d | 163 | os.remove(project_name + ".ewp") |
group-onsemi | 0:098463de4c5d | 164 | os.remove(project_name + ".ewd") |
group-onsemi | 0:098463de4c5d | 165 | os.remove(project_name + ".eww") |
group-onsemi | 0:098463de4c5d | 166 | # legacy output file location |
group-onsemi | 0:098463de4c5d | 167 | if exists('.build'): |
group-onsemi | 0:098463de4c5d | 168 | shutil.rmtree('.build') |
group-onsemi | 0:098463de4c5d | 169 | if exists('BUILD'): |
group-onsemi | 0:098463de4c5d | 170 | shutil.rmtree('BUILD') |
group-onsemi | 0:098463de4c5d | 171 | |
group-onsemi | 0:098463de4c5d | 172 | if ret_code !=0: |
group-onsemi | 0:098463de4c5d | 173 | # Seems like something went wrong. |
group-onsemi | 0:098463de4c5d | 174 | return -1 |
group-onsemi | 0:098463de4c5d | 175 | else: |
group-onsemi | 0:098463de4c5d | 176 | return 0 |