Revision 29:1210849dba19, committed 2016-08-29
- Committer: screamer
- Date: Mon Aug 29 11:18:36 2016 +0100
- Parent: 28:e080013bb94e
- Child: 30:f12ce67666d0
- Commit message: Port the latest tools patches from https://github.com/ARMmbed/mbed-os
Changed in this revision
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/add_fib.py Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,158 @@
+"""
+@copyright (c) 2012 ON Semiconductor. All rights reserved.
+ON Semiconductor is supplying this software for use with ON Semiconductor
+processor based microcontrollers only.
+THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
+OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
+ON SEMICONDUCTOR SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL,
+INCIDENTAL, OR CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
+"""
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import itertools
+import binascii
+import intelhex
+
+FIB_BASE = 0x2000
+FLASH_BASE = 0x3000
+FW_REV = 0x01000100
+def ranges(i):
+ for _, b in itertools.groupby(enumerate(i), lambda x_y: x_y[1] - x_y[0]):
+ b = list(b)
+ yield b[0][1], b[-1][1]
+
+
+def add_fib_at_start(arginput):
+ input_file = arginput + ".bin"
+ file_name_hex = arginput + "_fib.hex"
+ file_name_bin = arginput + ".bin"
+
+    # Read the input binary into an IntelHex object
+ input_hex_file = intelhex.IntelHex()
+ input_hex_file.padding = 0x00
+ input_hex_file.loadbin(input_file, offset=FLASH_BASE)
+
+ output_hex_file = intelhex.IntelHex()
+ output_hex_file.padding = 0x00
+
+ # Get the starting and ending address
+ addresses = input_hex_file.addresses()
+ addresses.sort()
+ start_end_pairs = list(ranges(addresses))
+ regions = len(start_end_pairs)
+
+ if regions == 1:
+ start, end = start_end_pairs[0]
+ else:
+ start = min(min(start_end_pairs))
+ end = max(max(start_end_pairs))
+
+    assert start >= FLASH_BASE, ("Error - start 0x%x less than beginning of"
+                                 " user flash area" % start)
+ # Compute checksum over the range (don't include data at location of crc)
+ size = end - start + 1
+ data = input_hex_file.tobinarray(start=start, size=size)
+ crc32 = binascii.crc32(data) & 0xFFFFFFFF
+
+ fw_rev = FW_REV
+
+ checksum = (start + size + crc32 + fw_rev) & 0xFFFFFFFF
+
+ print("Writing FIB: base 0x%08X, size 0x%08X, crc32 0x%08X, fw rev 0x%08X,\
+ checksum 0x%08X" % (start, size, crc32, fw_rev, checksum))
+
+# Expected initial values used by DAPLink to validate that this is a valid
+# bin file. They are dummies here because the FIB area precedes the
+# application area; the bootloader will ignore them.
+# 00 is stack pointer (RAM address)
+# 04 is Reset vector (FLASH address)
+# 08 NMI_Handler (FLASH address)
+# 0C HardFault_Handler(FLASH address)
+# 10 dummy
+ dummy_sp = 0x3FFFFC00
+ dummy_reset_vector = 0x00003625
+ dummy_nmi_handler = 0x00003761
+ dummy_hardfault_handler = 0x00003691
+ dummy_blank = 0x00000000
+
+# Expected FIB structure:
+# typedef struct fib {
+#     uint32_t base;     /**< Base offset of firmware, indicating what flash
+#                             the firmware is in. (will never be 0x11111111) */
+#     uint32_t size;     /**< Size of the firmware */
+#     uint32_t crc;      /**< CRC32 for firmware correctness check */
+#     uint32_t rev;      /**< Revision number */
+#     uint32_t checksum; /**< Check-sum of information block */
+# } fib_t, *fib_pt;
+
+ fib_start = FIB_BASE
+ dummy_fib_size = 20
+ fib_size = 20
+ user_code_start = FLASH_BASE
+
+    # Write dummy vector table to the file in little endian
+ output_hex_file[fib_start + 0] = (dummy_sp >> 0) & 0xFF
+ output_hex_file[fib_start + 1] = (dummy_sp >> 8) & 0xFF
+ output_hex_file[fib_start + 2] = (dummy_sp >> 16) & 0xFF
+ output_hex_file[fib_start + 3] = (dummy_sp >> 24) & 0xFF
+
+ output_hex_file[fib_start + 4] = (dummy_reset_vector >> 0) & 0xFF
+ output_hex_file[fib_start + 5] = (dummy_reset_vector >> 8) & 0xFF
+ output_hex_file[fib_start + 6] = (dummy_reset_vector >> 16) & 0xFF
+ output_hex_file[fib_start + 7] = (dummy_reset_vector >> 24) & 0xFF
+
+ output_hex_file[fib_start + 8] = (dummy_nmi_handler >> 0) & 0xFF
+ output_hex_file[fib_start + 9] = (dummy_nmi_handler >> 8) & 0xFF
+ output_hex_file[fib_start + 10] = (dummy_nmi_handler >> 16) & 0xFF
+ output_hex_file[fib_start + 11] = (dummy_nmi_handler >> 24) & 0xFF
+
+ output_hex_file[fib_start + 12] = (dummy_hardfault_handler >> 0) & 0xFF
+ output_hex_file[fib_start + 13] = (dummy_hardfault_handler >> 8) & 0xFF
+ output_hex_file[fib_start + 14] = (dummy_hardfault_handler >> 16) & 0xFF
+ output_hex_file[fib_start + 15] = (dummy_hardfault_handler >> 24) & 0xFF
+
+ output_hex_file[fib_start + 16] = (dummy_blank >> 0) & 0xFF
+ output_hex_file[fib_start + 17] = (dummy_blank >> 8) & 0xFF
+ output_hex_file[fib_start + 18] = (dummy_blank >> 16) & 0xFF
+ output_hex_file[fib_start + 19] = (dummy_blank >> 24) & 0xFF
+
+ # Write FIB to the file in little endian
+ output_hex_file[fib_start + 20] = (start >> 0) & 0xFF
+ output_hex_file[fib_start + 21] = (start >> 8) & 0xFF
+ output_hex_file[fib_start + 22] = (start >> 16) & 0xFF
+ output_hex_file[fib_start + 23] = (start >> 24) & 0xFF
+
+ output_hex_file[fib_start + 24] = (size >> 0) & 0xFF
+ output_hex_file[fib_start + 25] = (size >> 8) & 0xFF
+ output_hex_file[fib_start + 26] = (size >> 16) & 0xFF
+ output_hex_file[fib_start + 27] = (size >> 24) & 0xFF
+
+ output_hex_file[fib_start + 28] = (crc32 >> 0) & 0xFF
+ output_hex_file[fib_start + 29] = (crc32 >> 8) & 0xFF
+ output_hex_file[fib_start + 30] = (crc32 >> 16) & 0xFF
+ output_hex_file[fib_start + 31] = (crc32 >> 24) & 0xFF
+
+ output_hex_file[fib_start + 32] = (fw_rev >> 0) & 0xFF
+ output_hex_file[fib_start + 33] = (fw_rev >> 8) & 0xFF
+ output_hex_file[fib_start + 34] = (fw_rev >> 16) & 0xFF
+ output_hex_file[fib_start + 35] = (fw_rev >> 24) & 0xFF
+
+ output_hex_file[fib_start + 36] = (checksum >> 0) & 0xFF
+ output_hex_file[fib_start + 37] = (checksum >> 8) & 0xFF
+ output_hex_file[fib_start + 38] = (checksum >> 16) & 0xFF
+ output_hex_file[fib_start + 39] = (checksum >> 24) & 0xFF
+
+ #pad the rest of the file
+ for i in range(fib_start + dummy_fib_size + fib_size, user_code_start):
+ output_hex_file[i] = 0xFF
+
+ #merge two hex files
+ output_hex_file.merge(input_hex_file, overlap='error')
+
+ # Write out file(s)
+ output_hex_file.tofile(file_name_hex, 'hex')
+ output_hex_file.tofile(file_name_bin, 'bin')
+
\ No newline at end of file
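
For reference, a minimal sketch (not part of this commit) of how a FIB written by add_fib_at_start() could be read back and verified. It assumes the layout above: 20 bytes of dummy vectors at FIB_BASE followed by the 20-byte little-endian FIB (base, size, crc, rev, checksum); 'app_fib.hex' is a hypothetical file name.

    import binascii
    import struct

    import intelhex

    FIB_BASE = 0x2000

    def verify_fib(hex_path):
        ihex = intelhex.IntelHex(hex_path)
        # The live FIB sits just after the 20-byte dummy vector table
        raw = ihex.tobinarray(start=FIB_BASE + 20, size=20)
        base, size, crc, rev, checksum = struct.unpack(
            '<5I', bytes(bytearray(raw)))
        # Recompute both integrity values the same way add_fib_at_start does
        data = ihex.tobinarray(start=base, size=size)
        crc_ok = (binascii.crc32(data) & 0xFFFFFFFF) == crc
        sum_ok = ((base + size + crc + rev) & 0xFFFFFFFF) == checksum
        return crc_ok and sum_ok

    print(verify_fib('app_fib.hex'))  # e.g. output of add_fib_at_start('app')
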
--- a/build_api.py Mon Aug 29 10:55:42 2016 +0100
+++ b/build_api.py Mon Aug 29 11:18:36 2016 +0100
@@ -17,18 +17,16 @@
import re
import tempfile
-import colorama
-
-from copy import copy
from types import ListType
from shutil import rmtree
-from os.path import join, exists, basename, abspath, normpath
-from os import getcwd, walk, linesep
+from os.path import join, exists, basename, abspath, normpath, dirname
+from os import linesep, remove
from time import time
-import fnmatch
-from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException, InvalidReleaseTargetException
-from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
+from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException,\
+ ToolException, InvalidReleaseTargetException
+from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL,\
+ MBED_COMMON, MBED_CONFIG_FILE
from tools.targets import TARGET_NAMES, TARGET_MAP, set_targets_json_location
from tools.libraries import Library
from tools.toolchains import TOOLCHAIN_CLASSES, mbedToolchain
@@ -40,7 +38,14 @@
RELEASE_VERSIONS = ['2', '5']
def prep_report(report, target_name, toolchain_name, id_name):
- # Setup report keys
+ """Setup report keys
+
+ Positional arguments:
+ report - the report to fill
+ target_name - the target being used
+ toolchain_name - the toolchain being used
+ id_name - the name of the executable or library being built
+ """
if not target_name in report:
report[target_name] = {}
@@ -51,7 +56,14 @@
report[target_name][toolchain_name][id_name] = []
def prep_properties(properties, target_name, toolchain_name, vendor_label):
- # Setup test properties
+ """Setup test properties
+
+ Positional arguments:
+ properties - the dict to fill
+ target_name - the target the test is targeting
+ toolchain_name - the toolchain that will compile the test
+ vendor_label - the vendor
+ """
if not target_name in properties:
properties[target_name] = {}
@@ -63,6 +75,14 @@
properties[target_name][toolchain_name]["toolchain"] = toolchain_name
def create_result(target_name, toolchain_name, id_name, description):
+ """Create a result dictionary
+
+ Positional arguments:
+ target_name - the target being built for
+ toolchain_name - the toolchain doing the building
+ id_name - the name of the executable or library being built
+    description - a human-readable description of the build
+ """
cur_result = {}
cur_result["target_name"] = target_name
cur_result["toolchain_name"] = toolchain_name
@@ -74,13 +94,26 @@
return cur_result
def add_result_to_report(report, result):
+ """Add a single result to a report dictionary
+
+ Positional arguments:
+ report - the report to append to
+ result - the result to append
+ """
target = result["target_name"]
toolchain = result["toolchain_name"]
id_name = result['id']
- result_wrap = { 0: result }
+ result_wrap = {0: result}
report[target][toolchain][id_name].append(result_wrap)
def get_config(src_paths, target, toolchain_name):
+ """Get the configuration object for a target-toolchain combination
+
+ Positional arguments:
+ src_paths - paths to scan for the configuration files
+ target - the device we are building for
+ toolchain_name - the string that identifies the build tools
+ """
# Convert src_paths to a list if needed
if type(src_paths) != ListType:
src_paths = [src_paths]
@@ -120,27 +153,33 @@
given version. Return False, 'reason' if a target is not part of the
official release for the given version.
- target_name: Name if the target (ex. 'K64F')
- version: The release version string. Should be a string contained within RELEASE_VERSIONS
+ Positional arguments:
+    target_name - Name of the target (ex. 'K64F')
+ version - The release version string. Should be a string contained within
+ RELEASE_VERSIONS
"""
-
+
result = True
reason = None
target = TARGET_MAP[target_name]
-
- if hasattr(target, 'release_versions') and version in target.release_versions:
+
+ if hasattr(target, 'release_versions') \
+ and version in target.release_versions:
if version == '2':
# For version 2, either ARM or uARM toolchain support is required
required_toolchains = set(['ARM', 'uARM'])
-
- if not len(required_toolchains.intersection(set(target.supported_toolchains))) > 0:
- result = False
+
+ if not len(required_toolchains.intersection(
+ set(target.supported_toolchains))) > 0:
+ result = False
reason = ("Target '%s' must support " % target.name) + \
- ("one of the folowing toolchains to be included in the mbed 2.0 ") + \
- (("official release: %s" + linesep) % ", ".join(required_toolchains)) + \
+ ("one of the folowing toolchains to be included in the") + \
+ ((" mbed 2.0 official release: %s" + linesep) %
+ ", ".join(required_toolchains)) + \
("Currently it is only configured to support the ") + \
- ("following toolchains: %s" % ", ".join(target.supported_toolchains))
-
+ ("following toolchains: %s" %
+ ", ".join(target.supported_toolchains))
+
elif version == '5':
# For version 5, ARM, GCC_ARM, and IAR toolchain support is required
required_toolchains = set(['ARM', 'GCC_ARM', 'IAR'])
@@ -149,45 +188,52 @@
supported_toolchains = set(target.supported_toolchains)
supported_toolchains_sorted = list(supported_toolchains)
supported_toolchains_sorted.sort()
-
+
if not required_toolchains.issubset(supported_toolchains):
result = False
reason = ("Target '%s' must support " % target.name) + \
- ("ALL of the folowing toolchains to be included in the mbed OS 5.0 ") + \
- (("official release: %s" + linesep) % ", ".join(required_toolchains_sorted)) + \
+ ("ALL of the folowing toolchains to be included in the") + \
+ ((" mbed OS 5.0 official release: %s" + linesep) %
+ ", ".join(required_toolchains_sorted)) + \
("Currently it is only configured to support the ") + \
- ("following toolchains: %s" % ", ".join(supported_toolchains_sorted))
+ ("following toolchains: %s" %
+ ", ".join(supported_toolchains_sorted))
- elif not target.default_build == 'standard':
+ elif not target.default_lib == 'std':
result = False
- reason = ("Target '%s' must set the 'default_build' " % target.name) + \
- ("to 'standard' to be included in the mbed OS 5.0 ") + \
- ("official release." + linesep) + \
- ("Currently it is set to '%s'" % target.default_build)
+ reason = ("Target '%s' must set the " % target.name) + \
+ ("'default_lib' to 'std' to be included in the ") + \
+ ("mbed OS 5.0 official release." + linesep) + \
+ ("Currently it is set to '%s'" % target.default_lib)
else:
result = False
- reason = ("Target '%s' has set an invalid release version of '%s'" % version) + \
- ("Please choose from the following release versions: %s" + ', '.join(RELEASE_VERSIONS))
+ reason = ("Target '%s' has set an invalid release version of '%s'" %
+ version) + \
+ ("Please choose from the following release versions: %s" %
+ ', '.join(RELEASE_VERSIONS))
else:
result = False
if not hasattr(target, 'release_versions'):
- reason = "Target '%s' does not have the 'release_versions' key set" % target.name
+ reason = "Target '%s' " % target.name
+ reason += "does not have the 'release_versions' key set"
elif not version in target.release_versions:
- reason = "Target '%s' does not contain the version '%s' in its 'release_versions' key" % (target.name, version)
-
+ reason = "Target '%s' does not contain the version '%s' " % \
+ (target.name, version)
+ reason += "in its 'release_versions' key"
+
return result, reason
def transform_release_toolchains(toolchains, version):
""" Given a list of toolchains and a release version, return a list of
only the supported toolchains for that release
- toolchains: The list of toolchains
- version: The release version string. Should be a string contained within RELEASE_VERSIONS
+ Positional arguments:
+ toolchains - The list of toolchains
+ version - The release version string. Should be a string contained within
+ RELEASE_VERSIONS
"""
- toolchains_set = set(toolchains)
-
if version == '5':
return ['ARM', 'GCC_ARM', 'IAR']
else:
@@ -197,41 +243,58 @@
def get_mbed_official_release(version):
""" Given a release version string, return a tuple that contains a target
and the supported toolchains for that release.
- Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')), ('K64F', ('ARM', 'GCC_ARM')), ...)
+ Ex. Given '2', return (('LPC1768', ('ARM', 'GCC_ARM')),
+ ('K64F', ('ARM', 'GCC_ARM')), ...)
- version: The version string. Should be a string contained within RELEASE_VERSIONS
+ Positional arguments:
+ version - The version string. Should be a string contained within
+ RELEASE_VERSIONS
"""
- MBED_OFFICIAL_RELEASE = (
+ mbed_official_release = (
tuple(
tuple(
[
TARGET_MAP[target].name,
- tuple(transform_release_toolchains(TARGET_MAP[target].supported_toolchains, version))
+ tuple(transform_release_toolchains(
+ TARGET_MAP[target].supported_toolchains, version))
]
- ) for target in TARGET_NAMES if (hasattr(TARGET_MAP[target], 'release_versions') and version in TARGET_MAP[target].release_versions)
+ ) for target in TARGET_NAMES \
+ if (hasattr(TARGET_MAP[target], 'release_versions')
+ and version in TARGET_MAP[target].release_versions)
)
)
-
- for target in MBED_OFFICIAL_RELEASE:
+
+ for target in mbed_official_release:
is_official, reason = is_official_target(target[0], version)
-
+
if not is_official:
raise InvalidReleaseTargetException(reason)
-
- return MBED_OFFICIAL_RELEASE
+
+ return mbed_official_release
def prepare_toolchain(src_paths, target, toolchain_name,
- macros=None, options=None, clean=False, jobs=1,
- notify=None, silent=False, verbose=False, extra_verbose=False, config=None):
+ macros=None, options=None, clean=False, jobs=1,
+ notify=None, silent=False, verbose=False,
+ extra_verbose=False, config=None):
""" Prepares resource related objects - toolchain, target, config
- src_paths: the paths to source directories
- target: ['LPC1768', 'LPC11U24', 'LPC2368']
- toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
- clean: Rebuild everything if True
- notify: Notify function for logs
- verbose: Write the actual tools command lines if True
+
+ Positional arguments:
+ src_paths - the paths to source directories
+ target - ['LPC1768', 'LPC11U24', 'LPC2368', etc.]
+ toolchain_name - ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
+
+ Keyword arguments:
+ macros - additional macros
+ options - general compiler options like debug-symbols or small-build
+ clean - Rebuild everything if True
+ jobs - how many compilers we can run at once
+ notify - Notify function for logs
+ silent - suppress printing of progress indicators
+ verbose - Write the actual tools command lines used if True
+ extra_verbose - even more output!
+ config - a Config object to use instead of creating one
"""
# We need to remove all paths which are repeated to avoid
@@ -250,8 +313,10 @@
# Toolchain instance
try:
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
- except KeyError as e:
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](
+ target, options, notify, macros, silent,
+ extra_verbose=extra_verbose)
+ except KeyError:
raise KeyError("Toolchain %s not supported" % toolchain_name)
toolchain.config = config
@@ -261,12 +326,16 @@
return toolchain
-def scan_resources(src_paths, toolchain, dependencies_paths=None, inc_dirs=None):
+def scan_resources(src_paths, toolchain, dependencies_paths=None,
+ inc_dirs=None):
""" Scan resources using initialized toolcain
- src_paths: the paths to source directories
- toolchain: valid toolchain object
- dependencies_paths: dependency paths that we should scan for include dirs
- inc_dirs: additional include directories which should be added to thescanner resources
+
+ Positional arguments
+ src_paths - the paths to source directories
+ toolchain - valid toolchain object
+ dependencies_paths - dependency paths that we should scan for include dirs
+ inc_dirs - additional include directories which should be added to
+ the scanner resources
"""
# Scan src_path
@@ -287,7 +356,8 @@
else:
resources.inc_dirs.append(inc_dirs)
- # Load resources into the config system which might expand/modify resources based on config data
+ # Load resources into the config system which might expand/modify resources
+ # based on config data
resources = toolchain.config.load_resources(resources)
# Set the toolchain's configuration data
@@ -296,11 +366,38 @@
return resources
def build_project(src_paths, build_path, target, toolchain_name,
- libraries_paths=None, options=None, linker_script=None,
- clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
- jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None,
- extra_verbose=False, config=None):
- """ This function builds project. Project can be for example one test / UT
+ libraries_paths=None, options=None, linker_script=None,
+ clean=False, notify=None, verbose=False, name=None,
+ macros=None, inc_dirs=None, jobs=1, silent=False,
+ report=None, properties=None, project_id=None,
+ project_description=None, extra_verbose=False, config=None):
+ """ Build a project. A project may be a test or a user program.
+
+ Positional arguments:
+ src_paths - a path or list of paths that contain all files needed to build
+ the project
+ build_path - the directory where all of the object files will be placed
+ target - the MCU or board that the project will compile for
+ toolchain_name - the name of the build tools
+
+ Keyword arguments:
+ libraries_paths - The location of libraries to include when linking
+ options - general compiler options like debug-symbols or small-build
+    linker_script - the file that drives the linker to do its job
+ clean - Rebuild everything if True
+ notify - Notify function for logs
+ verbose - Write the actual tools command lines used if True
+ name - the name of the project
+ macros - additional macros
+ inc_dirs - additional directories where include files may be found
+ jobs - how many compilers we can run at once
+ silent - suppress printing of progress indicators
+ report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+    project_id - the name put in the report
+    project_description - a human-readable description of the project
+ extra_verbose - even more output!
+ config - a Config object to use instead of creating one
"""
# Convert src_path to a list if needed
@@ -311,9 +408,8 @@
src_paths.extend(libraries_paths)
# Build Directory
- if clean:
- if exists(build_path):
- rmtree(build_path)
+ if clean and exists(build_path):
+ rmtree(build_path)
mkdir(build_path)
@@ -327,7 +423,7 @@
for path in src_paths:
profile = find_build_profile(path) or profile
if profile:
- targets_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'legacy_targets.json')
+ targets_json = join(dirname(abspath(__file__)), 'legacy_targets.json')
else:
targets_json = find_targets_json(path) or targets_json
@@ -355,14 +451,16 @@
mbedToolchain.init = init_hook
# Pass all params to the unified prepare_toolchain()
- toolchain = prepare_toolchain(src_paths, target, toolchain_name,
- macros=macros, options=options, clean=clean, jobs=jobs,
- notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose, config=config)
+ toolchain = prepare_toolchain(
+ src_paths, target, toolchain_name, macros=macros, options=options,
+ clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
+ extra_verbose=extra_verbose, config=config)
# The first path will give the name to the library
if name is None:
name = basename(normpath(abspath(src_paths[0])))
- toolchain.info("Building project %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))
+ toolchain.info("Building project %s (%s, %s)" %
+ (name, toolchain.target.name, toolchain_name))
# Initialize reporting
if report != None:
@@ -372,9 +470,11 @@
description = project_description if project_description else name
vendor_label = toolchain.target.extra_labels[0]
prep_report(report, toolchain.target.name, toolchain_name, id_name)
- cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
+ cur_result = create_result(toolchain.target.name, toolchain_name,
+ id_name, description)
if properties != None:
- prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)
+ prep_properties(properties, toolchain.target.name, toolchain_name,
+ vendor_label)
try:
# Call unified scan_resources
@@ -385,7 +485,8 @@
resources.linker_script = linker_script
# Compile Sources
- objects = toolchain.compile_sources(resources, build_path, resources.inc_dirs)
+ objects = toolchain.compile_sources(resources, build_path,
+ resources.inc_dirs)
resources.objects.extend(objects)
# Link Program
@@ -402,11 +503,11 @@
return res
- except Exception, e:
+ except Exception as exc:
if report != None:
end = time()
- if isinstance(e, NotSupportedException):
+ if isinstance(exc, NotSupportedException):
cur_result["result"] = "NOT_SUPPORTED"
else:
cur_result["result"] = "FAIL"
@@ -423,19 +524,37 @@
raise
def build_library(src_paths, build_path, target, toolchain_name,
- dependencies_paths=None, options=None, name=None, clean=False, archive=True,
- notify=None, verbose=False, macros=None, inc_dirs=None,
- jobs=1, silent=False, report=None, properties=None, extra_verbose=False,
- project_id=None):
- """ Prepares resource related objects - toolchain, target, config
- src_paths: the paths to source directories
- build_path: the path of the build directory
- target: ['LPC1768', 'LPC11U24', 'LPC2368']
- toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
- clean: Rebuild everything if True
- notify: Notify function for logs
- verbose: Write the actual tools command lines if True
- inc_dirs: additional include directories which should be included in build
+ dependencies_paths=None, options=None, name=None, clean=False,
+ archive=True, notify=None, verbose=False, macros=None,
+ inc_dirs=None, jobs=1, silent=False, report=None,
+ properties=None, extra_verbose=False, project_id=None,
+ remove_config_header_file=False):
+ """ Build a library
+
+ Positional arguments:
+ src_paths - a path or list of paths that contain all files needed to build
+ the library
+ build_path - the directory where all of the object files will be placed
+ target - the MCU or board that the project will compile for
+ toolchain_name - the name of the build tools
+
+ Keyword arguments:
+ dependencies_paths - The location of libraries to include when linking
+ options - general compiler options like debug-symbols or small-build
+ name - the name of the library
+ clean - Rebuild everything if True
+ archive - whether the library will create an archive file
+ notify - Notify function for logs
+ verbose - Write the actual tools command lines used if True
+ macros - additional macros
+ inc_dirs - additional directories where include files may be found
+ jobs - how many compilers we can run at once
+ silent - suppress printing of progress indicators
+ report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+ extra_verbose - even more output!
+ project_id - the name that goes in the report
+ remove_config_header_file - delete config header file when done building
"""
# Convert src_path to a list if needed
@@ -450,15 +569,22 @@
else:
tmp_path = build_path
+ # Clean the build directory
+ if clean and exists(tmp_path):
+ rmtree(tmp_path)
+ mkdir(tmp_path)
+
# Pass all params to the unified prepare_toolchain()
- toolchain = prepare_toolchain(src_paths, target, toolchain_name,
- macros=macros, options=options, clean=clean, jobs=jobs,
- notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose)
+ toolchain = prepare_toolchain(
+ src_paths, target, toolchain_name, macros=macros, options=options,
+ clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
+ extra_verbose=extra_verbose)
# The first path will give the name to the library
if name is None:
name = basename(normpath(abspath(src_paths[0])))
- toolchain.info("Building library %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))
+ toolchain.info("Building library %s (%s, %s)" %
+ (name, toolchain.target.name, toolchain_name))
# Initialize reporting
if report != None:
@@ -468,9 +594,11 @@
description = name
vendor_label = toolchain.target.extra_labels[0]
prep_report(report, toolchain.target.name, toolchain_name, id_name)
- cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
+ cur_result = create_result(toolchain.target.name, toolchain_name,
+ id_name, description)
if properties != None:
- prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)
+ prep_properties(properties, toolchain.target.name, toolchain_name,
+ vendor_label)
for src_path in src_paths:
if not exists(src_path):
@@ -483,42 +611,57 @@
try:
# Call unified scan_resources
- resources = scan_resources(src_paths, toolchain, dependencies_paths=dependencies_paths, inc_dirs=inc_dirs)
+ resources = scan_resources(src_paths, toolchain,
+ dependencies_paths=dependencies_paths,
+ inc_dirs=inc_dirs)
- # Copy headers, objects and static libraries - all files needed for static lib
+ # Copy headers, objects and static libraries - all files needed for
+ # static lib
toolchain.copy_files(resources.headers, build_path, resources=resources)
toolchain.copy_files(resources.objects, build_path, resources=resources)
- toolchain.copy_files(resources.libraries, build_path, resources=resources)
+ toolchain.copy_files(resources.libraries, build_path,
+ resources=resources)
+ toolchain.copy_files(resources.json_files, build_path,
+ resources=resources)
if resources.linker_script:
- toolchain.copy_files(resources.linker_script, build_path, resources=resources)
+ toolchain.copy_files(resources.linker_script, build_path,
+ resources=resources)
if resources.hex_files:
- toolchain.copy_files(resources.hex_files, build_path, resources=resources)
+ toolchain.copy_files(resources.hex_files, build_path,
+ resources=resources)
# Compile Sources
- objects = toolchain.compile_sources(resources, abspath(tmp_path), resources.inc_dirs)
+ objects = toolchain.compile_sources(resources, abspath(tmp_path),
+ resources.inc_dirs)
resources.objects.extend(objects)
if archive:
toolchain.build_library(objects, build_path, name)
+ if remove_config_header_file:
+ config_header_path = toolchain.get_config_header()
+ if config_header_path:
+ remove(config_header_path)
+
if report != None:
end = time()
cur_result["elapsed_time"] = end - start
cur_result["output"] = toolchain.get_output()
cur_result["result"] = "OK"
+
add_result_to_report(report, cur_result)
return True
- except Exception, e:
+ except Exception as exc:
if report != None:
end = time()
- if isinstance(e, ToolException):
+ if isinstance(exc, ToolException):
cur_result["result"] = "FAIL"
- elif isinstance(e, NotSupportedException):
+ elif isinstance(exc, NotSupportedException):
cur_result["result"] = "NOT_SUPPORTED"
cur_result["elapsed_time"] = end - start
@@ -530,27 +673,47 @@
add_result_to_report(report, cur_result)
# Let Exception propagate
- raise e
+ raise
######################
### Legacy methods ###
######################
-def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+def build_lib(lib_id, target, toolchain_name, options=None, verbose=False,
+ clean=False, macros=None, notify=None, jobs=1, silent=False,
+ report=None, properties=None, extra_verbose=False):
""" Legacy method for building mbed libraries
- Function builds library in proper directory using all dependencies and macros defined by user.
+
+ Positional arguments:
+ lib_id - the library's unique identifier
+ target - the MCU or board that the project will compile for
+ toolchain_name - the name of the build tools
+
+ Keyword arguments:
+ options - general compiler options like debug-symbols or small-build
+ clean - Rebuild everything if True
+ verbose - Write the actual tools command lines used if True
+ macros - additional macros
+ notify - Notify function for logs
+ jobs - how many compilers we can run at once
+ silent - suppress printing of progress indicators
+ report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+ extra_verbose - even more output!
"""
lib = Library(lib_id)
if not lib.is_supported(target, toolchain_name):
- print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
+ print('Library "%s" is not yet supported on target %s with toolchain %s'
+ % (lib_id, target.name, toolchain_name))
return False
- # We need to combine macros from parameter list with macros from library definition
- MACROS = lib.macros if lib.macros else []
+ # We need to combine macros from parameter list with macros from library
+ # definition
+ lib_macros = lib.macros if lib.macros else []
if macros:
- macros.extend(MACROS)
+ macros.extend(lib_macros)
else:
- macros = MACROS
+ macros = lib_macros
src_paths = lib.source_dir
build_path = lib.build_dir
@@ -558,17 +721,6 @@
inc_dirs = lib.inc_dirs
inc_dirs_ext = lib.inc_dirs_ext
- """ src_path: the path of the source directory
- build_path: the path of the build directory
- target: ['LPC1768', 'LPC11U24', 'LPC2368']
- toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
- library_paths: List of paths to additional libraries
- clean: Rebuild everything if True
- notify: Notify function for logs
- verbose: Write the actual tools command lines if True
- inc_dirs: additional include directories which should be included in build
- inc_dirs_ext: additional include directories which should be copied to library directory
- """
if type(src_paths) != ListType:
src_paths = [src_paths]
@@ -582,10 +734,12 @@
vendor_label = target.extra_labels[0]
cur_result = None
prep_report(report, target.name, toolchain_name, id_name)
- cur_result = create_result(target.name, toolchain_name, id_name, description)
+ cur_result = create_result(target.name, toolchain_name, id_name,
+ description)
if properties != None:
- prep_properties(properties, target.name, toolchain_name, vendor_label)
+ prep_properties(properties, target.name, toolchain_name,
+ vendor_label)
for src_path in src_paths:
if not exists(src_path):
@@ -600,12 +754,20 @@
try:
# Toolchain instance
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](
+ target, options, macros=macros, notify=notify, silent=silent,
+ extra_verbose=extra_verbose)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
- toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+ toolchain.info("Building library %s (%s, %s)" %
+ (name.upper(), target.name, toolchain_name))
+
+ # Take into account the library configuration (MBED_CONFIG_FILE)
+ config = Config(target)
+ toolchain.config = config
+ config.add_config_files([MBED_CONFIG_FILE])
# Scan Resources
resources = []
@@ -629,6 +791,11 @@
if inc_dirs:
dependencies_include_dir.extend(inc_dirs)
+ # Add other discovered configuration data to the configuration object
+ for res in resources:
+ config.load_resources(res)
+ toolchain.set_config_data(toolchain.config.get_config_data())
+
# Create the desired build directory structure
bin_path = join(build_path, toolchain.obj_path)
mkdir(bin_path)
@@ -637,14 +804,17 @@
# Copy Headers
for resource in resources:
- toolchain.copy_files(resource.headers, build_path, resources=resource)
+ toolchain.copy_files(resource.headers, build_path,
+ resources=resource)
- dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+ dependencies_include_dir.extend(
+ toolchain.scan_resources(build_path).inc_dirs)
# Compile Sources
objects = []
for resource in resources:
- objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+ objects.extend(toolchain.compile_sources(resource, tmp_path,
+ dependencies_include_dir))
needed_update = toolchain.build_library(objects, bin_path, name)
@@ -657,7 +827,7 @@
add_result_to_report(report, cur_result)
return True
- except Exception, e:
+ except Exception:
if report != None:
end = time()
cur_result["result"] = "FAIL"
@@ -670,11 +840,32 @@
add_result_to_report(report, cur_result)
# Let Exception propagate
- raise e
+ raise
+
+# We do have unique legacy conventions about how we build and package the mbed
+# library
+def build_mbed_libs(target, toolchain_name, options=None, verbose=False,
+ clean=False, macros=None, notify=None, jobs=1, silent=False,
+ report=None, properties=None, extra_verbose=False):
+ """ Function returns True is library was built and false if building was
+ skipped
-# We do have unique legacy conventions about how we build and package the mbed library
-def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
- """ Function returns True is library was built and false if building was skipped """
+ Positional arguments:
+ target - the MCU or board that the project will compile for
+ toolchain_name - the name of the build tools
+
+ Keyword arguments:
+ options - general compiler options like debug-symbols or small-build
+ verbose - Write the actual tools command lines used if True
+ clean - Rebuild everything if True
+ macros - additional macros
+ notify - Notify function for logs
+ jobs - how many compilers we can run at once
+ silent - suppress printing of progress indicators
+ report - a dict where a result may be appended
+    properties - a dict to fill with test properties (see prep_properties)
+ extra_verbose - even more output!
+ """
if report != None:
start = time()
@@ -683,16 +874,21 @@
vendor_label = target.extra_labels[0]
cur_result = None
prep_report(report, target.name, toolchain_name, id_name)
- cur_result = create_result(target.name, toolchain_name, id_name, description)
+ cur_result = create_result(target.name, toolchain_name, id_name,
+ description)
if properties != None:
- prep_properties(properties, target.name, toolchain_name, vendor_label)
+ prep_properties(properties, target.name, toolchain_name,
+ vendor_label)
# Check toolchain support
if toolchain_name not in target.supported_toolchains:
supported_toolchains_text = ", ".join(target.supported_toolchains)
- print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
- print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')
+ print('%s target is not yet supported by toolchain %s' %
+ (target.name, toolchain_name))
+ print('%s target supports %s toolchain%s' %
+ (target.name, supported_toolchains_text, 's'
+ if len(target.supported_toolchains) > 1 else ''))
if report != None:
cur_result["result"] = "SKIP"
@@ -702,69 +898,89 @@
try:
# Toolchain
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](
+ target, options, macros=macros, notify=notify, silent=silent,
+ extra_verbose=extra_verbose)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
+ # Take into account the library configuration (MBED_CONFIG_FILE)
+ config = Config(target)
+ toolchain.config = config
+ config.add_config_files([MBED_CONFIG_FILE])
+ toolchain.set_config_data(toolchain.config.get_config_data())
+
# Source and Build Paths
- BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
- BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
- mkdir(BUILD_TOOLCHAIN)
+ build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
+ build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
+ mkdir(build_toolchain)
- TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
- mkdir(TMP_PATH)
+ tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+ mkdir(tmp_path)
# CMSIS
- toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
+ toolchain.info("Building library %s (%s, %s)" %
+ ('CMSIS', target.name, toolchain_name))
cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
resources = toolchain.scan_resources(cmsis_src)
- toolchain.copy_files(resources.headers, BUILD_TARGET)
- toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
- toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
+ toolchain.copy_files(resources.headers, build_target)
+ toolchain.copy_files(resources.linker_script, build_toolchain)
+ toolchain.copy_files(resources.bin_files, build_toolchain)
- objects = toolchain.compile_sources(resources, TMP_PATH)
- toolchain.copy_files(objects, BUILD_TOOLCHAIN)
+ objects = toolchain.compile_sources(resources, tmp_path)
+ toolchain.copy_files(objects, build_toolchain)
# mbed
- toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+ toolchain.info("Building library %s (%s, %s)" %
+ ('MBED', target.name, toolchain_name))
# Common Headers
- toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
- toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+ toolchain.copy_files(toolchain.scan_resources(MBED_API).headers,
+ MBED_LIBRARIES)
+ toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
+ MBED_LIBRARIES)
# Target specific sources
- HAL_SRC = join(MBED_TARGETS_PATH, "hal")
- hal_implementation = toolchain.scan_resources(HAL_SRC)
- toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, resources=hal_implementation)
- incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
- objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+ hal_src = join(MBED_TARGETS_PATH, "hal")
+ hal_implementation = toolchain.scan_resources(hal_src)
+ toolchain.copy_files(hal_implementation.headers +
+ hal_implementation.hex_files +
+ hal_implementation.libraries,
+ build_target, resources=hal_implementation)
+ incdirs = toolchain.scan_resources(build_target).inc_dirs
+ objects = toolchain.compile_sources(hal_implementation, tmp_path,
+ [MBED_LIBRARIES] + incdirs)
# Common Sources
mbed_resources = toolchain.scan_resources(MBED_COMMON)
- objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+ objects += toolchain.compile_sources(mbed_resources, tmp_path,
+ [MBED_LIBRARIES] + incdirs)
# A number of compiled files need to be copied as objects as opposed to
- # being part of the mbed library, for reasons that have to do with the way
- # the linker search for symbols in archives. These are:
- # - retarget.o: to make sure that the C standard lib symbols get overridden
+ # being part of the mbed library, for reasons that have to do with the
+    # way the linker searches for symbols in archives. These are:
+ # - retarget.o: to make sure that the C standard lib symbols get
+ # overridden
# - board.o: mbed_die is weak
- # - mbed_overrides.o: this contains platform overrides of various weak SDK functions
- separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []
+ # - mbed_overrides.o: this contains platform overrides of various
+ # weak SDK functions
+ separate_names, separate_objects = ['retarget.o', 'board.o',
+ 'mbed_overrides.o'], []
- for o in objects:
+ for obj in objects:
for name in separate_names:
- if o.endswith(name):
- separate_objects.append(o)
+ if obj.endswith(name):
+ separate_objects.append(obj)
- for o in separate_objects:
- objects.remove(o)
+ for obj in separate_objects:
+ objects.remove(obj)
- toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+ toolchain.build_library(objects, build_toolchain, "mbed")
- for o in separate_objects:
- toolchain.copy_files(o, BUILD_TOOLCHAIN)
+ for obj in separate_objects:
+ toolchain.copy_files(obj, build_toolchain)
if report != None:
end = time()
@@ -776,7 +992,7 @@
return True
- except Exception, e:
+ except Exception as exc:
if report != None:
end = time()
cur_result["result"] = "FAIL"
@@ -786,18 +1002,21 @@
if toolchain_output:
cur_result["output"] += toolchain_output
- cur_result["output"] += str(e)
+ cur_result["output"] += str(exc)
add_result_to_report(report, cur_result)
# Let Exception propagate
- raise e
+ raise
def get_unique_supported_toolchains(release_targets=None):
""" Get list of all unique toolchains supported by targets
- If release_targets is not specified, then it queries all known targets
- release_targets: tuple structure returned from get_mbed_official_release()
+
+ Keyword arguments:
+ release_targets - tuple structure returned from get_mbed_official_release().
+ If release_targets is not specified, then it queries all
+ known targets
"""
unique_supported_toolchains = []
@@ -815,9 +1034,17 @@
return unique_supported_toolchains
-def mcu_toolchain_matrix(verbose_html=False, platform_filter=None, release_version='5'):
- """ Shows target map using prettytable """
- from prettytable import PrettyTable # Only use it in this function so building works without extra modules
+def mcu_toolchain_matrix(verbose_html=False, platform_filter=None,
+ release_version='5'):
+ """ Shows target map using prettytable
+
+ Keyword arguments:
+ verbose_html - emit html instead of a simple table
+ platform_filter - remove results that match the string
+ release_version - get the matrix for this major version number
+ """
+ # Only use it in this function so building works without extra modules
+ from prettytable import PrettyTable
if isinstance(release_version, basestring):
# Force release_version to lowercase if it is a string
@@ -832,23 +1059,26 @@
for version in RELEASE_VERSIONS:
version_release_targets[version] = get_mbed_official_release(version)
- version_release_target_names[version] = [x[0] for x in version_release_targets[version]]
+ version_release_target_names[version] = [x[0] for x in
+ version_release_targets[
+ version]]
if release_version in RELEASE_VERSIONS:
release_targets = version_release_targets[release_version]
else:
release_targets = None
- unique_supported_toolchains = get_unique_supported_toolchains(release_targets)
+ unique_supported_toolchains = get_unique_supported_toolchains(
+ release_targets)
prepend_columns = ["Target"] + ["mbed OS %s" % x for x in RELEASE_VERSIONS]
# All tests status table print
columns = prepend_columns + unique_supported_toolchains
- pt = PrettyTable(columns)
+ table_printer = PrettyTable(columns)
# Align table
for col in columns:
- pt.align[col] = "c"
- pt.align["Target"] = "l"
+ table_printer.align[col] = "c"
+ table_printer.align["Target"] = "l"
perm_counter = 0
target_counter = 0
@@ -884,9 +1114,10 @@
text = "-"
row.append(text)
- pt.add_row(row)
+ table_printer.add_row(row)
- result = pt.get_html_string() if verbose_html else pt.get_string()
+ result = table_printer.get_html_string() if verbose_html \
+ else table_printer.get_string()
result += "\n"
result += "Supported targets: %d\n"% (target_counter)
if target_counter == 1:
@@ -895,57 +1126,87 @@
def get_target_supported_toolchains(target):
- """ Returns target supported toolchains list """
- return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP else None
+ """ Returns target supported toolchains list
+
+ Positional arguments:
+ target - the target to get the supported toolchains of
+ """
+ return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP \
+ else None
-def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
+def static_analysis_scan(target, toolchain_name, cppcheck_cmd,
+ cppcheck_msg_format, options=None, verbose=False,
+ clean=False, macros=None, notify=None, jobs=1,
+ extra_verbose=False):
+ """Perform static analysis on a target and toolchain combination
+
+ Positional arguments:
+ target - the target to fake the build for
+ toolchain_name - pretend you would compile with this toolchain
+ cppcheck_cmd - the command used to do static analysis
+ cppcheck_msg_format - the format of the check messages
+
+ Keyword arguments:
+ options - things like debug-symbols, or small-build, etc.
+ verbose - more printing!
+ clean - start from a clean slate
+ macros - extra macros to compile with
+ notify - the notification event handling function
+ jobs - number of commands to run at once
+    extra_verbose - even more printing
+ """
# Toolchain
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options,
+ macros=macros, notify=notify,
+ extra_verbose=extra_verbose)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
toolchain.build_all = clean
# Source and Build Paths
- BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
- BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
- mkdir(BUILD_TOOLCHAIN)
+ build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
+ build_toolchain = join(build_target, "TOOLCHAIN_" + toolchain.name)
+ mkdir(build_toolchain)
- TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
- mkdir(TMP_PATH)
+ tmp_path = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+ mkdir(tmp_path)
# CMSIS
- toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
+ toolchain.info("Static analysis for %s (%s, %s)" %
+ ('CMSIS', target.name, toolchain_name))
cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
resources = toolchain.scan_resources(cmsis_src)
# Copy files before analysis
- toolchain.copy_files(resources.headers, BUILD_TARGET)
- toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
+ toolchain.copy_files(resources.headers, build_target)
+ toolchain.copy_files(resources.linker_script, build_toolchain)
- # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+ # Gather include paths, c, cpp sources and macros to transfer to cppcheck
+ # command line
includes = ["-I%s"% i for i in resources.inc_dirs]
- includes.append("-I%s"% str(BUILD_TARGET))
+ includes.append("-I%s"% str(build_target))
c_sources = " ".join(resources.c_sources)
cpp_sources = " ".join(resources.cpp_sources)
macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
- includes = map(str.strip, includes)
- macros = map(str.strip, macros)
+ includes = [inc.strip() for inc in includes]
+ macros = [mac.strip() for mac in macros]
- check_cmd = CPPCHECK_CMD
- check_cmd += CPPCHECK_MSG_FORMAT
+ check_cmd = cppcheck_cmd
+ check_cmd += cppcheck_msg_format
check_cmd += includes
check_cmd += macros
- # We need to pass some params via file to avoid "command line too long in some OSs"
+ # We need to pass some params via file to avoid "command line too long in
+ # some OSs"
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.writelines(line + '\n' for line in c_sources.split())
tmp_file.writelines(line + '\n' for line in cpp_sources.split())
tmp_file.close()
check_cmd += ["--file-list=%s"% tmp_file.name]
- _stdout, _stderr, _rc = run_cmd(check_cmd)
+ _stdout, _stderr, _ = run_cmd(check_cmd)
if verbose:
print _stdout
print _stderr
@@ -953,50 +1214,58 @@
# =========================================================================
# MBED
- toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+ toolchain.info("Static analysis for %s (%s, %s)" %
+ ('MBED', target.name, toolchain_name))
# Common Headers
- toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
- toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+ toolchain.copy_files(toolchain.scan_resources(MBED_API).headers,
+ MBED_LIBRARIES)
+ toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers,
+ MBED_LIBRARIES)
# Target specific sources
- HAL_SRC = join(MBED_TARGETS_PATH, "hal")
- hal_implementation = toolchain.scan_resources(HAL_SRC)
+ hal_src = join(MBED_TARGETS_PATH, "hal")
+ hal_implementation = toolchain.scan_resources(hal_src)
# Copy files before analysis
- toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files, BUILD_TARGET, resources=hal_implementation)
- incdirs = toolchain.scan_resources(BUILD_TARGET)
+ toolchain.copy_files(hal_implementation.headers +
+ hal_implementation.hex_files, build_target,
+ resources=hal_implementation)
+ incdirs = toolchain.scan_resources(build_target)
target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
- target_includes.append("-I%s"% str(BUILD_TARGET))
- target_includes.append("-I%s"% str(HAL_SRC))
+ target_includes.append("-I%s"% str(build_target))
+ target_includes.append("-I%s"% str(hal_src))
target_c_sources = " ".join(incdirs.c_sources)
target_cpp_sources = " ".join(incdirs.cpp_sources)
- target_macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
+ target_macros = ["-D%s"% s for s in
+ toolchain.get_symbols() + toolchain.macros]
# Common Sources
mbed_resources = toolchain.scan_resources(MBED_COMMON)
- # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+ # Gather include paths, c, cpp sources and macros to transfer to cppcheck
+ # command line
mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
- mbed_includes.append("-I%s"% str(BUILD_TARGET))
+ mbed_includes.append("-I%s"% str(build_target))
mbed_includes.append("-I%s"% str(MBED_COMMON))
mbed_includes.append("-I%s"% str(MBED_API))
mbed_includes.append("-I%s"% str(MBED_HAL))
mbed_c_sources = " ".join(mbed_resources.c_sources)
mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
- target_includes = map(str.strip, target_includes)
- mbed_includes = map(str.strip, mbed_includes)
- target_macros = map(str.strip, target_macros)
+ target_includes = [inc.strip() for inc in target_includes]
+ mbed_includes = [inc.strip() for inc in mbed_includes]
+ target_macros = [mac.strip() for mac in target_macros]
- check_cmd = CPPCHECK_CMD
- check_cmd += CPPCHECK_MSG_FORMAT
+ check_cmd = cppcheck_cmd
+ check_cmd += cppcheck_msg_format
check_cmd += target_includes
check_cmd += mbed_includes
check_cmd += target_macros
- # We need to pass some parames via file to avoid "command line too long in some OSs"
+    # We need to pass some params via file to avoid "command line too long in
+ # some OSs"
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.writelines(line + '\n' for line in target_c_sources.split())
tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
@@ -1005,42 +1274,77 @@
tmp_file.close()
check_cmd += ["--file-list=%s"% tmp_file.name]
- _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
+ _stdout, _stderr, _ = run_cmd_ext(check_cmd)
if verbose:
print _stdout
print _stderr
-def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
- options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
+def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd,
+ cppcheck_msg_format, options=None, verbose=False,
+ clean=False, macros=None, notify=None, jobs=1,
+ extra_verbose=False):
+ """Perform static analysis on a library as if it were to be compiled for a
+ particular target and toolchain combination
+ """
lib = Library(lib_id)
if lib.is_supported(target, toolchain):
- static_analysis_scan_library(lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
- lib.dependencies, options,
- verbose=verbose, clean=clean, macros=macros, notify=notify, jobs=jobs, extra_verbose=extra_verbose)
+ static_analysis_scan_library(
+ lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd,
+ cppcheck_msg_format, lib.dependencies, options, verbose=verbose,
+ clean=clean, macros=macros, notify=notify, jobs=jobs,
+ extra_verbose=extra_verbose)
else:
- print 'Library "%s" is not yet supported on target %s with toolchain %s'% (lib_id, target.name, toolchain)
+ print('Library "%s" is not yet supported on target %s with toolchain %s'
+ % (lib_id, target.name, toolchain))
-def static_analysis_scan_library(src_paths, build_path, target, toolchain_name, cppcheck_cmd, cppcheck_msg_format,
- dependencies_paths=None, options=None, name=None, clean=False,
- notify=None, verbose=False, macros=None, jobs=1, extra_verbose=False):
- """ Function scans library (or just some set of sources/headers) for staticly detectable defects """
+def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
+ cppcheck_cmd, cppcheck_msg_format,
+ dependencies_paths=None, options=None,
+ name=None, clean=False, notify=None,
+ verbose=False, macros=None, jobs=1,
+ extra_verbose=False):
+ """ Function scans library for statically detectable defects
+
+ Positional arguments:
+ src_paths - the list of library paths to scan
+ build_path - the location directory of result files
+ target - the target to fake the build for
+ toolchain_name - pretend you would compile with this toolchain
+ cppcheck_cmd - the command used to do static analysis
+ cppcheck_msg_format - the format of the check messages
+
+ Keyword arguments:
+ dependencies_paths - the paths to sources that this library depends on
+ options - things like debug-symbols, or small-build, etc.
+ name - the name of this library
+ clean - start from a clean slate
+ notify - the notification event handling function
+ verbose - more printing!
+ macros - extra macros to compile with
+ jobs - number of commands to run at once
+    extra_verbose - even more printing
+ """
if type(src_paths) != ListType:
src_paths = [src_paths]
for src_path in src_paths:
if not exists(src_path):
- raise Exception("The library source folder does not exist: %s", src_path)
+ raise Exception("The library source folder does not exist: %s",
+ src_path)
# Toolchain instance
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options,
+ macros=macros, notify=notify,
+ extra_verbose=extra_verbose)
toolchain.VERBOSE = verbose
toolchain.jobs = jobs
# The first path will give the name to the library
name = basename(src_paths[0])
- toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+ toolchain.info("Static analysis for library %s (%s, %s)" %
+ (name.upper(), target.name, toolchain_name))
# Scan Resources
resources = []
@@ -1060,7 +1364,8 @@
tmp_path = join(build_path, '.temp', toolchain.obj_path)
mkdir(tmp_path)
- # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+ # Gather include paths, c, cpp sources and macros to transfer to cppcheck
+ # command line
includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
c_sources = " "
cpp_sources = " "
@@ -1073,34 +1378,42 @@
c_sources += " ".join(resource.c_sources) + " "
cpp_sources += " ".join(resource.cpp_sources) + " "
- dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+ dependencies_include_dir.extend(
+ toolchain.scan_resources(build_path).inc_dirs)
- includes = map(str.strip, includes)
- macros = map(str.strip, macros)
+ includes = [inc.strip() for inc in includes]
+ macros = [mac.strip() for mac in macros]
check_cmd = cppcheck_cmd
check_cmd += cppcheck_msg_format
check_cmd += includes
check_cmd += macros
- # We need to pass some parameters via file to avoid "command line too long in some OSs"
- # Temporary file is created to store e.g. cppcheck list of files for command line
+ # We need to pass some parameters via file to avoid "command line too long
+ # in some OSs". A temporary file is created to store e.g. cppcheck list of
+ # files for command line
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.writelines(line + '\n' for line in c_sources.split())
tmp_file.writelines(line + '\n' for line in cpp_sources.split())
tmp_file.close()
check_cmd += ["--file-list=%s"% tmp_file.name]
- # This will allow us to grab result from both stdio and stderr outputs (so we can show them)
- # We assume static code analysis tool is outputting defects on STDERR
- _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
+    # This will allow us to grab results from both the stdout and stderr
+    # outputs (so we can show them). We assume the static code analysis
+    # tool outputs defects on STDERR
+ _stdout, _stderr, _ = run_cmd_ext(check_cmd)
if verbose:
print _stdout
print _stderr
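The temporary-file trick above is the standard way around per-OS command-line length limits: write one path per line, then hand cppcheck a single --file-list argument. A minimal standalone sketch of the same pattern (the source paths are illustrative, and cppcheck is assumed to be on PATH):

    import subprocess
    import tempfile

    sources = ["main.c", "drivers/serial.c", "hal/gpio.c"]  # illustrative

    # delete=False so cppcheck can reopen the file after we close it
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.writelines(path + "\n" for path in sources)
    tmp.close()

    # A single short argument, no matter how many source files there are
    subprocess.call(["cppcheck", "--file-list=%s" % tmp.name])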
def print_build_results(result_list, build_name):
- """ Generate result string for build results """
+ """ Generate result string for build results
+
+ Positional arguments:
+ result_list - the list of results to print
+ build_name - the name of the build we are printing result for
+ """
result = ""
if len(result_list) > 0:
result += build_name + "\n"
@@ -1108,10 +1421,12 @@
result += "\n"
return result
-def print_build_memory_usage_results(report):
+def print_build_memory_usage(report):
""" Generate result table with memory usage values for build results
- Agregates (puts together) reports obtained from self.get_memory_summary()
- @param report Report generated during build procedure. See
+ Aggregates (puts together) reports obtained from self.get_memory_summary()
+
+ Positional arguments:
+ report - Report generated during build procedure.
"""
from prettytable import PrettyTable
columns_text = ['name', 'target', 'toolchain']
@@ -1129,21 +1444,25 @@
for name in report[target][toolchain]:
for dlist in report[target][toolchain][name]:
for dlistelem in dlist:
- # Get 'memory_usage' record and build table with statistics
+ # Get 'memory_usage' record and build table with
+ # statistics
record = dlist[dlistelem]
if 'memory_usage' in record and record['memory_usage']:
# Note that summary should be in the last record of
- # 'memory_usage' section. This is why we are grabbing
- # last "[-1]" record.
+ # 'memory_usage' section. This is why we are
+ # grabbing last "[-1]" record.
row = [
record['description'],
record['target_name'],
record['toolchain_name'],
- record['memory_usage'][-1]['summary']['static_ram'],
+ record['memory_usage'][-1]['summary'][
+ 'static_ram'],
record['memory_usage'][-1]['summary']['stack'],
record['memory_usage'][-1]['summary']['heap'],
- record['memory_usage'][-1]['summary']['total_ram'],
- record['memory_usage'][-1]['summary']['total_flash'],
+ record['memory_usage'][-1]['summary'][
+ 'total_ram'],
+ record['memory_usage'][-1]['summary'][
+ 'total_flash'],
]
table.add_row(row)
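print_build_memory_usage leans on the prettytable package for layout; a reduced sketch of the same table construction, with invented row values:

    from prettytable import PrettyTable

    columns = ['name', 'target', 'toolchain',
               'static_ram', 'stack', 'heap', 'total_ram', 'total_flash']
    table = PrettyTable(columns)
    for col in columns:
        table.align[col] = 'l'  # left-align every column

    # Row values below are purely illustrative
    table.add_row(['my_app', 'K64F', 'GCC_ARM',
                   10240, 4096, 65536, 79872, 262144])
    print table.get_string()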
@@ -1152,6 +1471,14 @@
return result
def write_build_report(build_report, template_filename, filename):
+ """Write a build report to disk using a template file
+
+ Positional arguments:
+ build_report - a report generated by the build system
+ template_filename - a file that contains the template for the style of build
+ report
+ filename - the location on disk to write the file to
+ """
build_report_failing = []
build_report_passing = []
@@ -1165,5 +1492,7 @@
env.loader = FileSystemLoader('ci_templates')
template = env.get_template(template_filename)
- with open(filename, 'w+') as f:
- f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
+ with open(filename, 'w+') as placeholder:
+ placeholder.write(template.render(
+ failing_builds=build_report_failing,
+ passing_builds=build_report_passing))
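write_build_report is a thin wrapper around Jinja2; in isolation, the render-to-file flow it uses looks like the sketch below (the template name and report rows are hypothetical):

    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader('ci_templates'))
    template = env.get_template('build_report.html')  # hypothetical template

    html = template.render(failing_builds=[],
                           passing_builds=[{'name': 'K64F-GCC_ARM'}])
    with open('build_report.html', 'w+') as out_file:
        out_file.write(html)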
--- a/build_everything.py Mon Aug 29 10:55:42 2016 +0100
+++ b/build_everything.py Mon Aug 29 11:18:36 2016 +0100
@@ -147,7 +147,6 @@
if not base_source_paths:
base_source_paths = ['.']
- all_tests = find_tests(base_source_paths[0])
start = time()
build_report = {}
@@ -180,6 +179,7 @@
if options.continue_on_build_fail or library_build_success:
# Build all the tests
+ all_tests = find_tests(base_source_paths[0], target_name, toolchain_name)
test_build_success, test_build = build_tests(all_tests, [build_directory], build_directory, target, target_toolchain,
clean=options.clean,
report=build_report,
--- a/build_travis.py Mon Aug 29 10:55:42 2016 +0100
+++ b/build_travis.py Mon Aug 29 11:18:36 2016 +0100
@@ -113,6 +113,7 @@
{ "target": "MAXWSNENV", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "MAX32600MBED", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "MAX32620HSP", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
{ "target": "RZ_A1H", "toolchains": "GCC_ARM", "libs": ["fat"] },
--- a/colorize.py Mon Aug 29 10:55:42 2016 +0100
+++ b/colorize.py Mon Aug 29 11:18:36 2016 +0100
@@ -1,19 +1,17 @@
-"""
-mbed SDK
-Copyright (c) 2016 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
+# mbed SDK
+# Copyright (c) 2016 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
""" This python file is responsible for generating colorized notifiers.
"""
@@ -23,7 +21,7 @@
from colorama import init, Fore, Back, Style
init()
-colors = {
+COLORS = {
'none' : "",
'default' : Style.RESET_ALL,
@@ -46,26 +44,37 @@
'on_white' : Back.WHITE,
}
-# Convert a color string from a string into an ascii escape code that will print
-# that color on the terminal.
-color_matcher = re.compile(r"(\w+)(\W+on\W+\w+)?")
+COLOR_MATCHER = re.compile(r"(\w+)(\W+on\W+\w+)?")
def colorstring_to_escapecode(color_string):
- match = re.match(color_matcher, color_string)
+ """ Convert a color string from a string into an ascii escape code that
+ will print that color on the terminal.
+
+ Positional arguments:
+ color_string - the string to parse
+ """
+ match = re.match(COLOR_MATCHER, color_string)
if match:
- return colors[match.group(1)] + (colors[match.group(2).strip().replace(" ","_")] if match.group(2) else "")
+ return COLORS[match.group(1)] + \
+ (COLORS[match.group(2).strip().replace(" ", "_")]
+ if match.group(2) else "")
else:
- return corols['default']
+ return COLORS['default']
-# Wrap a toolchain notifier in a colorizer. This colorizer will wrap notifications
-# in a color if the severity matches a color in the *color_map*.
-def print_in_color_notifier (color_map, print_fn):
+
+def print_in_color_notifier(color_map, print_fn):
+ """ Wrap a toolchain notifier in a colorizer. This colorizer will wrap
+ notifications in a color if the severity matches a color in the *color_map*.
+ """
def wrap(event, silent=False):
- fd = sys.stdout
+ """The notification function inself"""
+ file_desc = sys.stdout
self = event['toolchain']
- if fd.isatty() and 'severity' in event and event['severity'] in color_map:
- fd.write(colorstring_to_escapecode(color_map[event['severity']]))
+ if file_desc.isatty() and 'severity' in event and \
+ event['severity'] in color_map:
+ file_desc.write(colorstring_to_escapecode(
+ color_map[event['severity']]))
print_fn(self, event, silent)
- fd.write(colorstring_to_escapecode('default'))
+ file_desc.write(colorstring_to_escapecode('default'))
else:
print_fn(self, event, silent)
return wrap
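COLOR_MATCHER accepts either a bare foreground name ("red") or a foreground/background pair ("yellow on blue"); group 2, when present, is normalized into the "on_<color>" key used by the COLORS table. A quick illustration of the parse:

    import re

    COLOR_MATCHER = re.compile(r"(\w+)(\W+on\W+\w+)?")

    match = COLOR_MATCHER.match("yellow on blue")
    print match.group(1)                            # yellow
    print match.group(2).strip().replace(" ", "_")  # on_blue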
--- a/config.py Mon Aug 29 10:55:42 2016 +0100
+++ b/config.py Mon Aug 29 11:18:36 2016 +0100
@@ -16,40 +16,56 @@
"""
# Implementation of mbed configuration mechanism
-from copy import deepcopy
-from collections import OrderedDict
-from tools.utils import json_file_to_dict, ToolException
+from tools.utils import json_file_to_dict
from tools.targets import Target
import os
# Base class for all configuration exceptions
class ConfigException(Exception):
+ """Config system only exception. Makes it easier to distinguish config
+ errors"""
pass
-# This class keeps information about a single configuration parameter
-class ConfigParameter:
- # name: the name of the configuration parameter
- # data: the data associated with the configuration parameter
- # unit_name: the unit (target/library/application) that defines this parameter
- # unit_ kind: the kind of the unit ("target", "library" or "application")
+class ConfigParameter(object):
+ """This class keeps information about a single configuration parameter"""
+
def __init__(self, name, data, unit_name, unit_kind):
- self.name = self.get_full_name(name, unit_name, unit_kind, allow_prefix = False)
+ """Construct a ConfigParameter
+
+ Positional arguments:
+ name - the name of the configuration parameter
+ data - the data associated with the configuration parameter
+ unit_name - the unit (target/library/application) that defines this
+ parameter
+        unit_kind - the kind of the unit ("target", "library" or "application")
+ """
+ self.name = self.get_full_name(name, unit_name, unit_kind,
+ allow_prefix=False)
self.defined_by = self.get_display_name(unit_name, unit_kind)
self.set_value(data.get("value", None), unit_name, unit_kind)
self.help_text = data.get("help", None)
self.required = data.get("required", False)
- self.macro_name = data.get("macro_name", "MBED_CONF_%s" % self.sanitize(self.name.upper()))
+ self.macro_name = data.get("macro_name", "MBED_CONF_%s" %
+ self.sanitize(self.name.upper()))
self.config_errors = []
- # Return the full (prefixed) name of a parameter.
- # If the parameter already has a prefix, check if it is valid
- # name: the simple (unqualified) name of the parameter
- # unit_name: the unit (target/library/application) that defines this parameter
- # unit_kind: the kind of the unit ("target", "library" or "application")
- # label: the name of the label in the 'target_config_overrides' section (optional)
- # allow_prefix: True to allo the original name to have a prefix, False otherwise
@staticmethod
- def get_full_name(name, unit_name, unit_kind, label = None, allow_prefix = True):
+ def get_full_name(name, unit_name, unit_kind, label=None,
+ allow_prefix=True):
+ """Return the full (prefixed) name of a parameter. If the parameter
+ already has a prefix, check if it is valid
+
+ Positional arguments:
+ name - the simple (unqualified) name of the parameter
+ unit_name - the unit (target/library/application) that defines this
+ parameter
+ unit_kind - the kind of the unit ("target", "library" or "application")
+
+ Keyword arguments:
+ label - the name of the label in the 'target_config_overrides' section
+ allow_prefix - True to allow the original name to have a prefix, False
+ otherwise
+ """
if name.find('.') == -1: # the name is not prefixed
if unit_kind == "target":
prefix = "target."
@@ -60,24 +76,39 @@
return prefix + name
# The name has a prefix, so check if it is valid
if not allow_prefix:
- raise ConfigException("Invalid parameter name '%s' in '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
+ raise ConfigException("Invalid parameter name '%s' in '%s'" %
+ (name, ConfigParameter.get_display_name(
+ unit_name, unit_kind, label)))
temp = name.split(".")
- # Check if the parameter syntax is correct (must be unit_name.parameter_name)
+ # Check if the parameter syntax is correct (must be
+ # unit_name.parameter_name)
if len(temp) != 2:
- raise ConfigException("Invalid parameter name '%s' in '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
+ raise ConfigException("Invalid parameter name '%s' in '%s'" %
+ (name, ConfigParameter.get_display_name(
+ unit_name, unit_kind, label)))
prefix = temp[0]
# Check if the given parameter prefix matches the expected prefix
- if (unit_kind == "library" and prefix != unit_name) or (unit_kind == "target" and prefix != "target"):
- raise ConfigException("Invalid prefix '%s' for parameter name '%s' in '%s'" % (prefix, name, ConfigParameter.get_display_name(unit_name, unit_kind, label)))
+ if (unit_kind == "library" and prefix != unit_name) or \
+ (unit_kind == "target" and prefix != "target"):
+ raise ConfigException(
+ "Invalid prefix '%s' for parameter name '%s' in '%s'" %
+ (prefix, name, ConfigParameter.get_display_name(
+ unit_name, unit_kind, label)))
return name
- # Return the name displayed for a unit when interogating the origin
- # and the last set place of a parameter
- # unit_name: the unit (target/library/application) that defines this parameter
- # unit_kind: the kind of the unit ("target", "library" or "application")
- # label: the name of the label in the 'target_config_overrides' section (optional)
@staticmethod
- def get_display_name(unit_name, unit_kind, label = None):
+ def get_display_name(unit_name, unit_kind, label=None):
+ """Return the name displayed for a unit when interrogating the origin
+ and the last set place of a parameter
+
+ Positional arguments:
+ unit_name - the unit (target/library/application) that defines this
+ parameter
+ unit_kind - the kind of the unit ("target", "library" or "application")
+
+ Keyword arguments:
+ label - the name of the label in the 'target_config_overrides' section
+ """
if unit_kind == "target":
return "target:" + unit_name
elif unit_kind == "application":
@@ -85,33 +116,53 @@
else: # library
return "library:%s%s" % (unit_name, "[%s]" % label if label else "")
- # "Sanitize" a name so that it is a valid C macro name
- # Currently it simply replaces '.' and '-' with '_'
- # name: the un-sanitized name.
@staticmethod
def sanitize(name):
+ """ "Sanitize" a name so that it is a valid C macro name. Currently it
+ simply replaces '.' and '-' with '_'.
+
+ Positional arguments:
+ name - the name to make into a valid C macro
+ """
return name.replace('.', '_').replace('-', '_')
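Since '.' and '-' are the only characters rewritten, a parameter name maps to its macro name predictably; for example (the parameter name is invented, and tools.config is assumed to be importable from the repository root):

    from tools.config import ConfigParameter

    print ConfigParameter.sanitize("my-lib.buffer-size".upper())
    # MY_LIB_BUFFER_SIZE  ->  macro name MBED_CONF_MY_LIB_BUFFER_SIZE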
- # Sets a value for this parameter, remember the place where it was set.
- # If the value is a boolean, it is converted to 1 (for True) or to 0 (for False).
- # value: the value of the parameter
- # unit_name: the unit (target/library/application) that defines this parameter
- # unit_ kind: the kind of the unit ("target", "library" or "application")
- # label: the name of the label in the 'target_config_overrides' section (optional)
- def set_value(self, value, unit_name, unit_kind, label = None):
+ def set_value(self, value, unit_name, unit_kind, label=None):
+ """ Sets a value for this parameter, remember the place where it was
+ set. If the value is a Boolean, it is converted to 1 (for True) or
+ to 0 (for False).
+
+ Positional arguments:
+ value - the value of the parameter
+ unit_name - the unit (target/library/application) that defines this
+ parameter
+ unit_kind - the kind of the unit ("target", "library" or "application")
+
+ Keyword arguments:
+ label - the name of the label in the 'target_config_overrides' section
+ (optional)
+ """
self.value = int(value) if isinstance(value, bool) else value
self.set_by = self.get_display_name(unit_name, unit_kind, label)
- # Return the string representation of this configuration parameter
def __str__(self):
+ """Return the string representation of this configuration parameter
+
+ Arguments: None
+ """
if self.value is not None:
- return '%s = %s (macro name: "%s")' % (self.name, self.value, self.macro_name)
+ return '%s = %s (macro name: "%s")' % \
+ (self.name, self.value, self.macro_name)
else:
return '%s has no value' % self.name
- # Return a verbose description of this configuration paramater as a string
def get_verbose_description(self):
- desc = "Name: %s%s\n" % (self.name, " (required parameter)" if self.required else "")
+ """Return a verbose description of this configuration parameter as a
+ string
+
+ Arguments: None
+ """
+ desc = "Name: %s%s\n" % \
+ (self.name, " (required parameter)" if self.required else "")
if self.help_text:
desc += " Description: %s\n" % self.help_text
desc += " Defined by: %s\n" % self.defined_by
@@ -121,69 +172,175 @@
desc += " Value: %s (set by %s)" % (self.value, self.set_by)
return desc
-# A representation of a configuration macro. It handles both macros without a value (MACRO)
-# and with a value (MACRO=VALUE)
-class ConfigMacro:
+class ConfigMacro(object):
+ """ A representation of a configuration macro. It handles both macros
+ without a value (MACRO) and with a value (MACRO=VALUE)
+ """
def __init__(self, name, unit_name, unit_kind):
+ """Construct a ConfigMacro object
+
+ Positional arguments:
+ name - the macro's name
+ unit_name - the location where the macro was defined
+ unit_kind - the type of macro this is
+ """
self.name = name
self.defined_by = ConfigParameter.get_display_name(unit_name, unit_kind)
if name.find("=") != -1:
tmp = name.split("=")
if len(tmp) != 2:
- raise ValueError("Invalid macro definition '%s' in '%s'" % (name, self.defined_by))
+ raise ValueError("Invalid macro definition '%s' in '%s'" %
+ (name, self.defined_by))
self.macro_name = tmp[0]
self.macro_value = tmp[1]
else:
self.macro_name = name
self.macro_value = None
-# Representation of overrides for cumulative attributes
-class ConfigCumulativeOverride:
- def __init__(self, name, additions=set(), removals=set(), strict=False):
+class ConfigCumulativeOverride(object):
+ """Representation of overrides for cumulative attributes"""
+ def __init__(self, name, additions=None, removals=None, strict=False):
+ """Construct a ConfigCumulativeOverride object
+
+ Positional arguments:
+        name - the name of the cumulative attribute this override applies to
+
+ Keyword arguments:
+ additions - macros to add to the overrides
+ removals - macros to remove from the overrides
+ strict - Boolean indicating that attempting to remove from an override
+ that does not exist should error
+ """
self.name = name
- self.additions = set(additions)
- self.removals = set(removals)
+ if additions:
+ self.additions = set(additions)
+ else:
+ self.additions = set()
+ if removals:
+ self.removals = set(removals)
+ else:
+ self.removals = set()
self.strict = strict
- # Add attr to the cumulative override
def remove_cumulative_overrides(self, overrides):
+ """Extend the list of override removals.
+
+ Positional arguments:
+ overrides - a list of names that, when the override is evaluated, will
+ be removed
+ """
for override in overrides:
if override in self.additions:
- raise ConfigException("Configuration conflict. The %s %s both added and removed." % (self.name[:-1], override))
+ raise ConfigException(
+ "Configuration conflict. The %s %s both added and removed."
+ % (self.name[:-1], override))
self.removals |= set(overrides)
- # Remove attr from the cumulative overrides
def add_cumulative_overrides(self, overrides):
+ """Extend the list of override additions.
+
+ Positional arguments:
+        overrides - a list of names that, when the override is evaluated, will
+ be added to the list
+ """
for override in overrides:
- if (override in self.removals or (self.strict and override not in self.additions)):
- raise ConfigException("Configuration conflict. The %s %s both added and removed." % (self.name[:-1], override))
+ if override in self.removals or \
+ (self.strict and override not in self.additions):
+ raise ConfigException(
+ "Configuration conflict. The %s %s both added and removed."
+ % (self.name[:-1], override))
self.additions |= set(overrides)
- # Enable strict set of cumulative overrides for the specified attr
def strict_cumulative_overrides(self, overrides):
+ """Remove all overrides that are not the specified ones
+
+ Positional arguments:
+ overrides - a list of names that will replace the entire attribute when
+ this override is evaluated.
+ """
self.remove_cumulative_overrides(self.additions - set(overrides))
self.add_cumulative_overrides(overrides)
self.strict = True
def update_target(self, target):
- setattr(target, self.name, list(
- (set(getattr(target, self.name, [])) | self.additions) - self.removals))
+ """Update the attributes of a target based on this override"""
+ setattr(target, self.name,
+ list((set(getattr(target, self.name, []))
+ | self.additions) - self.removals))
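Evaluating a cumulative override reduces to plain set arithmetic: the attribute becomes (existing | additions) - removals. A toy run with invented labels:

    existing  = set(["CORTEX_M", "RTOS_M4_M7"])  # labels already on the target
    additions = set(["MY_LABEL"])                # from target.extra_labels_add
    removals  = set(["RTOS_M4_M7"])              # from target.extra_labels_remove

    print sorted((existing | additions) - removals)
    # ['CORTEX_M', 'MY_LABEL']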
+def _process_config_parameters(data, params, unit_name, unit_kind):
+ """Process a "config_parameters" section in either a target, a library,
+ or the application.
-# 'Config' implements the mbed configuration mechanism
-class Config:
- # Libraries and applications have different names for their configuration files
+ Positional arguments:
+ data - a dictionary with the configuration parameters
+ params - storage for the discovered configuration parameters
+ unit_name - the unit (target/library/application) that defines this
+ parameter
+ unit_kind - the kind of the unit ("target", "library" or "application")
+ """
+ for name, val in data.items():
+ full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind)
+ # If the parameter was already defined, raise an error
+ if full_name in params:
+ raise ConfigException(
+ "Parameter name '%s' defined in both '%s' and '%s'" %
+ (name, ConfigParameter.get_display_name(unit_name, unit_kind),
+ params[full_name].defined_by))
+ # Otherwise add it to the list of known parameters
+ # If "val" is not a dictionary, this is a shortcut definition,
+ # otherwise it is a full definition
+ params[full_name] = ConfigParameter(name, val if isinstance(val, dict)
+ else {"value": val}, unit_name,
+ unit_kind)
+ return params
+
+
+def _process_macros(mlist, macros, unit_name, unit_kind):
+ """Process a macro definition and check for incompatible duplicate
+ definitions.
+
+ Positional arguments:
+ mlist - list of macro names to process
+ macros - dictionary with currently discovered macros
+ unit_name - the unit (library/application) that defines this macro
+ unit_kind - the kind of the unit ("library" or "application")
+ """
+ for mname in mlist:
+ macro = ConfigMacro(mname, unit_name, unit_kind)
+ if (macro.macro_name in macros) and \
+ (macros[macro.macro_name].name != mname):
+ # Found an incompatible definition of the macro in another module,
+ # so raise an error
+ full_unit_name = ConfigParameter.get_display_name(unit_name,
+ unit_kind)
+ raise ConfigException(
+ ("Macro '%s' defined in both '%s' and '%s'"
+ % (macro.macro_name, macros[macro.macro_name].defined_by,
+ full_unit_name)) +
+ " with incompatible values")
+ macros[macro.macro_name] = macro
+
+
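The duplicate check in _process_macros keys on macro_name but compares the full definition string, so redefining a macro with the same value is tolerated while redefining it with a different value raises. A hedged sketch (the unit names are invented, and the module-level helper is assumed importable as tools.config._process_macros):

    from tools.config import _process_macros

    macros = {}
    _process_macros(["TRACE_LEVEL=1"], macros, "lib_a", "library")
    _process_macros(["TRACE_LEVEL=1"], macros, "lib_b", "library")  # same value: OK
    _process_macros(["TRACE_LEVEL=2"], macros, "lib_c", "library")  # ConfigException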
+class Config(object):
+ """'Config' implements the mbed configuration mechanism"""
+
+ # Libraries and applications have different names for their configuration
+ # files
__mbed_app_config_name = "mbed_app.json"
__mbed_lib_config_name = "mbed_lib.json"
# Allowed keys in configuration dictionaries
- # (targets can have any kind of keys, so this validation is not applicable to them)
+ # (targets can have any kind of keys, so this validation is not applicable
+ # to them)
__allowed_keys = {
- "library": set(["name", "config", "target_overrides", "macros", "__config_path"]),
- "application": set(["config", "custom_targets", "target_overrides", "macros", "__config_path"])
+ "library": set(["name", "config", "target_overrides", "macros",
+ "__config_path"]),
+ "application": set(["config", "custom_targets", "target_overrides",
+ "macros", "__config_path"])
}
# Allowed features in configurations
@@ -191,29 +348,45 @@
"UVISOR", "BLE", "CLIENT", "IPV4", "IPV6", "COMMON_PAL", "STORAGE"
]
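Everything outside __allowed_keys is rejected with the "Unknown key(s)" error, so a valid mbed_lib.json stays small. An illustrative one, expressed as the dictionary json_file_to_dict would return:

    # Illustrative mbed_lib.json contents ("__config_path" is injected later
    # by add_config_files, not written by the library author)
    my_lib_config = {
        "name": "my-lib",
        "config": {
            # full parameter definition ...
            "buffer-size": {"help": "RX buffer size in bytes", "value": 256},
            # ... and the shortcut form, value only
            "tx-retries": 3
        },
        "target_overrides": {
            "*": {"my-lib.buffer-size": 512}
        },
        "macros": ["MY_LIB_TRACE=1"]
    }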
- # The initialization arguments for Config are:
- # target: the name of the mbed target used for this configuration instance
- # top_level_dirs: a list of top level source directories (where mbed_abb_config.json could be found)
- # __init__ will look for the application configuration file in top_level_dirs.
- # If found once, it'll parse it and check if it has a custom_targets function.
- # If it does, it'll update the list of targets if need.
- # If found more than once, an exception is raised
- # top_level_dirs can be None (in this case, mbed_app_config.json will not be searched)
- def __init__(self, target, top_level_dirs = []):
+ def __init__(self, target, top_level_dirs=None):
+ """Construct a mbed configuration
+
+ Positional arguments:
+ target - the name of the mbed target used for this configuration
+ instance
+
+        Keyword arguments:
+        top_level_dirs - a list of top level source directories (where
+                         mbed_app.json could be found)
+
+ NOTE: Construction of a Config object will look for the application
+ configuration file in top_level_dirs. If found once, it'll parse it and
+ check if it has a custom_targets function. If it does, it'll update the
+ list of targets as needed. If more than one config file is found, an
+ exception is raised. top_level_dirs may be None (in this case,
+ the constructor will not search for a configuration file)
+ """
app_config_location = None
- for s in (top_level_dirs or []):
- full_path = os.path.join(s, self.__mbed_app_config_name)
+ for directory in top_level_dirs or []:
+ full_path = os.path.join(directory, self.__mbed_app_config_name)
if os.path.isfile(full_path):
if app_config_location is not None:
- raise ConfigException("Duplicate '%s' file in '%s' and '%s'" % (self.__mbed_app_config_name, app_config_location, full_path))
+ raise ConfigException("Duplicate '%s' file in '%s' and '%s'"
+ % (self.__mbed_app_config_name,
+ app_config_location, full_path))
else:
app_config_location = full_path
- self.app_config_data = json_file_to_dict(app_config_location) if app_config_location else {}
+ self.app_config_data = json_file_to_dict(app_config_location) \
+ if app_config_location else {}
# Check the keys in the application configuration data
- unknown_keys = set(self.app_config_data.keys()) - self.__allowed_keys["application"]
+ unknown_keys = set(self.app_config_data.keys()) - \
+ self.__allowed_keys["application"]
if unknown_keys:
- raise ConfigException("Unknown key(s) '%s' in %s" % (",".join(unknown_keys), self.__mbed_app_config_name))
- # Update the list of targets with the ones defined in the application config, if applicable
+ raise ConfigException("Unknown key(s) '%s' in %s" %
+ (",".join(unknown_keys),
+ self.__mbed_app_config_name))
+ # Update the list of targets with the ones defined in the application
+ # config, if applicable
Target.add_py_targets(self.app_config_data.get("custom_targets", {}))
self.lib_config_data = {}
# Make sure that each config is processed only once
@@ -221,230 +394,318 @@
self.target = target if isinstance(target, basestring) else target.name
self.target_labels = Target.get_target(self.target).get_labels()
- self.cumulative_overrides = { key: ConfigCumulativeOverride(key)
- for key in Target._Target__cumulative_attributes }
+ self.cumulative_overrides = {key: ConfigCumulativeOverride(key)
+ for key in
+ Target.cumulative_attributes}
- self._process_config_and_overrides(self.app_config_data, {}, "app", "application")
+ self._process_config_and_overrides(self.app_config_data, {}, "app",
+ "application")
self.target_labels = Target.get_target(self.target).get_labels()
+ self.config_errors = None
- # Add one or more configuration files
def add_config_files(self, flist):
- for f in flist:
- if not f.endswith(self.__mbed_lib_config_name):
+ """Add configuration files
+
+ Positional arguments:
+ flist - a list of files to add to this configuration
+ """
+ for config_file in flist:
+ if not config_file.endswith(self.__mbed_lib_config_name):
continue
- full_path = os.path.normpath(os.path.abspath(f))
+ full_path = os.path.normpath(os.path.abspath(config_file))
# Check that we didn't already process this file
if self.processed_configs.has_key(full_path):
continue
self.processed_configs[full_path] = True
- # Read the library configuration and add a "__full_config_path" attribute to it
- cfg = json_file_to_dict(f)
+ # Read the library configuration and add a "__full_config_path"
+ # attribute to it
+ cfg = json_file_to_dict(config_file)
cfg["__config_path"] = full_path
- # If there's already a configuration for a module with the same name, exit with error
+
+ if "name" not in cfg:
+ raise ConfigException(
+ "Library configured at %s has no name field." % full_path)
+ # If there's already a configuration for a module with the same
+ # name, exit with error
if self.lib_config_data.has_key(cfg["name"]):
- raise ConfigException("Library name '%s' is not unique (defined in '%s' and '%s')" % (cfg["name"], full_path, self.lib_config_data[cfg["name"]]["__config_path"]))
+ raise ConfigException(
+ "Library name '%s' is not unique (defined in '%s' and '%s')"
+ % (cfg["name"], full_path,
+ self.lib_config_data[cfg["name"]]["__config_path"]))
self.lib_config_data[cfg["name"]] = cfg
- # Helper function: process a "config_parameters" section in either a target, a library or the application
- # data: a dictionary with the configuration parameters
- # params: storage for the discovered configuration parameters
- # unit_name: the unit (target/library/application) that defines this parameter
- # unit_kind: the kind of the unit ("target", "library" or "application")
- def _process_config_parameters(self, data, params, unit_name, unit_kind):
- for name, v in data.items():
- full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind)
- # If the parameter was already defined, raise an error
- if full_name in params:
- raise ConfigException("Parameter name '%s' defined in both '%s' and '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind), params[full_name].defined_by))
- # Otherwise add it to the list of known parameters
- # If "v" is not a dictionary, this is a shortcut definition, otherwise it is a full definition
- params[full_name] = ConfigParameter(name, v if isinstance(v, dict) else {"value": v}, unit_name, unit_kind)
- return params
+
+ def _process_config_and_overrides(self, data, params, unit_name, unit_kind):
+ """Process "config_parameters" and "target_config_overrides" into a
+ given dictionary
- # Helper function: process "config_parameters" and "target_config_overrides" in a given dictionary
- # data: the configuration data of the library/appliation
- # params: storage for the discovered configuration parameters
- # unit_name: the unit (library/application) that defines this parameter
- # unit_kind: the kind of the unit ("library" or "application")
- def _process_config_and_overrides(self, data, params, unit_name, unit_kind):
+ Positional arguments:
+        data - the configuration data of the library/application
+ params - storage for the discovered configuration parameters
+ unit_name - the unit (library/application) that defines this parameter
+ unit_kind - the kind of the unit ("library" or "application")
+ """
self.config_errors = []
- self._process_config_parameters(data.get("config", {}), params, unit_name, unit_kind)
+ _process_config_parameters(data.get("config", {}), params, unit_name,
+ unit_kind)
for label, overrides in data.get("target_overrides", {}).items():
- # If the label is defined by the target or it has the special value "*", process the overrides
+ # If the label is defined by the target or it has the special value
+ # "*", process the overrides
if (label == '*') or (label in self.target_labels):
# Check for invalid cumulative overrides in libraries
- if (unit_kind == 'library' and
- any(attr.startswith('target.extra_labels') for attr in overrides.iterkeys())):
- raise ConfigException("Target override '%s' in '%s' is only allowed at the application level"
- % ("target.extra_labels", ConfigParameter.get_display_name(unit_name, unit_kind, label)))
+ if (unit_kind == 'library' and
+ any(attr.startswith('target.extra_labels') for attr
+ in overrides.iterkeys())):
+ raise ConfigException(
+ "Target override 'target.extra_labels' in " +
+ ConfigParameter.get_display_name(unit_name, unit_kind,
+ label) +
+ " is only allowed at the application level")
# Parse out cumulative overrides
for attr, cumulatives in self.cumulative_overrides.iteritems():
if 'target.'+attr in overrides:
- cumulatives.strict_cumulative_overrides(overrides['target.'+attr])
+ cumulatives.strict_cumulative_overrides(
+ overrides['target.'+attr])
del overrides['target.'+attr]
if 'target.'+attr+'_add' in overrides:
- cumulatives.add_cumulative_overrides(overrides['target.'+attr+'_add'])
+ cumulatives.add_cumulative_overrides(
+ overrides['target.'+attr+'_add'])
del overrides['target.'+attr+'_add']
if 'target.'+attr+'_remove' in overrides:
- cumulatives.remove_cumulative_overrides(overrides['target.'+attr+'_remove'])
+ cumulatives.remove_cumulative_overrides(
+ overrides['target.'+attr+'_remove'])
del overrides['target.'+attr+'_remove']
# Consider the others as overrides
- for name, v in overrides.items():
+ for name, val in overrides.items():
# Get the full name of the parameter
- full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind, label)
+ full_name = ConfigParameter.get_full_name(name, unit_name,
+ unit_kind, label)
if full_name in params:
- params[full_name].set_value(v, unit_name, unit_kind, label)
+ params[full_name].set_value(val, unit_name, unit_kind,
+ label)
else:
- self.config_errors.append(ConfigException("Attempt to override undefined parameter '%s' in '%s'"
- % (full_name, ConfigParameter.get_display_name(unit_name, unit_kind, label))))
+ self.config_errors.append(
+ ConfigException(
+ "Attempt to override undefined parameter" +
+ (" '%s' in '%s'"
+ % (full_name,
+ ConfigParameter.get_display_name(unit_name,
+ unit_kind,
+ label)))))
for cumulatives in self.cumulative_overrides.itervalues():
cumulatives.update_target(Target.get_target(self.target))
return params
- # Read and interpret configuration data defined by targets
def get_target_config_data(self):
- # We consider the resolution order for our target and sort it by level reversed,
- # so that we first look at the top level target (the parent), then its direct children,
- # then the children's children and so on, until we reach self.target
- # TODO: this might not work so well in some multiple inheritance scenarios
- # At each step, look at two keys of the target data:
- # - config_parameters: used to define new configuration parameters
- # - config_overrides: used to override already defined configuration parameters
+ """Read and interpret configuration data defined by targets.
+
+ We consider the resolution order for our target and sort it by level
+ reversed, so that we first look at the top level target (the parent),
+ then its direct children, then the children of those children and so on,
+ until we reach self.target
+ TODO: this might not work so well in some multiple inheritance scenarios
+ At each step, look at two keys of the target data:
+ - config_parameters: used to define new configuration parameters
+ - config_overrides: used to override already defined configuration
+ parameters
+
+ Arguments: None
+ """
params, json_data = {}, Target.get_json_target_data()
- resolution_order = [e[0] for e in sorted(Target.get_target(self.target).resolution_order, key = lambda e: e[1], reverse = True)]
+ resolution_order = [e[0] for e
+ in sorted(
+ Target.get_target(self.target).resolution_order,
+ key=lambda e: e[1], reverse=True)]
for tname in resolution_order:
# Read the target data directly from its description
- t = json_data[tname]
+ target_data = json_data[tname]
# Process definitions first
- self._process_config_parameters(t.get("config", {}), params, tname, "target")
+ _process_config_parameters(target_data.get("config", {}), params,
+ tname, "target")
# Then process overrides
- for name, v in t.get("overrides", {}).items():
+ for name, val in target_data.get("overrides", {}).items():
full_name = ConfigParameter.get_full_name(name, tname, "target")
- # If the parameter name is not defined or if there isn't a path from this target to the target where the
- # parameter was defined in the target inheritance tree, raise an error
- # We need to use 'defined_by[7:]' to remove the "target:" prefix from defined_by
- if (not full_name in params) or (not params[full_name].defined_by[7:] in Target.get_target(tname).resolution_order_names):
- raise ConfigException("Attempt to override undefined parameter '%s' in '%s'" % (name, ConfigParameter.get_display_name(tname, "target")))
+            # If the parameter name is not defined, or if there isn't a path
+            # from this target to the target where the parameter was defined
+            # in the target inheritance tree, raise an error. We need to use
+            # 'defined_by[7:]' to remove the "target:" prefix from
+            # defined_by
+ if (full_name not in params) or \
+ (params[full_name].defined_by[7:] not in
+ Target.get_target(tname).resolution_order_names):
+ raise ConfigException(
+ "Attempt to override undefined parameter '%s' in '%s'"
+ % (name,
+ ConfigParameter.get_display_name(tname, "target")))
# Otherwise update the value of the parameter
- params[full_name].set_value(v, tname, "target")
+ params[full_name].set_value(val, tname, "target")
return params
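resolution_order holds (target_name, level) pairs, and sorting by level in reverse yields most-generic-ancestor-first order, so a child's definitions and overrides are always processed after its parents'. A toy illustration (names and levels invented):

    resolution_order = [("K64F", 0), ("Target", 2), ("MCU_K64F", 1)]
    ordered = [name for name, level in
               sorted(resolution_order, key=lambda pair: pair[1], reverse=True)]
    print ordered
    # ['Target', 'MCU_K64F', 'K64F']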
- # Helper function: process a macro definition, checking for incompatible duplicate definitions
- # mlist: list of macro names to process
- # macros: dictionary with currently discovered macros
- # unit_name: the unit (library/application) that defines this macro
- # unit_kind: the kind of the unit ("library" or "application")
- def _process_macros(self, mlist, macros, unit_name, unit_kind):
- for mname in mlist:
- m = ConfigMacro(mname, unit_name, unit_kind)
- if (m.macro_name in macros) and (macros[m.macro_name].name != mname):
- # Found an incompatible definition of the macro in another module, so raise an error
- full_unit_name = ConfigParameter.get_display_name(unit_name, unit_kind)
- raise ConfigException("Macro '%s' defined in both '%s' and '%s' with incompatible values" % (m.macro_name, macros[m.macro_name].defined_by, full_unit_name))
- macros[m.macro_name] = m
+ def get_lib_config_data(self):
+ """ Read and interpret configuration data defined by libraries. It is
+ assumed that "add_config_files" above was already called and the library
+ configuration data exists in self.lib_config_data
- # Read and interpret configuration data defined by libs
- # It is assumed that "add_config_files" above was already called and the library configuration data
- # exists in self.lib_config_data
- def get_lib_config_data(self):
+ Arguments: None
+ """
all_params, macros = {}, {}
for lib_name, lib_data in self.lib_config_data.items():
unknown_keys = set(lib_data.keys()) - self.__allowed_keys["library"]
if unknown_keys:
- raise ConfigException("Unknown key(s) '%s' in %s" % (",".join(unknown_keys), lib_name))
- all_params.update(self._process_config_and_overrides(lib_data, {}, lib_name, "library"))
- self._process_macros(lib_data.get("macros", []), macros, lib_name, "library")
+ raise ConfigException("Unknown key(s) '%s' in %s" %
+ (",".join(unknown_keys), lib_name))
+ all_params.update(self._process_config_and_overrides(lib_data, {},
+ lib_name,
+ "library"))
+ _process_macros(lib_data.get("macros", []), macros, lib_name,
+ "library")
return all_params, macros
- # Read and interpret the configuration data defined by the target
- # The target can override any configuration parameter, as well as define its own configuration data
- # params: the dictionary with configuration parameters found so far (in the target and in libraries)
- # macros: the list of macros defined in the configuration
def get_app_config_data(self, params, macros):
+ """ Read and interpret the configuration data defined by the target. The
+ target can override any configuration parameter, as well as define its
+ own configuration data.
+
+ Positional arguments.
+ params - the dictionary with configuration parameters found so far (in
+ the target and in libraries)
+ macros - the list of macros defined in the configuration
+ """
app_cfg = self.app_config_data
- # The application can have a "config_parameters" and a "target_config_overrides" section just like a library
- self._process_config_and_overrides(app_cfg, params, "app", "application")
+ # The application can have a "config_parameters" and a
+ # "target_config_overrides" section just like a library
+ self._process_config_and_overrides(app_cfg, params, "app",
+ "application")
# The application can also define macros
- self._process_macros(app_cfg.get("macros", []), macros, "app", "application")
+ _process_macros(app_cfg.get("macros", []), macros, "app",
+ "application")
- # Return the configuration data in two parts:
- # - params: a dictionary with (name, ConfigParam) entries
- # - macros: the list of macros defined with "macros" in libraries and in the application (as ConfigMacro instances)
def get_config_data(self):
+ """ Return the configuration data in two parts: (params, macros)
+        params - a dictionary mapping a name to a ConfigParam
+ macros - the list of macros defined with "macros" in libraries and in
+ the application (as ConfigMacro instances)
+
+ Arguments: None
+ """
all_params = self.get_target_config_data()
lib_params, macros = self.get_lib_config_data()
all_params.update(lib_params)
self.get_app_config_data(all_params, macros)
return all_params, macros
- # Helper: verify if there are any required parameters without a value in 'params'
@staticmethod
def _check_required_parameters(params):
- for p in params.values():
- if p.required and (p.value is None):
- raise ConfigException("Required parameter '%s' defined by '%s' doesn't have a value" % (p.name, p.defined_by))
+ """Check that there are no required parameters without a value
+
+ Positional arguments:
+ params - the list of parameters to check
- # Return the macro definitions generated for a dictionary of configuration parameters
- # params: a dictionary of (name, ConfigParameters instance) mappings
+        NOTE: This function does not return a value. Instead, it raises a
+        ConfigException when any of the required parameters is missing a value
+ """
+ for param in params.values():
+ if param.required and (param.value is None):
+ raise ConfigException("Required parameter '" + param.name +
+ "' defined by '" + param.defined_by +
+ "' doesn't have a value")
+
@staticmethod
def parameters_to_macros(params):
- return ['%s=%s' % (m.macro_name, m.value) for m in params.values() if m.value is not None]
+ """ Encode the configuration parameters as C macro definitions.
+
+ Positional arguments:
+ params - a dictionary mapping a name to a ConfigParameter
- # Return the macro definitions generated for a dictionary of ConfigMacros (as returned by get_config_data)
- # params: a dictionary of (name, ConfigMacro instance) mappings
+ Return: a list of strings that encode the configuration parameters as
+ C pre-processor macros
+ """
+ return ['%s=%s' % (m.macro_name, m.value) for m in params.values()
+ if m.value is not None]
+
@staticmethod
def config_macros_to_macros(macros):
+ """ Return the macro definitions generated for a dictionary of
+ ConfigMacros (as returned by get_config_data).
+
+ Positional arguments:
+        macros - a dictionary mapping a name to a ConfigMacro instance
+
+ Return: a list of strings that are the C pre-processor macros
+ """
return [m.name for m in macros.values()]
- # Return the configuration data converted to a list of C macros
- # config - configuration data as (ConfigParam instances, ConfigMacro instances) tuple
- # (as returned by get_config_data())
@staticmethod
def config_to_macros(config):
+ """Convert the configuration data to a list of C macros
+
+ Positional arguments:
+ config - configuration data as (ConfigParam instances, ConfigMacro
+ instances) tuple (as returned by get_config_data())
+ """
params, macros = config[0], config[1]
Config._check_required_parameters(params)
- return Config.config_macros_to_macros(macros) + Config.parameters_to_macros(params)
+ return Config.config_macros_to_macros(macros) + \
+ Config.parameters_to_macros(params)
- # Return the configuration data converted to a list of C macros
def get_config_data_macros(self):
+ """ Convert a Config object to a list of C macros
+
+ Arguments: None
+ """
return self.config_to_macros(self.get_config_data())
- # Returns any features in the configuration data
def get_features(self):
+ """ Extract any features from the configuration data
+
+ Arguments: None
+ """
params, _ = self.get_config_data()
self._check_required_parameters(params)
- self.cumulative_overrides['features'].update_target(Target.get_target(self.target))
+ self.cumulative_overrides['features']\
+ .update_target(Target.get_target(self.target))
features = Target.get_target(self.target).features
for feature in features:
if feature not in self.__allowed_features:
- raise ConfigException("Feature '%s' is not a supported features" % feature)
+ raise ConfigException(
+ "Feature '%s' is not a supported features" % feature)
return features
- # Validate configuration settings. This either returns True or raises an exception
def validate_config(self):
+ """ Validate configuration settings. This either returns True or
+ raises an exception
+
+ Arguments: None
+ """
if self.config_errors:
raise self.config_errors[0]
return True
- # Loads configuration data from resources. Also expands resources based on defined features settings
def load_resources(self, resources):
+ """ Load configuration data from a Resources instance and expand it
+ based on defined features.
+
+ Positional arguments:
+ resources - the resources object to load from and expand
+ """
# Update configuration files until added features creates no changes
prev_features = set()
while True:
- # Add/update the configuration with any .json files found while scanning
+ # Add/update the configuration with any .json files found while
+ # scanning
self.add_config_files(resources.json_files)
# Add features while we find new ones
- features = self.get_features()
+ features = set(self.get_features())
if features == prev_features:
break
@@ -457,52 +718,85 @@
return resources
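The loop above runs to a fixed point because enabling a feature can expose new FEATURE_* directories whose mbed_lib.json files enable further features. The set() wrapper added in this revision matters: a bare list from get_features() would make the equality test order-sensitive. Schematically (scanning elided; get_features stands in for self.get_features):

    prev_features = set()
    while True:
        features = set(get_features())   # set: comparison ignores ordering
        if features == prev_features:
            break                        # fixed point: no new features appeared
        # ... add FEATURE_<name> paths to the scan and re-run it ...
        prev_features = features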
- # Return the configuration data converted to the content of a C header file,
- # meant to be included to a C/C++ file. The content is returned as a string.
- # If 'fname' is given, the content is also written to the file called "fname".
- # WARNING: if 'fname' names an existing file, that file will be overwritten!
- # config - configuration data as (ConfigParam instances, ConfigMacro instances) tuple
- # (as returned by get_config_data())
@staticmethod
- def config_to_header(config, fname = None):
+ def config_to_header(config, fname=None):
+ """ Convert the configuration data to the content of a C header file,
+ meant to be included to a C/C++ file. The content is returned as a
+ string.
+
+ Positional arguments:
+ config - configuration data as (ConfigParam instances, ConfigMacro
+ instances) tuple (as returned by get_config_data())
+
+ Keyword arguments:
+        fname - also write the content to the file called "fname".
+ WARNING: if 'fname' names an existing file, it will be
+ overwritten!
+ """
params, macros = config[0], config[1]
Config._check_required_parameters(params)
- header_data = "// Automatically generated configuration file.\n"
+ header_data = "// Automatically generated configuration file.\n"
header_data += "// DO NOT EDIT, content will be overwritten.\n\n"
header_data += "#ifndef __MBED_CONFIG_DATA__\n"
header_data += "#define __MBED_CONFIG_DATA__\n\n"
# Compute maximum length of macro names for proper alignment
- max_param_macro_name_len = max([len(m.macro_name) for m in params.values() if m.value is not None]) if params else 0
- max_direct_macro_name_len = max([len(m.macro_name) for m in macros.values()]) if macros else 0
- max_macro_name_len = max(max_param_macro_name_len, max_direct_macro_name_len)
+ max_param_macro_name_len = (max([len(m.macro_name) for m
+ in params.values()
+ if m.value is not None])
+ if params else 0)
+ max_direct_macro_name_len = (max([len(m.macro_name) for m
+ in macros.values()])
+ if macros else 0)
+ max_macro_name_len = max(max_param_macro_name_len,
+ max_direct_macro_name_len)
# Compute maximum length of macro values for proper alignment
- max_param_macro_val_len = max([len(str(m.value)) for m in params.values() if m.value is not None]) if params else 0
- max_direct_macro_val_len = max([len(m.macro_value or "") for m in macros.values()]) if macros else 0
- max_macro_val_len = max(max_param_macro_val_len, max_direct_macro_val_len)
+ max_param_macro_val_len = (max([len(str(m.value)) for m
+ in params.values()
+ if m.value is not None])
+ if params else 0)
+ max_direct_macro_val_len = max([len(m.macro_value or "") for m
+ in macros.values()]) if macros else 0
+ max_macro_val_len = max(max_param_macro_val_len,
+ max_direct_macro_val_len)
# Generate config parameters first
if params:
header_data += "// Configuration parameters\n"
- for m in params.values():
- if m.value is not None:
- header_data += "#define {0:<{1}} {2!s:<{3}} // set by {4}\n".format(m.macro_name, max_macro_name_len, m.value, max_macro_val_len, m.set_by)
+ for macro in params.values():
+ if macro.value is not None:
+ header_data += ("#define {0:<{1}} {2!s:<{3}} " +
+ "// set by {4}\n")\
+ .format(macro.macro_name, max_macro_name_len,
+ macro.value, max_macro_val_len, macro.set_by)
# Then macros
if macros:
header_data += "// Macros\n"
- for m in macros.values():
- if m.macro_value:
- header_data += "#define {0:<{1}} {2!s:<{3}} // defined by {4}\n".format(m.macro_name, max_macro_name_len, m.macro_value, max_macro_val_len, m.defined_by)
+ for macro in macros.values():
+ if macro.macro_value:
+ header_data += ("#define {0:<{1}} {2!s:<{3}}" +
+ " // defined by {4}\n")\
+ .format(macro.macro_name, max_macro_name_len,
+ macro.macro_value, max_macro_val_len,
+ macro.defined_by)
else:
- header_data += "#define {0:<{1}} // defined by {2}\n".format(m.macro_name, max_macro_name_len + max_macro_val_len + 1, m.defined_by)
+ header_data += ("#define {0:<{1}}" +
+ " // defined by {2}\n")\
+ .format(macro.macro_name,
+ max_macro_name_len + max_macro_val_len + 1,
+ macro.defined_by)
header_data += "\n#endif\n"
# If fname is given, write "header_data" to it
if fname:
- with open(fname, "wt") as f:
- f.write(header_data)
+ with open(fname, "w+") as file_desc:
+ file_desc.write(header_data)
return header_data
- # Return the configuration data converted to the content of a C header file,
- # meant to be included to a C/C++ file. The content is returned as a string.
- # If 'fname' is given, the content is also written to the file called "fname".
- # WARNING: if 'fname' names an existing file, that file will be overwritten!
- def get_config_data_header(self, fname = None):
+ def get_config_data_header(self, fname=None):
+ """ Convert a Config instance to the content of a C header file, meant
+ to be included to a C/C++ file. The content is returned as a string.
+
+ Keyword arguments:
+ fname - also write the content to the file called "fname".
+ WARNING: if 'fname' names an existing file, it will be
+ overwritten!
+ """
return self.config_to_header(self.get_config_data(), fname)
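Put together, the alignment logic above produces a header in which every value and trailing comment lines up. A sketch of the end-to-end call and a plausible output (the target, parameter and value are invented, and tools.config is assumed importable from the repository root):

    from tools.config import Config

    config = Config("K64F", ["."])   # hypothetical target and source root
    print config.get_config_data_header("mbed_config.h")
    # // Automatically generated configuration file.
    # // DO NOT EDIT, content will be overwritten.
    #
    # #ifndef __MBED_CONFIG_DATA__
    # #define __MBED_CONFIG_DATA__
    #
    # // Configuration parameters
    # #define MBED_CONF_MY_LIB_BUFFER_SIZE 512 // set by library:my-lib[*]
    #
    # #endif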
--- a/detect_targets.py Mon Aug 29 10:55:42 2016 +0100
+++ b/detect_targets.py Mon Aug 29 11:18:36 2016 +0100
@@ -14,18 +14,13 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
-
-TEST BUILD & RUN
"""
import sys
import os
-import json
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, ROOT)
-
from tools.options import get_default_options_parser
# Check: Extra modules which are required by core test suite
@@ -37,54 +32,61 @@
from tools.test_api import get_autodetected_MUTS_list
-if __name__ == '__main__':
+def main():
+ """Entry Point"""
try:
# Parse Options
parser = get_default_options_parser()
parser.add_argument("-S", "--supported-toolchains",
- action="store_true",
- dest="supported_toolchains",
- default=False,
- help="Displays supported matrix of targets and toolchains")
+ action="store_true",
+ dest="supported_toolchains",
+ default=False,
+ help="Displays supported matrix of"
+ " targets and toolchains")
parser.add_argument('-f', '--filter',
- dest='general_filter_regex',
- default=None,
- help='Filter targets')
+ dest='general_filter_regex',
+ default=None,
+ help='Filter targets')
parser.add_argument("-v", "--verbose",
- action="store_true",
- dest="verbose",
- default=False,
- help="Verbose diagnostic output")
+ action="store_true",
+ dest="verbose",
+ default=False,
+ help="Verbose diagnostic output")
options = parser.parse_args()
# Only prints matrix of supported toolchains
if options.supported_toolchains:
- print mcu_toolchain_matrix(platform_filter=options.general_filter_regex)
+ print mcu_toolchain_matrix(
+ platform_filter=options.general_filter_regex)
exit(0)
# If auto_detect attribute is present, we assume other auto-detection
# parameters like 'toolchains_filter' are also set.
- MUTs = get_autodetected_MUTS_list()
+ muts = get_autodetected_MUTS_list()
count = 0
- for mut in MUTs.values():
+ for mut in muts.values():
print ""
- print "[mbed] Detected %s, port %s, mounted %s" % (mut['mcu'], mut['port'], mut['disk'])
+ print "[mbed] Detected %s, port %s, mounted %s" % \
+ (mut['mcu'], mut['port'], mut['disk'])
print "[mbed] Supported toolchains for %s" % mut['mcu']
print mcu_toolchain_matrix(platform_filter=r'^'+mut['mcu']+'$')
count += 1
-
+
if count == 0:
print "[mbed] No mbed targets where detected on your system."
- except KeyboardInterrupt, e:
+ except KeyboardInterrupt:
print "\n[CTRL+c] exit"
- except Exception,e:
+ except Exception as exc:
import traceback
traceback.print_exc(file=sys.stdout)
- print "[ERROR] %s" % str(e)
+ print "[ERROR] %s" % str(exc)
sys.exit(1)
+
+if __name__ == '__main__':
+ main()
--- a/export/ds5_5.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export/ds5_5.py Mon Aug 29 11:18:36 2016 +0100
@@ -40,6 +40,8 @@
's_sources':'2'
}
+ TOOLCHAIN = "ARM"
+
def get_toolchain(self):
return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
--- a/export/exporters.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export/exporters.py Mon Aug 29 11:18:36 2016 +0100
@@ -43,7 +43,7 @@
self.build_url_resolver = build_url_resolver
jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
self.jinja_environment = Environment(loader=jinja_loader)
- self.extra_symbols = extra_symbols
+ self.extra_symbols = extra_symbols if extra_symbols else []
self.config_macros = []
self.sources_relative = sources_relative
self.config_header = None
@@ -59,6 +59,11 @@
def progen_flags(self):
if not hasattr(self, "_progen_flag_cache") :
self._progen_flag_cache = dict([(key + "_flags", value) for key,value in self.flags.iteritems()])
+ asm_defines = ["-D"+symbol for symbol in self.toolchain.get_symbols(True)]
+ c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()]
+ self._progen_flag_cache['asm_flags'] += asm_defines
+ self._progen_flag_cache['c_flags'] += c_defines
+ self._progen_flag_cache['cxx_flags'] += c_defines
if self.config_header:
self._progen_flag_cache['c_flags'] += self.toolchain.get_config_option(self.config_header)
self._progen_flag_cache['cxx_flags'] += self.toolchain.get_config_option(self.config_header)
@@ -196,6 +201,7 @@
self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
config.get_config_data_header(join(trg_path, self.config_header))
self.config_macros = []
+ self.resources.inc_dirs.append(".")
else:
# And add the configuration macros to the toolchain
self.config_macros = config.get_config_data_macros()
@@ -213,11 +219,16 @@
""" This function returns symbols which must be exported.
Please add / overwrite symbols in each exporter separately
"""
- symbols = self.toolchain.get_symbols() + self.config_macros
+
# We have extra symbols from e.g. libraries; we want them added to the export as well
- if add_extra_symbols:
- if self.extra_symbols is not None:
- symbols.extend(self.extra_symbols)
+ extra = self.extra_symbols if add_extra_symbols else []
+ if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED:
+ # If the config header is supported, we will preinclude it and do not
+ # need the macros as preprocessor flags
+ return extra
+
+ symbols = self.toolchain.get_symbols(True) + self.toolchain.get_symbols() \
+ + self.config_macros + extra
return symbols
def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
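The progen_flags hunk above expands every toolchain symbol into a -D define and appends the result to the assembler, C, and C++ flag lists. A minimal sketch of that expansion with stand-in symbol lists (the real values come from self.toolchain.get_symbols()):

# Stand-in data; real lists come from toolchain.get_symbols(True) / get_symbols()
asm_symbols = ["TARGET_K64F", "TOOLCHAIN_GCC"]
c_symbols = ["TARGET_K64F", "TOOLCHAIN_GCC", "MBED_BUILD_TIMESTAMP=1472465916"]

flag_cache = {"asm_flags": [], "c_flags": [], "cxx_flags": []}
flag_cache["asm_flags"] += ["-D" + symbol for symbol in asm_symbols]
flag_cache["c_flags"] += ["-D" + symbol for symbol in c_symbols]
flag_cache["cxx_flags"] += ["-D" + symbol for symbol in c_symbols]  # C++ reuses the C defines

print(flag_cache["asm_flags"])  # ['-DTARGET_K64F', '-DTOOLCHAIN_GCC']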
--- a/export/gcc_arm_common.tmpl Mon Aug 29 10:55:42 2016 +0100
+++ b/export/gcc_arm_common.tmpl Mon Aug 29 11:18:36 2016 +0100
@@ -89,13 +89,13 @@
.asm.o:
+@$(call MAKEDIR,$(dir $@))
- $(CC) $(CPU) -c $(ASM_FLAGS) -o $@ $<
+ $(CC) $(CPU) -c $(ASM_FLAGS) $(CC_SYMBOLS) $(INCLUDE_PATHS) -o $@ $<
.s.o:
+@$(call MAKEDIR,$(dir $@))
- $(CC) $(CPU) -c $(ASM_FLAGS) -o $@ $<
+ $(CC) $(CPU) -c $(ASM_FLAGS) $(CC_SYMBOLS) $(INCLUDE_PATHS) -o $@ $<
.S.o:
+@$(call MAKEDIR,$(dir $@))
- $(CC) $(CPU) -c $(ASM_FLAGS) -o $@ $<
+ $(CC) $(CPU) -c $(ASM_FLAGS) $(CC_SYMBOLS) $(INCLUDE_PATHS) -o $@ $<
.c.o:
+@$(call MAKEDIR,$(dir $@))
--- a/export/gcc_arm_nrf51_dk.tmpl Mon Aug 29 10:55:42 2016 +0100
+++ b/export/gcc_arm_nrf51_dk.tmpl Mon Aug 29 11:18:36 2016 +0100
@@ -1,7 +1,11 @@
{% extends "gcc_arm_common.tmpl" %}
+{% block target_all %}
+all: $(PROJECT).bin $(PROJECT)-combined.hex size
+{% endblock %}
+
{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_NRF51_DK/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
+SOFTDEVICE = {% for f in hex_files %}{{f}} {% endfor %}
{% endblock %}
{% block additional_executables %}
@@ -9,6 +13,6 @@
{% endblock %}
{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+$(PROJECT)-combined.hex: $(PROJECT).hex
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o $(PROJECT)-combined.hex -intel --line-length=44
{% endblock %}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/export/gcc_arm_ty51822r3.tmpl Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,18 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_all %}
+all: $(PROJECT).bin $(PROJECT)-combined.hex size
+{% endblock %}
+
+{% block additional_variables %}
+SOFTDEVICE = {% for f in hex_files %}{{f}} {% endfor %}
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+$(PROJECT)-combined.hex: $(PROJECT).hex
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o $(PROJECT)-combined.hex -intel --line-length=44
+{% endblock %}
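Both templates above replace the phony merge target with a real $(PROJECT)-combined.hex rule, so the softdevice and application images are concatenated by srec_cat as part of the default all target. A Python sketch of the equivalent invocation, assuming srec_cat is on PATH and using hypothetical file names:

import subprocess

def merge_hex(softdevice_hex, project_hex, output_hex):
    """Mirror the make rule: merge softdevice and application images into one hex."""
    subprocess.check_call([
        "srec_cat",
        softdevice_hex, "-intel",    # -intel marks the preceding input as Intel hex
        project_hex, "-intel",
        "-o", output_hex, "-intel",  # emit the merged image as Intel hex as well
        "--line-length=44",          # keep output records short
    ])

merge_hex("s110_softdevice.hex", "myproject.hex", "myproject-combined.hex")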
--- a/export/gccarm.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export/gccarm.py Mon Aug 29 11:18:36 2016 +0100
@@ -58,6 +58,7 @@
'ARCH_PRO',
'NRF51822',
'HRM1017',
+ 'TY51822R3',
'RBLAB_NRF51822',
'RBLAB_BLENANO',
'LPC2368',
@@ -98,6 +99,7 @@
'DISCO_F334C8',
'MAX32600MBED',
'MAXWSNENV',
+ 'MAX32620HSP',
'MTS_MDOT_F405RG',
'MTS_MDOT_F411RE',
'NUCLEO_L152RE',
@@ -161,10 +163,11 @@
'libraries': libraries,
'symbols': self.get_symbols(),
'cpu_flags': self.toolchain.cpu,
- 'vpath': [relpath(s, build_dir) for s in self.prj_paths] if self.sources_relative else [".."]
+ 'vpath': [relpath(s, build_dir) for s in self.prj_paths] if self.sources_relative else [".."],
+ 'hex_files': self.resources.hex_files
}
- for key in ['include_paths', 'library_paths', 'linker_script']:
+ for key in ['include_paths', 'library_paths', 'linker_script', 'hex_files']:
if isinstance(ctx[key], list):
ctx[key] = [ctx['vpath'][0] + "/" + t for t in ctx[key]]
else:
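The context-dictionary hunk above adds the target's hex files to the template context and runs them through the same vpath-prefixing loop as the include paths and linker script. A small sketch of that loop with stand-in data:

# Stand-in context; the real one is built in the exporter from self.resources
ctx = {
    "vpath": [".."],
    "include_paths": ["mbed", "mbed/TARGET_NRF51_DK"],
    "linker_script": "mbed/TARGET_NRF51_DK/linker.ld",
    "hex_files": ["s110_softdevice.hex"],
}
for key in ["include_paths", "linker_script", "hex_files"]:
    if isinstance(ctx[key], list):
        ctx[key] = [ctx["vpath"][0] + "/" + path for path in ctx[key]]
    else:
        ctx[key] = ctx["vpath"][0] + "/" + ctx[key]
print(ctx["hex_files"])  # ['../s110_softdevice.hex']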
--- a/export/kds.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export/kds.py Mon Aug 29 11:18:36 2016 +0100
@@ -24,6 +24,7 @@
TARGETS = [
'K64F',
+ 'HEXIWEAR',
'K22F',
]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/export/kds_hexiwear_cproject.tmpl Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,306 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?fileVersion 4.0.0?><cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
+ <storageModule moduleId="org.eclipse.cdt.core.settings">
+ <cconfiguration id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026">
+ <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026" moduleId="org.eclipse.cdt.core.settings" name="Debug">
+ <externalSettings/>
+ <extensions>
+ <extension id="org.eclipse.cdt.managedbuilder.core.ManagedBuildManager" point="org.eclipse.cdt.core.ScannerInfoProvider"/>
+ <extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
+ <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ </extensions>
+ </storageModule>
+ <storageModule moduleId="cdtBuildSystem" version="4.0.0">
+ <configuration artifactName="${ProjName}" buildArtefactType="org.eclipse.cdt.build.core.buildArtefactType.exe" buildProperties="org.eclipse.cdt.build.core.buildType=org.eclipse.cdt.build.core.buildType.debug,org.eclipse.cdt.build.core.buildArtefactType=org.eclipse.cdt.build.core.buildArtefactType.exe" cleanCommand="${cross_rm} -rf" description="" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026" name="Debug" parent="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug">
+ <folderInfo id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026." name="/" resourcePath="">
+ <toolChain id="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.debug.1221610645" name="Cross ARM GCC" nonInternalBuilderId="ilg.gnuarmeclipse.managedbuild.cross.builder" superClass="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.debug">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.1271983492" name="Optimization Level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level" value="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.none" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength.1681866628" name="Message length (-fmessage-length=0)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar.1550050553" name="'char' is signed (-fsigned-char)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections.2126138943" name="Function sections (-ffunction-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections.1492840277" name="Data sections (-fdata-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.1058622512" name="Debug level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.default" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.1583945235" name="Debug format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.gdb" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family.1089911925" name="ARM family" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.mcpu.cortex-m4" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.77844367" name="Float ABI" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.softfp" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.353876552" name="FPU Type" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.fpv4spd16" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name.1308049896" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name" value="Custom" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix.560926624" name="Prefix" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix" value="arm-none-eabi-" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.c.660978974" name="C compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.c" value="gcc" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp.1169416449" name="C++ compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp" value="g++" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy.1545312724" name="Hex/Bin converter" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy" value="objcopy" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump.2106299868" name="Listing generator" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump" value="objdump" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.size.880150025" name="Size command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.size" value="size" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.make.1449434602" name="Build command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.make" value="make" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm.1638755745" name="Remove command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm" value="rm" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn.1500383066" name="Enable all common warnings (-Wall)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.1422858690" name="Output file format (-O)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice" value="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.binary" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash.1453349108" name="Create flash image" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.nobuiltin.918192766" name="Disable builtin (-fno-builtin)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.nobuiltin" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.other.845411621" name="Other debugging flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.other" value="" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.prof.2076910080" name="Generate prof information (-p)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.prof" value="false" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.gprof.1002876099" name="Generate gprof information (-pg)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.gprof" value="false" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize.371856963" name="Print size" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize" value="true" valueType="boolean"/>
+ <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform.2090214221" isAbstract="false" osList="all" superClass="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform"/>
+ <builder autoBuildTarget="all" buildPath="${workspace_loc:/{{name}}}/Debug" cleanBuildTarget="clean" command="${cross_make}" id="org.eclipse.cdt.build.core.internal.builder.2045347460" incrementalBuildTarget="all" managedBuildOn="true" name="CDT Internal Builder" superClass="org.eclipse.cdt.build.core.internal.builder"/>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.774448198" name="Cross ARM GNU Assembler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor.874144438" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs.1457752231" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs" valueType="definedSymbols">
+ {% for s in symbols %}
+ <listOptionValue builtIn="false" value="{{s}}"/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths.1240528565" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths" valueType="includePath">
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input.645447748" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1023327076" name="Cross ARM C Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.std.655157579" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.std" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.std.c99" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths.1298012181" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths" useByScannerDiscovery="false" valueType="includePath">
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs.26057600" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs" valueType="definedSymbols">
+ {% for s in symbols %}
+ <listOptionValue builtIn="false" value="{{s}}"/>
+ {% endfor %}
+ </option>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.247734571" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.248936164" name="Cross ARM C++ Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths.1551083554" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths" valueType="includePath">
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs.1601945676" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs" useByScannerDiscovery="false" valueType="definedSymbols">
+ {% for s in symbols %}
+ <listOptionValue builtIn="false" value="{{s}}"/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.noexceptions.73762833" name="Do not use exceptions (-fno-exceptions)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.noexceptions" useByScannerDiscovery="true" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.nortti.1541205451" name="Do not use RTTI (-fno-rtti)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.nortti" useByScannerDiscovery="true" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.std.2072412260" name="Language standard" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.std" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.std.default" valueType="enumerated"/>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input.2029463372" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.1882430856" name="Cross ARM C Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections.339583643" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections" value="true" valueType="boolean"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.1999194416" name="Cross ARM C++ Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections.344980185" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths.727573047" name="Library search path (-L)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths" valueType="libPaths">
+ {% if libraries %}
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ {% endif %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile.828171482" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile" valueType="stringList">
+ <listOptionValue builtIn="false" value="${workspace_loc:/${ProjName}/{{linker_script}}}"/>
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs.310068762" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs" valueType="libs">
+ {% for lib in libraries %}
+ <listOptionValue builtIn="false" value="{{lib}}"/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.otherobjs.460736806" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.otherobjs" valueType="userObjs">
+ {% for path in object_files %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other.30848869" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other" value="-specs=nosys.specs" valueType="string"/>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input.1081415325" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input">
+ <additionalInput kind="additionalinputdependency" paths="$(USER_OBJS)"/>
+ <additionalInput kind="additionalinput" paths="$(LIBS)"/>
+ </inputType>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver.1216251638" name="Cross ARM GNU Archiver" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver"/>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash.1820796904" name="Cross ARM GNU Create Flash Image" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.70927688" name="Output file format (-O)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice" value="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.binary" valueType="enumerated"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting.721327636" name="Cross ARM GNU Create Listing" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source.625552450" name="Display source (--source|-S)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders.263758416" name="Display all headers (--all-headers|-x)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle.1024069673" name="Demangle names (--demangle|-C)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers.1043375284" name="Display line numbers (--line-numbers|-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide.1671601569" name="Wide lines (--wide|-w)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide" value="true" valueType="boolean"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize.171400698" name="Cross ARM GNU Print Size" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format.1102568395" name="Size format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format"/>
+ </tool>
+ </toolChain>
+ </folderInfo>
+ </configuration>
+ </storageModule>
+ <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
+ </cconfiguration>
+ <cconfiguration id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787">
+ <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787" moduleId="org.eclipse.cdt.core.settings" name="Release">
+ <externalSettings/>
+ <extensions>
+ <extension id="org.eclipse.cdt.managedbuilder.core.ManagedBuildManager" point="org.eclipse.cdt.core.ScannerInfoProvider"/>
+ <extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
+ <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
+ </extensions>
+ </storageModule>
+ <storageModule moduleId="cdtBuildSystem" version="4.0.0">
+ <configuration artifactName="${ProjName}" buildArtefactType="org.eclipse.cdt.build.core.buildArtefactType.exe" buildProperties="org.eclipse.cdt.build.core.buildType=org.eclipse.cdt.build.core.buildType.release,org.eclipse.cdt.build.core.buildArtefactType=org.eclipse.cdt.build.core.buildArtefactType.exe" cleanCommand="${cross_rm} -rf" description="" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787" name="Release" parent="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release">
+ <folderInfo id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787." name="/" resourcePath="">
+ <toolChain id="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.release.765163102" name="Cross ARM GCC" superClass="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.release">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.1271983492" name="Optimization Level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level" value="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.size" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength.1681866628" name="Message length (-fmessage-length=0)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar.1550050553" name="'char' is signed (-fsigned-char)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections.2126138943" name="Function sections (-ffunction-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections.1492840277" name="Data sections (-fdata-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.1058622512" name="Debug level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.default" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.1583945235" name="Debug format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.gdb" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family.1089911925" name="ARM family" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.mcpu.cortex-m4" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.77844367" name="Float ABI" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.softfp" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.353876552" name="FPU Type" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.fpv4spd16" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name.1308049896" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name" value="Custom" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix.560926624" name="Prefix" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix" value="arm-none-eabi-" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.c.660978974" name="C compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.c" value="gcc" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp.1169416449" name="C++ compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp" value="g++" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy.1545312724" name="Hex/Bin converter" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy" value="objcopy" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump.2106299868" name="Listing generator" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump" value="objdump" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.size.880150025" name="Size command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.size" value="size" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.make.1449434602" name="Build command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.make" value="make" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm.1638755745" name="Remove command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm" value="rm" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn.1500383066" name="Enable all common warnings (-Wall)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.1422858690" name="Output file format (-O)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice" value="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.binary" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash.1453349108" name="Create flash image" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.nobuiltin.918192766" name="Disable builtin (-fno-builtin)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.nobuiltin" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.other.845411621" name="Other debugging flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.other" value="" valueType="string"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.prof.2076910080" name="Generate prof information (-p)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.prof" value="false" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.gprof.1002876099" name="Generate gprof information (-pg)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.gprof" value="false" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize.371856963" name="Print size" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize" value="true" valueType="boolean"/>
+ <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform.2090214221" isAbstract="false" osList="all" superClass="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform"/>
+ <builder autoBuildTarget="all" buildPath="${workspace_loc:/{{name}}}/Debug" cleanBuildTarget="clean" command="${cross_make}" id="org.eclipse.cdt.build.core.internal.builder.2045347460" incrementalBuildTarget="all" managedBuildOn="true" name="CDT Internal Builder" superClass="org.eclipse.cdt.build.core.internal.builder"/>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.774448198" name="Cross ARM GNU Assembler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor.874144438" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs.1457752231" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs" valueType="definedSymbols">
+ {% for s in symbols %}
+ <listOptionValue builtIn="false" value="{{s}}"/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths.1240528565" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths" valueType="includePath">
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input.645447748" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1023327076" name="Cross ARM C Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.std.655157579" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.std" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.std.c99" valueType="enumerated"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths.1298012181" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths" useByScannerDiscovery="false" valueType="includePath">
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs.26057600" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs" valueType="definedSymbols">
+ {% for s in symbols %}
+ <listOptionValue builtIn="false" value="{{s}}"/>
+ {% endfor %}
+ </option>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.247734571" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.248936164" name="Cross ARM C++ Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths.1551083554" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths" valueType="includePath">
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs.1601945676" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs" useByScannerDiscovery="false" valueType="definedSymbols">
+ {% for s in symbols %}
+ <listOptionValue builtIn="false" value="{{s}}"/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.noexceptions.73762833" name="Do not use exceptions (-fno-exceptions)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.noexceptions" useByScannerDiscovery="true" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.nortti.1541205451" name="Do not use RTTI (-fno-rtti)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.nortti" useByScannerDiscovery="true" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.std.2072412260" name="Language standard" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.std" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.std.default" valueType="enumerated"/>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input.2029463372" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.1882430856" name="Cross ARM C Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections.339583643" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections" value="true" valueType="boolean"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.1999194416" name="Cross ARM C++ Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections.344980185" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths.727573047" name="Library search path (-L)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths" valueType="libPaths">
+ {% if libraries %}
+ {% for path in include_paths %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ {% endif %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile.828171482" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile" valueType="stringList">
+ <listOptionValue builtIn="false" value="${workspace_loc:/${ProjName}/{{linker_script}}}"/>
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs.310068762" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs" valueType="libs">
+ {% for lib in libraries %}
+ <listOptionValue builtIn="false" value="{{lib}}"/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.otherobjs.460736806" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.otherobjs" valueType="userObjs">
+ {% for path in object_files %}
+ <listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/{{path}}}""/>
+ {% endfor %}
+ </option>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other.30848869" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other" value="-specs=nosys.specs" valueType="string"/>
+ <inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input.1081415325" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input">
+ <additionalInput kind="additionalinputdependency" paths="$(USER_OBJS)"/>
+ <additionalInput kind="additionalinput" paths="$(LIBS)"/>
+ </inputType>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver.1216251638" name="Cross ARM GNU Archiver" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver"/>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash.1820796904" name="Cross ARM GNU Create Flash Image" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.70927688" name="Output file format (-O)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice" value="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.binary" valueType="enumerated"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting.721327636" name="Cross ARM GNU Create Listing" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source.625552450" name="Display source (--source|-S)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders.263758416" name="Display all headers (--all-headers|-x)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle.1024069673" name="Demangle names (--demangle|-C)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers.1043375284" name="Display line numbers (--line-numbers|-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers" value="true" valueType="boolean"/>
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide.1671601569" name="Wide lines (--wide|-w)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide" value="true" valueType="boolean"/>
+ </tool>
+ <tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize.171400698" name="Cross ARM GNU Print Size" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize">
+ <option id="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format.1102568395" name="Size format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format"/>
+ </tool>
+ </toolChain>
+ </folderInfo>
+ </configuration>
+ </storageModule>
+ <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
+ </cconfiguration>
+ </storageModule>
+ <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
+ <storageModule moduleId="cdtBuildSystem" version="4.0.0">
+ <project id="{{name}}.ilg.gnuarmeclipse.managedbuild.cross.target.elf.829438011" name="Executable" projectType="ilg.gnuarmeclipse.managedbuild.cross.target.elf"/>
+ </storageModule>
+ <storageModule moduleId="scannerConfiguration">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ <scannerConfigBuildInfo instanceId="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026;ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026.;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1023327076;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.247734571">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ </scannerConfigBuildInfo>
+ <scannerConfigBuildInfo instanceId="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787;ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787.;ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.307634730;ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input.1070359138">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ </scannerConfigBuildInfo>
+ <scannerConfigBuildInfo instanceId="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026;ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.637912026.;ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.248936164;ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input.2029463372">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ </scannerConfigBuildInfo>
+ <scannerConfigBuildInfo instanceId="ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787;ilg.gnuarmeclipse.managedbuild.cross.config.elf.release.1382253787.;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1300731881;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.690792246">
+ <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
+ </scannerConfigBuildInfo>
+ </storageModule>
+</cproject>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/export/kds_hexiwear_project.tmpl Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>{{name}}</name>
+ <comment>This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-KDS</comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
+ <triggers>clean,full,incremental,</triggers>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
+ <triggers>full,incremental,</triggers>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.cdt.core.cnature</nature>
+ <nature>org.eclipse.cdt.core.ccnature</nature>
+ <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
+ <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
+ </natures>
+</projectDescription>
--- a/export/uvision4.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export/uvision4.py Mon Aug 29 11:18:36 2016 +0100
@@ -73,10 +73,14 @@
# get flags from toolchain and apply
project_data['tool_specific']['uvision']['misc'] = {}
- # asm flags only, common are not valid within uvision project, they are armcc specific
- project_data['tool_specific']['uvision']['misc']['asm_flags'] = list(set(self.progen_flags['asm_flags']))
+ # need to make this a string for progen; it only adds the preprocessor when "macros" is set
+ asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(
+ list(set(self.progen_flags['asm_flags'])))
+ project_data['tool_specific']['uvision']['misc']['asm_flags'] = [asm_flag_string]
# cxx flags included, as uvision has them all in one tab
- project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set(self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags']))
+ project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set(
+ ['-D__ASSERT_MSG'] + self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags[
+ 'cxx_flags']))
# not compatible with c99 flag set in the template
project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99")
# cpp is not required as it's implicit for cpp files
@@ -85,17 +89,6 @@
project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--no_vla")
project_data['tool_specific']['uvision']['misc']['ld_flags'] = self.progen_flags['ld_flags']
- i = 0
- for macro in project_data['common']['macros']:
- # armasm does not like floating numbers in macros, timestamp to int
- if macro.startswith('MBED_BUILD_TIMESTAMP'):
- timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
- project_data['common']['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
- # armasm does not even accept MACRO=string
- if macro.startswith('MBED_USERNAME'):
- project_data['common']['macros'].pop(i)
- i += 1
- project_data['common']['macros'].append('__ASSERT_MSG')
project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision4'
if progen_build:
self.progen_gen_file('uvision', project_data, True)
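The hunk above folds the assembler defines into a single string because progen expects one value there, and armasm only runs the C preprocessor when --cpreproc is passed, so the defines travel through --cpreproc_opts as one comma-separated argument. A sketch with stand-in flags (sorted here for a deterministic result, where the exporter uses an unordered set):

asm_flags = ["-DTARGET_K64F", "--cpu=Cortex-M4"]   # hypothetical progen asm flags
asm_flag_string = ("--cpreproc --cpreproc_opts=-D__ASSERT_MSG,"
                   + ",".join(sorted(set(asm_flags))))
print(asm_flag_string)
# --cpreproc --cpreproc_opts=-D__ASSERT_MSG,--cpu=Cortex-M4,-DTARGET_K64F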
--- a/export/uvision5.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export/uvision5.py Mon Aug 29 11:18:36 2016 +0100
@@ -73,10 +73,12 @@
# get flags from toolchain and apply
project_data['tool_specific']['uvision5']['misc'] = {}
- # asm flags only, common are not valid within uvision project, they are armcc specific
- project_data['tool_specific']['uvision5']['misc']['asm_flags'] = list(set(self.progen_flags['asm_flags']))
+
+ # need to make this a string for progen; it only adds the preprocessor when "macros" is set
+ asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.progen_flags['asm_flags'])))
+ project_data['tool_specific']['uvision5']['misc']['asm_flags'] = [asm_flag_string]
# cxx flags included, as uvision has them all in one tab
- project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags']))
+ project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']+self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags']))
# not compatible with c99 flag set in the template
project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99")
# cpp is not required as it's implicit for cpp files
@@ -85,17 +87,6 @@
project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--no_vla")
project_data['tool_specific']['uvision5']['misc']['ld_flags'] = self.progen_flags['ld_flags']
- i = 0
- for macro in project_data['common']['macros']:
- # armasm does not like floating numbers in macros, timestamp to int
- if macro.startswith('MBED_BUILD_TIMESTAMP'):
- timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
- project_data['common']['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
- # armasm does not even accept MACRO=string
- if macro.startswith('MBED_USERNAME'):
- project_data['common']['macros'].pop(i)
- i += 1
- project_data['common']['macros'].append('__ASSERT_MSG')
project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision5'
if progen_build:
self.progen_gen_file('uvision5', project_data, True)
--- a/export_test.py Mon Aug 29 10:55:42 2016 +0100
+++ b/export_test.py Mon Aug 29 11:18:36 2016 +0100
@@ -155,6 +155,7 @@
('uvision', 'MTS_MDOT_F405RG'),
('uvision', 'MAXWSNENV'),
('uvision', 'MAX32600MBED'),
+ ('uvision', 'MAX32620HSP'),
('uvision', 'DISCO_F051R8'),
('uvision', 'DISCO_F103RB'),
('uvision', 'DISCO_F303VC'),
@@ -226,6 +227,7 @@
('gcc_arm', 'RZ_A1H'),
('gcc_arm', 'MAXWSNENV'),
('gcc_arm', 'MAX32600MBED'),
+ ('gcc_arm', 'MAX32620HSP'),
('gcc_arm', 'ARCH_BLE'),
('gcc_arm', 'ARCH_MAX'),
('gcc_arm', 'ARCH_PRO'),
@@ -287,6 +289,7 @@
('iar', 'MTS_MDOT_F411RE'),
('iar', 'MAXWSNENV'),
('iar', 'MAX32600MBED'),
+ ('iar', 'MAX32620HSP'),
('iar', 'MOTE_L152RC'),
('iar', 'RZ_A1H'),
--- a/hooks.py Mon Aug 29 10:55:42 2016 +0100
+++ b/hooks.py Mon Aug 29 11:18:36 2016 +0100
@@ -1,37 +1,40 @@
-# Configurable hooks in the build system. Can be used by various platforms
-# to customize the build process.
+""" Configurable hooks in the build system. Can be used by various platforms
+to customize the build process.
+"""
################################################################################
# Hooks for the various parts of the build process
# Internal mapping of hooks per tool
-_hooks = {}
+_HOOKS = {}
# Internal mapping of running hooks
-_running_hooks = {}
+_RUNNING_HOOKS = {}
# Available hook types
-_hook_types = ["binary", "compile", "link", "assemble"]
+_HOOK_TYPES = ["binary", "compile", "link", "assemble"]
# Available hook steps
-_hook_steps = ["pre", "replace", "post"]
+_HOOK_STEPS = ["pre", "replace", "post"]
# Hook the given function. Use this function as a decorator
def hook_tool(function):
+ """Decorate a function as a tool that may be hooked"""
tool = function.__name__
tool_flag = "_" + tool + "_done"
def wrapper(t_self, *args, **kwargs):
+ """The hooked function itself"""
# if a hook for this tool is already running, it's most likely
# coming from a derived class, so don't hook the super class version
- if _running_hooks.get(tool, False):
+ if _RUNNING_HOOKS.get(tool, False):
return function(t_self, *args, **kwargs)
- _running_hooks[tool] = True
+ _RUNNING_HOOKS[tool] = True
# If this tool isn't hooked, return original function
- if not _hooks.has_key(tool):
+ if not _HOOKS.has_key(tool):
res = function(t_self, *args, **kwargs)
- _running_hooks[tool] = False
+ _RUNNING_HOOKS[tool] = False
return res
- tooldesc = _hooks[tool]
+ tooldesc = _HOOKS[tool]
setattr(t_self, tool_flag, False)
# If there is a replace hook, execute the replacement instead
if tooldesc.has_key("replace"):
@@ -39,7 +42,7 @@
# If the replacement has set the "done" flag, exit now
# Otherwise continue as usual
if getattr(t_self, tool_flag, False):
- _running_hooks[tool] = False
+ _RUNNING_HOOKS[tool] = False
return res
# Execute pre-function before main function if specified
if tooldesc.has_key("pre"):
@@ -49,76 +52,162 @@
# Execute post-function after main function if specified
if tooldesc.has_key("post"):
post_res = tooldesc["post"](t_self, *args, **kwargs)
- _running_hooks[tool] = False
+ _RUNNING_HOOKS[tool] = False
return post_res or res
else:
- _running_hooks[tool] = False
+ _RUNNING_HOOKS[tool] = False
return res
return wrapper
-class Hook:
+class Hook(object):
+ """A compiler class that may be hooked"""
def __init__(self, target, toolchain):
- _hooks.clear()
+ _HOOKS.clear()
self._cmdline_hooks = {}
self.toolchain = toolchain
target.init_hooks(self, toolchain.__class__.__name__)
# Hook various functions directly
- def _hook_add(self, hook_type, hook_step, function):
- if not hook_type in _hook_types or not hook_step in _hook_steps:
+ @staticmethod
+ def _hook_add(hook_type, hook_step, function):
+ """Add a hook to a compile function
+
+ Positional arguments:
+ hook_type - one of the _HOOK_TYPES
+ hook_step - one of the _HOOK_STEPS
+ function - the function to add to the list of hooks
+ """
+ if hook_type not in _HOOK_TYPES or hook_step not in _HOOK_STEPS:
return False
- if not hook_type in _hooks:
- _hooks[hook_type] = {}
- _hooks[hook_type][hook_step] = function
+ if hook_type not in _HOOKS:
+ _HOOKS[hook_type] = {}
+ _HOOKS[hook_type][hook_step] = function
return True
def hook_add_compiler(self, hook_step, function):
+ """Add a hook to the compiler
+
+ Positional Arguments:
+ hook_step - one of the _HOOK_STEPS
+ function - the function to add to the list of hooks
+ """
return self._hook_add("compile", hook_step, function)
def hook_add_linker(self, hook_step, function):
+ """Add a hook to the linker
+
+ Positional Arguments:
+ hook_step - one of the _HOOK_STEPS
+ function - the function to add to the list of hooks
+ """
return self._hook_add("link", hook_step, function)
def hook_add_assembler(self, hook_step, function):
+ """Add a hook to the assemble
+
+ Positional Arguments:
+ hook_step - one of the _HOOK_STEPS
+ function - the function to add to the list of hooks
+ """
return self._hook_add("assemble", hook_step, function)
def hook_add_binary(self, hook_step, function):
+ """Add a hook to the elf to binary tool
+
+ Positional Arguments:
+ hook_step - one of the _HOOK_STEPS
+ function - the function to add to the list of hooks
+ """
return self._hook_add("binary", hook_step, function)
# Hook command lines
def _hook_cmdline(self, hook_type, function):
- if not hook_type in _hook_types:
+ """Add a hook to a command line function
+
+ Positional arguments:
+ hook_type - one of the _HOOK_TYPES
+ function - the function to add to the list of hooks
+ """
+ if hook_type not in _HOOK_TYPES:
return False
self._cmdline_hooks[hook_type] = function
return True
def hook_cmdline_compiler(self, function):
+ """Add a hook to the compiler command line
+
+ Positional arguments:
+ function - the function to call
+ """
return self._hook_cmdline("compile", function)
def hook_cmdline_linker(self, function):
+ """Add a hook to the linker command line
+
+ Positional arguments:
+ function - the function to call
+ """
return self._hook_cmdline("link", function)
def hook_cmdline_assembler(self, function):
+ """Add a hook to the assembler command line
+
+ Positional arguments:
+ function - the function to call
+ """
return self._hook_cmdline("assemble", function)
def hook_cmdline_binary(self, function):
+ """Add a hook to the elf to bin tool command line
+
+ Positional arguments:
+ function - the function to call
+ """
return self._hook_cmdline("binary", function)
# Return the command line after applying the hook
def _get_cmdline(self, hook_type, cmdline):
+ """Get the command line after running all hooks
+
+ Positional arguments:
+ hook_type - one of the _HOOK_TYPES
+ cmdline - the initial command line
+ """
if self._cmdline_hooks.has_key(hook_type):
- cmdline = self._cmdline_hooks[hook_type](self.toolchain.__class__.__name__, cmdline)
+ cmdline = self._cmdline_hooks[hook_type](
+ self.toolchain.__class__.__name__, cmdline)
return cmdline
def get_cmdline_compiler(self, cmdline):
+ """Get the compiler command line after running all hooks
+
+ Positional arguments:
+ cmdline - the initial command line
+ """
return self._get_cmdline("compile", cmdline)
def get_cmdline_linker(self, cmdline):
+ """Get the linker command line after running all hooks
+
+ Positional arguments:
+ cmdline - the initial command line
+ """
return self._get_cmdline("link", cmdline)
def get_cmdline_assembler(self, cmdline):
+ """Get the assmebler command line after running all hooks
+
+ Positional arguments:
+ cmdline - the initial command line
+ """
return self._get_cmdline("assemble", cmdline)
def get_cmdline_binary(self, cmdline):
+ """Get the binary command line after running all hooks
+
+ Positional arguments:
+ cmdline - the initial command line
+ """
return self._get_cmdline("binary", cmdline)
################################################################################
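
To see how the command-line hooks above are meant to compose, here is a minimal, self-contained sketch of the pattern. ToyTarget and ToyHook are illustrative stand-ins, not the real Target and Hook classes:

    class ToyTarget(object):
        """Stand-in for a Target; registers a cmdline hook on init"""
        def init_hooks(self, hook, toolchain_name):
            hook.hook_cmdline_compiler(self.add_define)

        @staticmethod
        def add_define(toolchain_name, cmdline):
            # every compile command gets one extra macro appended
            return cmdline + ["-DTOY_PATCH=1"]

    class ToyHook(object):
        """Stand-in for Hook, reduced to the compile cmdline path"""
        def __init__(self, target, toolchain_name):
            self._cmdline_hooks = {}
            target.init_hooks(self, toolchain_name)

        def hook_cmdline_compiler(self, function):
            self._cmdline_hooks["compile"] = function

        def get_cmdline_compiler(self, cmdline):
            if "compile" in self._cmdline_hooks:
                cmdline = self._cmdline_hooks["compile"]("GCC_ARM", cmdline)
            return cmdline

    hook = ToyHook(ToyTarget(), "GCC_ARM")
    print(hook.get_cmdline_compiler(["arm-none-eabi-gcc", "-c", "main.c"]))
    # ['arm-none-eabi-gcc', '-c', 'main.c', '-DTOY_PATCH=1']
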
--- a/libraries.py Mon Aug 29 10:55:42 2016 +0100
+++ b/libraries.py Mon Aug 29 11:18:36 2016 +0100
@@ -14,8 +14,15 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
-from tools.paths import *
-from tools.data.support import *
+from tools.paths import MBED_RTX, RTOS_LIBRARIES, MBED_LIBRARIES, MBED_RPC,\
+ RTOS_ABSTRACTION, RPC_LIBRARY, USB, USB_LIBRARIES, USB_HOST,\
+ USB_HOST_LIBRARIES, FAT_FS, DSP_ABSTRACTION, DSP_CMSIS, DSP_LIBRARIES,\
+ SD_FS, FS_LIBRARY, ETH_SOURCES, LWIP_SOURCES, ETH_LIBRARY, UBLOX_SOURCES,\
+ UBLOX_LIBRARY, CELLULAR_SOURCES, CELLULAR_USB_SOURCES, CPPUTEST_SRC,\
+ CPPUTEST_PLATFORM_SRC, CPPUTEST_TESTRUNNER_SCR, CPPUTEST_LIBRARY,\
+ CPPUTEST_INC, CPPUTEST_PLATFORM_INC, CPPUTEST_TESTRUNNER_INC,\
+ CPPUTEST_INC_EXT
+from tools.data.support import DEFAULT_SUPPORT
from tools.tests import TEST_MBED_LIB
@@ -84,7 +91,8 @@
{
"id": "ublox",
- "source_dir": [UBLOX_SOURCES, CELLULAR_SOURCES, CELLULAR_USB_SOURCES, LWIP_SOURCES],
+ "source_dir": [UBLOX_SOURCES, CELLULAR_SOURCES, CELLULAR_USB_SOURCES,
+ LWIP_SOURCES],
"build_dir": UBLOX_LIBRARY,
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES],
},
@@ -92,12 +100,15 @@
# Unit Testing library
{
"id": "cpputest",
- "source_dir": [CPPUTEST_SRC, CPPUTEST_PLATFORM_SRC, CPPUTEST_TESTRUNNER_SCR],
+ "source_dir": [CPPUTEST_SRC, CPPUTEST_PLATFORM_SRC,
+ CPPUTEST_TESTRUNNER_SCR],
"build_dir": CPPUTEST_LIBRARY,
"dependencies": [MBED_LIBRARIES],
- 'inc_dirs': [CPPUTEST_INC, CPPUTEST_PLATFORM_INC, CPPUTEST_TESTRUNNER_INC, TEST_MBED_LIB],
+ 'inc_dirs': [CPPUTEST_INC, CPPUTEST_PLATFORM_INC,
+ CPPUTEST_TESTRUNNER_INC, TEST_MBED_LIB],
'inc_dirs_ext': [CPPUTEST_INC_EXT],
- 'macros': ["CPPUTEST_USE_MEM_LEAK_DETECTION=0", "CPPUTEST_USE_STD_CPP_LIB=0", "CPPUTEST=1"],
+ 'macros': ["CPPUTEST_USE_MEM_LEAK_DETECTION=0",
+ "CPPUTEST_USE_STD_CPP_LIB=0", "CPPUTEST=1"],
},
]
@@ -105,19 +116,30 @@
LIBRARY_MAP = dict([(library['id'], library) for library in LIBRARIES])
-class Library:
- DEFAULTS = {
- "supported": DEFAULT_SUPPORT,
- 'dependencies': None,
- 'inc_dirs': None, # Include dirs required by library build
- 'inc_dirs_ext': None, # Include dirs required by others to use with this library
- 'macros': None, # Additional macros you want to define when building library
- }
+class Library(object):
+ """A library representation that allows for querying of support"""
def __init__(self, lib_id):
- self.__dict__.update(Library.DEFAULTS)
- self.__dict__.update(LIBRARY_MAP[lib_id])
+ lib = LIBRARY_MAP[lib_id]
+ self.supported = lib.get("supported", DEFAULT_SUPPORT)
+ self.dependencies = lib.get("dependencies", None)
+ # Include dirs required by library build
+ self.inc_dirs = lib.get("inc_dirs", None)
+ # Include dirs required by others to use with this library
+ self.inc_dirs_ext = lib.get("inc_dirs_ext", None)
+ # Additional macros you want to define when building library
+ self.macros = lib.get("macros", None)
+
+ self.source_dir = lib["source_dir"]
+ self.build_dir = lib["build_dir"]
def is_supported(self, target, toolchain):
+ """Check if a target toolchain combination is supported
+
+ Positional arguments:
+ target - the MCU or board
+ toolchain - the compiler
+ """
if not hasattr(self, 'supported'):
return True
- return (target.name in self.supported) and (toolchain in self.supported[target.name])
+ return (target.name in self.supported) and \
+ (toolchain in self.supported[target.name])
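
The refactored Library class above replaces the __dict__-update trick with explicit attributes. A small, self-contained sketch of how the support query behaves (toy data; the real entries come from LIBRARIES, and the real method takes a Target object whose .name is consulted):

    DEFAULT_SUPPORT = None  # stand-in for tools.data.support.DEFAULT_SUPPORT

    LIBRARY_MAP = {
        "rtx": {
            "source_dir": "rtos/rtx",     # illustrative paths
            "build_dir": ".build/rtx",
            "supported": {"K64F": ["ARM", "GCC_ARM"]},
        }
    }

    class ToyLibrary(object):
        def __init__(self, lib_id):
            lib = LIBRARY_MAP[lib_id]
            self.supported = lib.get("supported", DEFAULT_SUPPORT)

        def is_supported(self, target_name, toolchain):
            # None means "no restrictions recorded", i.e. supported
            if not self.supported:
                return True
            return (target_name in self.supported and
                    toolchain in self.supported[target_name])

    lib = ToyLibrary("rtx")
    print(lib.is_supported("K64F", "GCC_ARM"))  # True
    print(lib.is_supported("K64F", "IAR"))      # False
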
--- a/memap.py Mon Aug 29 10:55:42 2016 +0100
+++ b/memap.py Mon Aug 29 11:18:36 2016 +0100
@@ -1,8 +1,6 @@
#!/usr/bin/env python
-# pylint: disable=too-many-arguments, too-many-locals, too-many-branches, too-many-lines, line-too-long, too-many-nested-blocks, too-many-public-methods, too-many-instance-attributes
-# pylint: disable=invalid-name, missing-docstring
-# Memory Map File Analyser for ARM mbed
+"""Memory Map File Analyser for ARM mbed"""
import sys
import os
@@ -10,37 +8,46 @@
import csv
import json
import argparse
-from utils import argparse_uppercase_type, argparse_lowercase_hyphen_type, argparse_filestring_type
from prettytable import PrettyTable
-debug = False
+from tools.utils import argparse_filestring_type, \
+ argparse_lowercase_hyphen_type, argparse_uppercase_type
+
+DEBUG = False
+RE_ARMCC = re.compile(
+ r'^\s+0x(\w{8})\s+0x(\w{8})\s+(\w+)\s+(\w+)\s+(\d+)\s+[*]?.+\s+(.+)$')
+RE_IAR = re.compile(
+ r'^\s+(.+)\s+(zero|const|ro code|inited|uninit)\s'
+ r'+0x(\w{8})\s+0x(\w+)\s+(.+)\s.+$')
class MemapParser(object):
+ """An object that represents parsed results, parses the memory map files,
+ and writes out different file types of memory results
+ """
+
+ print_sections = ('.text', '.data', '.bss')
+
+ misc_flash_sections = ('.interrupts', '.flash_config')
+
+ other_sections = ('.interrupts_ram', '.init', '.ARM.extab',
+ '.ARM.exidx', '.ARM.attributes', '.eh_frame',
+ '.init_array', '.fini_array', '.jcr', '.stab',
+ '.stabstr', '.ARM.exidx', '.ARM')
+
+ # sections to print info (generic for all toolchains)
+ sections = ('.text', '.data', '.bss', '.heap', '.stack')
def __init__(self):
- """
- General initialization
+ """ General initialization
"""
# list of all modules and their sections
self.modules = dict()
- self.misc_flash_sections = ('.interrupts', '.flash_config')
-
- self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab',
- '.ARM.exidx', '.ARM.attributes', '.eh_frame',
- '.init_array', '.fini_array', '.jcr', '.stab',
- '.stabstr', '.ARM.exidx', '.ARM')
-
- # sections to print info (generic for all toolchains)
- self.sections = ('.text', '.data', '.bss', '.heap', '.stack')
-
# sections must be defined in this order to take irrelevant out
self.all_sections = self.sections + self.other_sections + \
self.misc_flash_sections + ('unknown', 'OUTPUT')
- self.print_sections = ('.text', '.data', '.bss')
-
# list of all object files and mapping to module names
self.object_to_module = dict()
@@ -48,8 +55,12 @@
self.mem_summary = dict()
def module_add(self, module_name, size, section):
- """
- Adds a module / section to the list
+ """ Adds a module / section to the list
+
+ Positional arguments:
+ module_name - name of the module to add
+ size - the size of the module being added
+ section - the section the module contributes to
"""
if module_name in self.modules:
@@ -62,22 +73,29 @@
self.modules[module_name] = temp_dic
def check_new_section_gcc(self, line):
- """
- Check whether a new section in a map file has been detected (only applies to gcc)
+ """ Check whether a new section in a map file has been detected (only
+ applies to gcc)
+
+ Positional arguments:
+ line - the line to check for a new section
"""
for i in self.all_sections:
if line.startswith(i):
- return i # should name of the section (assuming it's a known one)
+ # should be the name of the section (assuming it's a known one)
+ return i
if line.startswith('.'):
return 'unknown' # all others are classified as unknown
else:
return False # everything else, means no change in section
- def path_object_to_module_name(self, txt):
- """
- Parses path to object file and extracts module / object data
+ @staticmethod
+ def path_object_to_module_name(txt):
+ """ Parse a path to object file to extract it's module and object data
+
+ Positional arguments:
+ txt - the path to parse the object and module name from
"""
txt = txt.replace('\\', '/')
@@ -101,13 +119,17 @@
def parse_section_gcc(self, line):
- """
- Parse data from a section of gcc map file
+ """ Parse data from a section of gcc map file
+
+ examples:
+ 0x00004308 0x7c ./.build/K64F/GCC_ARM/mbed-os/hal/targets/hal/TARGET_Freescale/TARGET_KPSDK_MCUS/spi_api.o
+ .text 0x00000608 0x198 ./.build/K64F/GCC_ARM/mbed-os/core/mbed-rtos/rtx/TARGET_CORTEX_M/TARGET_RTOS_M4_M7/TOOLCHAIN_GCC/HAL_CM4.o
+
+ Positional arguments:
+ line - the line to parse a section from
"""
- # examples
- # 0x00004308 0x7c ./.build/K64F/GCC_ARM/mbed-os/hal/targets/hal/TARGET_Freescale/TARGET_KPSDK_MCUS/spi_api.o
- # .text 0x00000608 0x198 ./.build/K64F/GCC_ARM/mbed-os/core/mbed-rtos/rtx/TARGET_CORTEX_M/TARGET_RTOS_M4_M7/TOOLCHAIN_GCC/HAL_CM4.o
- rex_address_len_name = r'^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$'
+ rex_address_len_name = re.compile(
+ r'^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$')
test_address_len_name = re.match(rex_address_len_name, line)
@@ -116,7 +138,8 @@
if int(test_address_len_name.group(2), 16) == 0: # size == 0
return ["", 0] # no valid entry
else:
- m_name, m_object = self.path_object_to_module_name(test_address_len_name.group(3))
+ m_name, _ = self.path_object_to_module_name(
+ test_address_len_name.group(3))
m_size = int(test_address_len_name.group(2), 16)
return [m_name, m_size]
@@ -137,8 +160,10 @@
return ["", 0] # no valid entry
def parse_map_file_gcc(self, file_desc):
- """
- Main logic to decode gcc map files
+ """ Main logic to decode gcc map files
+
+ Positional arguments:
+ file_desc - a stream object to parse as a gcc map file
"""
current_section = 'unknown'
@@ -168,22 +193,25 @@
else:
self.module_add(module_name, module_size, current_section)
- if debug:
+ if DEBUG:
print "Line: %s" % line,
- print "Module: %s\tSection: %s\tSize: %s" % (module_name, current_section, module_size)
+ print "Module: %s\tSection: %s\tSize: %s" % \
+ (module_name, current_section, module_size)
raw_input("----------")
def parse_section_armcc(self, line):
- """
- Parse data from an armcc map file
+ """ Parse data from an armcc map file
+
+ Examples of armcc map file:
+ Base_Addr Size Type Attr Idx E Section Name Object
+ 0x00000000 0x00000400 Data RO 11222 RESET startup_MK64F12.o
+ 0x00000410 0x00000008 Code RO 49364 * !!!main c_w.l(__main.o)
+
+ Positional arguments:
+ line - the line to parse the section data from
"""
- # Examples of armcc map file:
- # Base_Addr Size Type Attr Idx E Section Name Object
- # 0x00000000 0x00000400 Data RO 11222 RESET startup_MK64F12.o
- # 0x00000410 0x00000008 Code RO 49364 * !!!main c_w.l(__main.o)
- rex_armcc = r'^\s+0x(\w{8})\s+0x(\w{8})\s+(\w+)\s+(\w+)\s+(\d+)\s+[*]?.+\s+(.+)$'
- test_rex_armcc = re.match(rex_armcc, line)
+ test_rex_armcc = re.match(RE_ARMCC, line)
if test_rex_armcc:
@@ -213,30 +241,34 @@
return ["", 0, ""] # no valid entry
def parse_section_iar(self, line):
- """
- Parse data from an IAR map file
+ """ Parse data from an IAR map file
+
+ Examples of IAR map file:
+ Section Kind Address Size Object
+ .intvec ro code 0x00000000 0x198 startup_MK64F12.o [15]
+ .rodata const 0x00000198 0x0 zero_init3.o [133]
+ .iar.init_table const 0x00008384 0x2c - Linker created -
+ Initializer bytes const 0x00000198 0xb2 <for P3 s0>
+ .data inited 0x20000000 0xd4 driverAtmelRFInterface.o [70]
+ .bss zero 0x20000598 0x318 RTX_Conf_CM.o [4]
+ .iar.dynexit uninit 0x20001448 0x204 <Block tail>
+ HEAP uninit 0x20001650 0x10000 <Block tail>
+
+ Positional arguments:
+ line - the line to parse section data from
"""
- # Examples of IAR map file:
- # Section Kind Address Size Object
- # .intvec ro code 0x00000000 0x198 startup_MK64F12.o [15]
- # .rodata const 0x00000198 0x0 zero_init3.o [133]
- # .iar.init_table const 0x00008384 0x2c - Linker created -
- # Initializer bytes const 0x00000198 0xb2 <for P3 s0>
- # .data inited 0x20000000 0xd4 driverAtmelRFInterface.o [70]
- # .bss zero 0x20000598 0x318 RTX_Conf_CM.o [4]
- # .iar.dynexit uninit 0x20001448 0x204 <Block tail>
- # HEAP uninit 0x20001650 0x10000 <Block tail>
- rex_iar = r'^\s+(.+)\s+(zero|const|ro code|inited|uninit)\s+0x(\w{8})\s+0x(\w+)\s+(.+)\s.+$'
- test_rex_iar = re.match(rex_iar, line)
+ test_rex_iar = re.match(RE_IAR, line)
if test_rex_iar:
size = int(test_rex_iar.group(4), 16)
- if test_rex_iar.group(2) == 'const' or test_rex_iar.group(2) == 'ro code':
+ if test_rex_iar.group(2) == 'const' or \
+ test_rex_iar.group(2) == 'ro code':
section = '.text'
- elif test_rex_iar.group(2) == 'zero' or test_rex_iar.group(2) == 'uninit':
+ elif test_rex_iar.group(2) == 'zero' or \
+ test_rex_iar.group(2) == 'uninit':
if test_rex_iar.group(1)[0:4] == 'HEAP':
section = '.heap'
elif test_rex_iar.group(1)[0:6] == 'CSTACK':
@@ -263,8 +295,10 @@
return ["", 0, ""] # no valid entry
def parse_map_file_armcc(self, file_desc):
- """
- Main logic to decode armcc map files
+ """ Main logic to decode armc5 map files
+
+ Positional arguments:
+ file_desc - a file like object to parse as an armc5 map file
"""
with file_desc as infile:
@@ -285,8 +319,10 @@
self.module_add(name, size, section)
def parse_map_file_iar(self, file_desc):
- """
- Main logic to decode armcc map files
+ """ Main logic to decode IAR map files
+
+ Positional arguments:
+ file_desc - a file like object to parse as an IAR map file
"""
with file_desc as infile:
@@ -307,9 +343,12 @@
self.module_add(name, size, section)
def search_objects(self, path, toolchain):
- """
- Check whether the specified map file matches with the toolchain.
+ """ Check whether the specified map file matches with the toolchain.
Searches for object files and creates mapping: object --> module
+
+ Positional arguments:
+ path - the path to an object file
+ toolchain - the toolchain used to build the object file
"""
path = path.replace('\\', '/')
@@ -323,18 +362,22 @@
else:
# It looks this is not an mbed project
# object-to-module mapping cannot be generated
- print "Warning: specified toolchain doesn't match with path to the memory map file."
+ print "Warning: specified toolchain doesn't match with"\
+ " path to the memory map file."
return
- for root, dir, obj_files in os.walk(search_path):
+ for root, _, obj_files in os.walk(search_path):
for obj_file in obj_files:
if obj_file.endswith(".o"):
- module_name, object_name = self.path_object_to_module_name(os.path.join(root, obj_file))
+ module_name, object_name = self.path_object_to_module_name(
+ os.path.join(root, obj_file))
if object_name in self.object_to_module:
- if debug:
- print "WARNING: multiple usages of object file: %s" % object_name
- print " Current: %s" % self.object_to_module[object_name]
+ if DEBUG:
+ print "WARNING: multiple usages of object file: %s"\
+ % object_name
+ print " Current: %s" % \
+ self.object_to_module[object_name]
print " New: %s" % module_name
print " "
else:
@@ -343,12 +386,13 @@
export_formats = ["json", "csv-ci", "table"]
def generate_output(self, export_format, file_output=None):
- """
- Generates summary of memory map data
+ """ Generates summary of memory map data
- Parameters
- json_mode: generates output in json formal (True/False)
- file_desc: descriptor (either stdout or file)
+ Positional arguments:
+ export_format - the format to dump
+
+ Keyword arguments:
+ file_output - descriptor (either stdout or file)
"""
try:
@@ -360,6 +404,10 @@
print "I/O error({0}): {1}".format(error.errno, error.strerror)
return False
+ subtotal = dict()
+ for k in self.sections:
+ subtotal[k] = 0
+
# Calculate misc flash sections
misc_flash_mem = 0
for i in self.modules:
@@ -367,6 +415,102 @@
if self.modules[i][k]:
misc_flash_mem += self.modules[i][k]
+ json_obj = []
+ for i in sorted(self.modules):
+
+ json_obj.append({
+ "module":i,
+ "size":{
+ k:self.modules[i][k] for k in self.print_sections
+ }
+ })
+
+ summary = {
+ 'summary':{
+ 'static_ram': (subtotal['.data'] + subtotal['.bss']),
+ 'heap': (subtotal['.heap']),
+ 'stack': (subtotal['.stack']),
+ 'total_ram': (subtotal['.data'] + subtotal['.bss'] +
+ subtotal['.heap']+subtotal['.stack']),
+ 'total_flash': (subtotal['.text'] + subtotal['.data'] +
+ misc_flash_mem),
+ }
+ }
+
+ self.mem_summary = json_obj + [summary]
+
+ to_call = {'json': self.generate_json,
+ 'csv-ci': self.generate_csv,
+ 'table': self.generate_table}[export_format]
+ to_call(subtotal, misc_flash_mem, file_desc)
+
+ if file_desc is not sys.stdout:
+ file_desc.close()
+
+ def generate_json(self, _, dummy, file_desc):
+ """Generate a json file from a memory map
+
+ Positional arguments:
+ _ - per-section size subtotals (unused by the json writer)
+ dummy - size of misc flash sections (unused by the json writer)
+ file_desc - the file to write out the final report to
+ """
+ file_desc.write(json.dumps(self.mem_summary, indent=4))
+ file_desc.write('\n')
+
+ def generate_csv(self, subtotal, misc_flash_mem, file_desc):
+ """Generate a CSV file from a memoy map
+
+ Positional arguments:
+ subtotal - the size subtotal for each section
+ misc_flash_mem - size of misc flash sections
+ file_desc - the file to write out the final report to
+ """
+ csv_writer = csv.writer(file_desc, delimiter=',',
+ quoting=csv.QUOTE_NONE)
+
+ csv_module_section = []
+ csv_sizes = []
+ for i in sorted(self.modules):
+ for k in self.print_sections:
+ csv_module_section += [i+k]
+ csv_sizes += [self.modules[i][k]]
+
+ csv_module_section += ['static_ram']
+ csv_sizes += [subtotal['.data']+subtotal['.bss']]
+
+ csv_module_section += ['heap']
+ if subtotal['.heap'] == 0:
+ csv_sizes += ['unknown']
+ else:
+ csv_sizes += [subtotal['.heap']]
+
+ csv_module_section += ['stack']
+ if subtotal['.stack'] == 0:
+ csv_sizes += ['unknown']
+ else:
+ csv_sizes += [subtotal['.stack']]
+
+ csv_module_section += ['total_ram']
+ csv_sizes += [subtotal['.data'] + subtotal['.bss'] +
+ subtotal['.heap'] + subtotal['.stack']]
+
+ csv_module_section += ['total_flash']
+ csv_sizes += [subtotal['.text']+subtotal['.data']+misc_flash_mem]
+
+ csv_writer.writerow(csv_module_section)
+ csv_writer.writerow(csv_sizes)
+
+ def generate_table(self, subtotal, misc_flash_mem, file_desc):
+ """Generate a table from a memoy map
+
+ Positional arguments:
+ subtotal - the size subtotal for each section
+ misc_flash_mem - size of misc flash sections
+ file_desc - the file to write out the final report to
+ """
# Create table
columns = ['Module']
columns.extend(self.print_sections)
@@ -379,15 +523,8 @@
for i in list(self.print_sections):
table.align[i] = 'r'
- subtotal = dict()
- for k in self.sections:
- subtotal[k] = 0
-
- json_obj = []
for i in sorted(self.modules):
-
- row = []
- row.append(i)
+ row = [i]
for k in self.sections:
subtotal[k] += self.modules[i][k]
@@ -395,13 +532,6 @@
for k in self.print_sections:
row.append(self.modules[i][k])
- json_obj.append({
- "module":i,
- "size":{
- k:self.modules[i][k] for k in self.print_sections
- }
- })
-
table.add_row(row)
subtotal_row = ['Subtotals']
@@ -410,98 +540,46 @@
table.add_row(subtotal_row)
- summary = {
- 'summary':{
- 'static_ram':(subtotal['.data']+subtotal['.bss']),
- 'heap':(subtotal['.heap']),
- 'stack':(subtotal['.stack']),
- 'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),
- 'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),
- }
- }
-
- if export_format == 'json':
- json_to_file = json_obj + [summary]
- file_desc.write(json.dumps(json_to_file, indent=4))
- file_desc.write('\n')
-
- elif export_format == 'csv-ci': # CSV format for the CI system
+ file_desc.write(table.get_string())
+ file_desc.write('\n')
- csv_writer = csv.writer(file_desc, delimiter=',', quoting=csv.QUOTE_NONE)
-
- csv_module_section = []
- csv_sizes = []
- for i in sorted(self.modules):
- for k in self.print_sections:
- csv_module_section += [i+k]
- csv_sizes += [self.modules[i][k]]
-
- csv_module_section += ['static_ram']
- csv_sizes += [subtotal['.data']+subtotal['.bss']]
-
- csv_module_section += ['heap']
- if subtotal['.heap'] == 0:
- csv_sizes += ['unknown']
- else:
- csv_sizes += [subtotal['.heap']]
+ if subtotal['.heap'] == 0:
+ file_desc.write("Allocated Heap: unknown\n")
+ else:
+ file_desc.write("Allocated Heap: %s bytes\n" %
+ str(subtotal['.heap']))
- csv_module_section += ['stack']
- if subtotal['.stack'] == 0:
- csv_sizes += ['unknown']
- else:
- csv_sizes += [subtotal['.stack']]
-
- csv_module_section += ['total_ram']
- csv_sizes += [subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']]
-
- csv_module_section += ['total_flash']
- csv_sizes += [subtotal['.text']+subtotal['.data']+misc_flash_mem]
-
- csv_writer.writerow(csv_module_section)
- csv_writer.writerow(csv_sizes)
-
- else: # default format is 'table'
- file_desc.write(table.get_string())
- file_desc.write('\n')
+ if subtotal['.stack'] == 0:
+ file_desc.write("Allocated Stack: unknown\n")
+ else:
+ file_desc.write("Allocated Stack: %s bytes\n" %
+ str(subtotal['.stack']))
- if subtotal['.heap'] == 0:
- file_desc.write("Allocated Heap: unknown\n")
- else:
- file_desc.write("Allocated Heap: %s bytes\n" % str(subtotal['.heap']))
-
- if subtotal['.stack'] == 0:
- file_desc.write("Allocated Stack: unknown\n")
- else:
- file_desc.write("Allocated Stack: %s bytes\n" % str(subtotal['.stack']))
-
- file_desc.write("Total Static RAM memory (data + bss): %s bytes\n" % (str(subtotal['.data']+subtotal['.bss'])))
- file_desc.write("Total RAM memory (data + bss + heap + stack): %s bytes\n" % (str(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack'])))
- file_desc.write("Total Flash memory (text + data + misc): %s bytes\n" % (str(subtotal['.text']+subtotal['.data']+misc_flash_mem)))
-
- if file_desc is not sys.stdout:
- file_desc.close()
-
- self.mem_summary = json_obj + [summary]
-
- return True
-
- def get_memory_summary(self):
- """! Object is available only after self.generate_output('json') is called
- @return Return memory summary object
- """
- return self.mem_summary
+ file_desc.write("Total Static RAM memory (data + bss): %s bytes\n" %
+ (str(subtotal['.data'] + subtotal['.bss'])))
+ file_desc.write(
+ "Total RAM memory (data + bss + heap + stack): %s bytes\n"
+ % (str(subtotal['.data'] + subtotal['.bss'] + subtotal['.heap'] +
+ subtotal['.stack'])))
+ file_desc.write("Total Flash memory (text + data + misc): %s bytes\n" %
+ (str(subtotal['.text'] + subtotal['.data'] +
+ misc_flash_mem)))
toolchains = ["ARM", "ARM_STD", "ARM_MICRO", "GCC_ARM", "IAR"]
def parse(self, mapfile, toolchain):
- """
- Parse and decode map file depending on the toolchain
+ """ Parse and decode map file depending on the toolchain
+
+ Positional arguments:
+ mapfile - the file name of the memory map file
+ toolchain - the toolchain used to create the file
"""
result = True
try:
- with open(mapfile, 'rt') as file_input:
- if toolchain == "ARM" or toolchain == "ARM_STD" or toolchain == "ARM_MICRO":
+ with open(mapfile, 'r') as file_input:
+ if toolchain == "ARM" or toolchain == "ARM_STD" or\
+ toolchain == "ARM_MICRO":
self.search_objects(os.path.abspath(mapfile), "ARM")
self.parse_map_file_armcc(file_input)
elif toolchain == "GCC_ARM":
@@ -517,21 +595,34 @@
return result
def main():
+ """Entry Point"""
version = '0.3.11'
# Parser handling
- parser = argparse.ArgumentParser(description="Memory Map File Analyser for ARM mbed\nversion %s" % version)
+ parser = argparse.ArgumentParser(
+ description="Memory Map File Analyser for ARM mbed\nversion %s" %
+ version)
- parser.add_argument('file', type=argparse_filestring_type, help='memory map file')
+ parser.add_argument(
+ 'file', type=argparse_filestring_type, help='memory map file')
- parser.add_argument('-t', '--toolchain', dest='toolchain', help='select a toolchain used to build the memory map file (%s)' % ", ".join(MemapParser.toolchains),\
- required=True, type=argparse_uppercase_type(MemapParser.toolchains, "toolchain"))
+ parser.add_argument(
+ '-t', '--toolchain', dest='toolchain',
+ help='select a toolchain used to build the memory map file (%s)' %
+ ", ".join(MemapParser.toolchains),
+ required=True,
+ type=argparse_uppercase_type(MemapParser.toolchains, "toolchain"))
- parser.add_argument('-o', '--output', help='output file name', required=False)
+ parser.add_argument(
+ '-o', '--output', help='output file name', required=False)
- parser.add_argument('-e', '--export', dest='export', required=False, default='table', type=argparse_lowercase_hyphen_type(MemapParser.export_formats,'export format'),\
- help="export format (examples: %s: default)" % ", ".join(MemapParser.export_formats))
+ parser.add_argument(
+ '-e', '--export', dest='export', required=False, default='table',
+ type=argparse_lowercase_hyphen_type(MemapParser.export_formats,
+ 'export format'),
+ help="export format (examples: %s: default)" %
+ ", ".join(MemapParser.export_formats))
parser.add_argument('-v', '--version', action='version', version=version)
@@ -541,7 +632,7 @@
sys.exit(1)
- args, remainder = parser.parse_known_args()
+ args = parser.parse_args()
# Create memap object
memap = MemapParser()
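
As a quick sanity check on the parsing logic, here is RE_ARMCC applied to the sample map line quoted in the parse_section_armcc docstring. This is a standalone sketch; only the regular expression is taken from the code above:

    import re

    RE_ARMCC = re.compile(
        r'^\s+0x(\w{8})\s+0x(\w{8})\s+(\w+)\s+(\w+)\s+(\d+)\s+[*]?.+\s+(.+)$')

    # sample line from the docstring above
    line = ("    0x00000000   0x00000400   Data   RO        11222"
            "    RESET               startup_MK64F12.o")
    match = re.match(RE_ARMCC, line)
    if match:
        # group 2 is the section size, group 6 the object file
        print("size=%d object=%s" % (int(match.group(2), 16),
                                     match.group(6)))
    # size=1024 object=startup_MK64F12.o
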
--- a/options.py Mon Aug 29 10:55:42 2016 +0100
+++ b/options.py Mon Aug 29 11:18:36 2016 +0100
@@ -17,9 +17,16 @@
from argparse import ArgumentParser
from tools.toolchains import TOOLCHAINS
from tools.targets import TARGET_NAMES
-from utils import argparse_force_uppercase_type, argparse_lowercase_hyphen_type, argparse_many
+from tools.utils import argparse_force_uppercase_type, \
+ argparse_lowercase_hyphen_type, argparse_many
def get_default_options_parser(add_clean=True, add_options=True):
+ """Create a new options parser with the default compiler options added
+
+ Keyword arguments:
+ add_clean - add the clean argument?
+ add_options - add the options argument?
+ """
parser = ArgumentParser()
targetnames = TARGET_NAMES
@@ -28,26 +35,49 @@
toolchainlist.sort()
parser.add_argument("-m", "--mcu",
- help="build for the given MCU (%s)" % ', '.join(targetnames),
- metavar="MCU",
- type=argparse_many(argparse_force_uppercase_type(targetnames, "MCU")))
+ help=("build for the given MCU (%s)" %
+ ', '.join(targetnames)),
+ metavar="MCU",
+ type=argparse_many(
+ argparse_force_uppercase_type(
+ targetnames, "MCU")))
parser.add_argument("-t", "--tool",
- help="build using the given TOOLCHAIN (%s)" % ', '.join(toolchainlist),
- metavar="TOOLCHAIN",
- type=argparse_many(argparse_force_uppercase_type(toolchainlist, "toolchain")))
+ help=("build using the given TOOLCHAIN (%s)" %
+ ', '.join(toolchainlist)),
+ metavar="TOOLCHAIN",
+ type=argparse_many(
+ argparse_force_uppercase_type(
+ toolchainlist, "toolchain")))
parser.add_argument("--color",
help="print Warnings, and Errors in color",
action="store_true", default=False)
+ parser.add_argument("--cflags", default=[], action="append",
+ help="Extra flags to provide to the C compiler")
+
+ parser.add_argument("--asmflags", default=[], action="append",
+ help="Extra flags to provide to the assembler")
+
+ parser.add_argument("--ldflags", default=[], action="append",
+ help="Extra flags to provide to the linker")
+
if add_clean:
parser.add_argument("-c", "--clean", action="store_true", default=False,
- help="clean the build directory")
+ help="clean the build directory")
if add_options:
parser.add_argument("-o", "--options", action="append",
- help='Add a build argument ("save-asm": save the asm generated by the compiler, "debug-info": generate debugging information, "analyze": run Goanna static code analyzer")',
- type=argparse_lowercase_hyphen_type(['save-asm', 'debug-info', 'analyze'], "build option"))
+ help=('Add a build argument ("save-asm": save the '
+ 'asm generated by the compiler, "debug-info":'
+ ' generate debugging information, "analyze": '
+ 'run Goanna static code analyzer")'),
+ type=argparse_lowercase_hyphen_type(['save-asm',
+ 'debug-info',
+ 'analyze',
+ 'small-lib',
+ 'std-lib'],
+ "build option"))
return parser
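
For reference, a rough standalone equivalent of what the uppercase-validating argument type does. The helper below is a toy; the real one is argparse_force_uppercase_type in tools.utils, usually wrapped in argparse_many to accept comma-separated lists:

    from argparse import ArgumentParser, ArgumentTypeError

    def force_uppercase_type(allowed, kind):
        """Build an argparse 'type' that upper-cases and validates a value"""
        def parse(string):
            if string.upper() not in allowed:
                raise ArgumentTypeError("invalid %s: %s (choose from %s)"
                                        % (kind, string, ", ".join(allowed)))
            return string.upper()
        return parse

    parser = ArgumentParser()
    parser.add_argument("-m", "--mcu",
                        type=force_uppercase_type(["K64F", "LPC1768"], "MCU"))
    parser.add_argument("--cflags", default=[], action="append",
                        help="Extra flags to provide to the C compiler")

    args = parser.parse_args(["-m", "k64f", "--cflags", "-Os"])
    print(args.mcu)     # K64F
    print(args.cflags)  # ['-Os']
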
--- a/paths.py Mon Aug 29 10:55:42 2016 +0100
+++ b/paths.py Mon Aug 29 11:18:36 2016 +0100
@@ -18,7 +18,7 @@
from os import getenv
# Conventions about the directory structure
-from settings import ROOT, BUILD_DIR
+from tools.settings import ROOT, BUILD_DIR
# Allow overriding some of the build parameters using environment variables
BUILD_DIR = getenv("MBED_BUILD_DIR") or BUILD_DIR
@@ -40,6 +40,8 @@
MBED_LIBRARIES = join(BUILD_DIR, "mbed")
+MBED_CONFIG_FILE = join(ROOT, "mbed_lib.json")
+
# Tests
TEST_DIR = join(LIB_DIR, "tests")
HOST_TESTS = join(ROOT, "tools", "host_tests")
@@ -102,8 +104,10 @@
CPPUTEST_INC = join(CPPUTEST_DIR, "cpputest", "include")
CPPUTEST_INC_EXT = join(CPPUTEST_DIR, "cpputest", "include", "CppUTest")
# Platform dependant code is here (for armcc compiler)
-CPPUTEST_PLATFORM_SRC = join(CPPUTEST_DIR, "cpputest", "src", "Platforms", "armcc")
-CPPUTEST_PLATFORM_INC = join(CPPUTEST_DIR, "cpputest", "include", "Platforms", "armcc")
+CPPUTEST_PLATFORM_SRC = join(CPPUTEST_DIR, "cpputest", "src", "Platforms",
+ "armcc")
+CPPUTEST_PLATFORM_INC = join(CPPUTEST_DIR, "cpputest", "include", "Platforms",
+ "armcc")
# Function 'main' used to run all compiled UTs
CPPUTEST_TESTRUNNER_SCR = join(TEST_DIR, "utest", "testrunner")
CPPUTEST_TESTRUNNER_INC = join(TEST_DIR, "utest", "testrunner")
--- a/project.py Mon Aug 29 10:55:42 2016 +0100
+++ b/project.py Mon Aug 29 11:18:36 2016 +0100
@@ -59,7 +59,7 @@
parser.add_argument("-b",
dest="build",
default=False,
- type=argparse_dir_not_parent(ROOT),
+ action="store_true",
help="use the mbed library build, instead of the sources")
group.add_argument("-L", "--list-tests",
--- a/targets.py Mon Aug 29 10:55:42 2016 +0100
+++ b/targets.py Mon Aug 29 11:18:36 2016 +0100
@@ -15,6 +15,16 @@
limitations under the License.
"""
+import os
+import binascii
+import struct
+import shutil
+import inspect
+import sys
+from tools.patch import patch
+from tools.paths import TOOLS_BOOTLOADERS
+from tools.utils import json_file_to_dict
+
CORE_LABELS = {
"ARM7TDMI-S": ["ARM7", "LIKE_CORTEX_ARM7"],
"Cortex-M0" : ["M0", "CORTEX_M", "LIKE_CORTEX_M0"],
@@ -29,240 +39,313 @@
"Cortex-A9" : ["A9", "CORTEX_A", "LIKE_CORTEX_A9"]
}
-import os
-import binascii
-import struct
-import shutil
-from tools.patch import patch
-from paths import TOOLS_BOOTLOADERS
-import json
-import inspect
-import sys
-from tools.utils import json_file_to_dict
-
-########################################################################################################################
+################################################################################
# Generic Target class that reads and interprets the data in targets.json
-# A simple class that represents all the exceptions associated with hooking
class HookError(Exception):
+ """ A simple class that represents all the exceptions associated with
+ hooking
+ """
pass
-# A simple decorator used for automatically caching data returned by a function
-caches = {}
+CACHES = {}
def cached(func):
+ """A simple decorator used for automatically caching data returned by a
+ function
+ """
def wrapper(*args, **kwargs):
- if not caches.has_key((func.__name__, args)):
- caches[(func.__name__, args)] = func(*args, **kwargs)
- return caches[(func.__name__, args)]
+ """The wrapped function itself"""
+ if not CACHES.has_key((func.__name__, args)):
+ CACHES[(func.__name__, args)] = func(*args, **kwargs)
+ return CACHES[(func.__name__, args)]
return wrapper
-class Target:
+class Target(object):
+ """An object to represent a Target (MCU/Board)"""
# Cumulative attributes can have values appended to them, so they
# need to be computed differently than regular attributes
- __cumulative_attributes = ['extra_labels', 'macros', 'device_has', 'features']
+ cumulative_attributes = ['extra_labels', 'macros', 'device_has', 'features']
- # List of targets that were added dynamically using "add_py_targets" (see below)
+ # List of targets that were added dynamically using "add_py_targets" (see
+ # below)
__py_targets = set()
- # Location of the 'targets.json' file
- __targets_json_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'latest_targets.json')
+ # Default location of the 'targets.json' file
+ __targets_json_location_default = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), 'latest_targets.json')
- # Load the description of JSON target data
+ # Current/new location of the 'targets.json' file
+ __targets_json_location = None
+
@staticmethod
@cached
def get_json_target_data():
- return json_file_to_dict(Target.__targets_json_location)
+ """Load the description of JSON target data"""
+ return json_file_to_dict(Target.__targets_json_location or
+ Target.__targets_json_location_default)
- # Set the location of the targets.json file
@staticmethod
- def set_targets_json_location(location):
- Target.__targets_json_location = location
+ def set_targets_json_location(location=None):
+ """Set the location of the targets.json file"""
+ Target.__targets_json_location = (location or
+ Target.__targets_json_location_default)
# Invalidate caches, since the location of the JSON file changed
- caches.clear()
+ CACHES.clear()
- # Get the members of this module using Python's "inspect" module
@staticmethod
@cached
def get_module_data():
- return dict([(m[0], m[1]) for m in inspect.getmembers(sys.modules[__name__])])
+ """Get the members of this module using Python's "inspect" module"""
+ return dict([(m[0], m[1]) for m in
+ inspect.getmembers(sys.modules[__name__])])
- # Return the order in which target descriptions are searched for attributes
- # This mimics the Python 2.2 method resolution order, which is what the old targets.py module used
- # For more details, check http://makina-corpus.com/blog/metier/2014/python-tutorial-understanding-python-mro-class-search-path
- # The resolution order contains (name, level) tuples, where "name" is the name of the class and "level"
- # is the level in the inheritance hierarchy (the target itself is at level 0, its first parent at level 1,
- # its parent's parent at level 1 and so on)
- def __get_resolution_order(self, target_name, order, level = 0):
- if not target_name in [l[0] for l in order]: # the resolution order can't contain duplicate target names
+ def __get_resolution_order(self, target_name, order, level=0):
+ """ Return the order in which target descriptions are searched for
+ attributes. This mimics the Python 2.2 method resolution order, which
+ is what the old targets.py module used. For more details, check
+ http://makina-corpus.com/blog/metier/2014/python-tutorial-understanding-python-mro-class-search-path
+ The resolution order contains (name, level) tuples, where "name" is the
+ name of the class and "level" is the level in the inheritance hierarchy
+ (the target itself is at level 0, its first parent at level 1, its
+ parent's parent at level 2 and so on)
+ """
+ # the resolution order can't contain duplicate target names
+ if target_name not in [l[0] for l in order]:
order.append((target_name, level))
parents = self.get_json_target_data()[target_name].get("inherits", [])
- for p in parents:
- order = self.__get_resolution_order(p, order, level + 1)
+ for par in parents:
+ order = self.__get_resolution_order(par, order, level + 1)
return order
- # Modify the exporter specification ("progen") by changing all "template" keys to full paths
@staticmethod
def __add_paths_to_progen(data):
+ """Modify the exporter specification ("progen") by changing all
+ "template" keys to full paths
+ """
out = {}
- for key, value in data.items():
- if isinstance(value, dict):
- out[key] = Target.__add_paths_to_progen(value)
+ for key, val in data.items():
+ if isinstance(val, dict):
+ out[key] = Target.__add_paths_to_progen(val)
elif key == "template":
- out[key] = [os.path.join(os.path.dirname(__file__), 'export', v) for v in value]
+ out[key] = [os.path.join(os.path.dirname(__file__), 'export', v)
+ for v in val]
else:
- out[key] = value
+ out[key] = val
return out
- # Comute the value of a given target attribute
- def __getattr_helper(self, attrname):
+ def __getattr_cumulative(self, attrname):
+ """Look for the attribute in the class and its parents, as defined by
+ the resolution order
+ """
tdata = self.get_json_target_data()
- if attrname in self.__cumulative_attributes:
- # For a cumulative attribute, figure out when it was defined the last time (in attribute
- # resolution order) then follow the "_add" and "_remove" data fields
- for idx, t in enumerate(self.resolution_order):
- if attrname in tdata[t[0]]: # the attribute was defined at this level in the resolution order
- def_idx = idx
+ # For a cumulative attribute, figure out when it was defined the
+ # last time (in attribute resolution order) then follow the "_add"
+ # and "_remove" data fields
+ for idx, target in enumerate(self.resolution_order):
+ # the attribute was defined at this level in the resolution
+ # order
+ if attrname in tdata[target[0]]:
+ def_idx = idx
+ break
+ else:
+ raise AttributeError("Attribute '%s' not found in target '%s'"
+ % (attrname, self.name))
+ # Get the starting value of the attribute
+ starting_value = (tdata[self.resolution_order[def_idx][0]][attrname]
+ or [])[:]
+ # Traverse the resolution list in high inheritance to low
+ # inheritance level, left to right order to figure out all the
+ # other classes that change the definition by adding or removing
+ # elements
+ for idx in xrange(self.resolution_order[def_idx][1] - 1, -1, -1):
+ same_level_targets = [tar[0] for tar in self.resolution_order
+ if tar[1] == idx]
+ for tar in same_level_targets:
+ data = tdata[tar]
+ # Do we have anything to add ?
+ if data.has_key(attrname + "_add"):
+ starting_value.extend(data[attrname + "_add"])
+ # Do we have anything to remove ?
+ if data.has_key(attrname + "_remove"):
+ # Macros can be defined either without a value (MACRO)
+ # or with a value (MACRO=10). When removing, we specify
+ # only the name of the macro, without the value. So we
+ # need to create a mapping between the macro name and
+ # its value. This will work for extra_labels and other
+ # type of arrays as well, since they fall into the
+ # "macros without a value" category (simple definitions
+ # without a value).
+ name_def_map = {}
+ for crtv in starting_value:
+ if crtv.find('=') != -1:
+ temp = crtv.split('=')
+ if len(temp) != 2:
+ raise ValueError(
+ "Invalid macro definition '%s'" % crtv)
+ name_def_map[temp[0]] = crtv
+ else:
+ name_def_map[crtv] = crtv
+ for element in data[attrname + "_remove"]:
+ if element not in name_def_map:
+ raise ValueError(
+ ("Unable to remove '%s' in '%s.%s' since "
+ % (element, self.name, attrname)) +
+ "it doesn't exist")
+ starting_value.remove(name_def_map[element])
+ return starting_value
+
+ def __getattr_helper(self, attrname):
+ """Compute the value of a given target attribute"""
+ if attrname in self.cumulative_attributes:
+ return self.__getattr_cumulative(attrname)
+ else:
+ tdata = self.get_json_target_data()
+ starting_value = None
+ for target in self.resolution_order:
+ data = tdata[target[0]]
+ if data.has_key(attrname):
+ starting_value = data[attrname]
break
+ else: # Attribute not found
+ raise AttributeError(
+ "Attribute '%s' not found in target '%s'"
+ % (attrname, self.name))
+ # 'progen' needs the full path to the template (the path in JSON is
+ # relative to tools/export)
+ if attrname == "progen":
+ return self.__add_paths_to_progen(starting_value)
else:
- raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name))
- # Get the starting value of the attribute
- v = (tdata[self.resolution_order[def_idx][0]][attrname] or [])[:]
- # Traverse the resolution list in high inheritance to low inheritance level, left to right order
- # to figure out all the other classes that change the definition by adding or removing elements
- for idx in xrange(self.resolution_order[def_idx][1] - 1, -1, -1):
- same_level_targets = [t[0] for t in self.resolution_order if t[1] == idx]
- for t in same_level_targets:
- data = tdata[t]
- # Do we have anything to add ?
- if data.has_key(attrname + "_add"):
- v.extend(data[attrname + "_add"])
- # Do we have anything to remove ?
- if data.has_key(attrname + "_remove"):
- # Macros can be defined either without a value (MACRO) or with a value (MACRO=10).
- # When removing, we specify only the name of the macro, without the value. So we need
- # to create a mapping between the macro name and its value. This will work for
- # extra_labels and other type of arrays as well, since they fall into the "macros
- # without a value" category (simple definitions without a value).
- name_def_map = {}
- for crtv in v:
- if crtv.find('=') != -1:
- temp = crtv.split('=')
- if len(temp) != 2:
- raise ValueError("Invalid macro definition '%s'" % crtv)
- name_def_map[temp[0]] = crtv
- else:
- name_def_map[crtv] = crtv
- for e in data[attrname + "_remove"]:
- if not e in name_def_map:
- raise ValueError("Unable to remove '%s' in '%s.%s' since it doesn't exist" % (e, self.name, attrname))
- v.remove(name_def_map[e])
- return v
- # Look for the attribute in the class and its parents, as defined by the resolution order
- v = None
- for t in self.resolution_order:
- data = tdata[t[0]]
- if data.has_key(attrname):
- v = data[attrname]
- break
- else: # Attribute not found
- raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name))
- # 'progen' needs the full path to the template (the path in JSON is relative to tools/export)
- return v if attrname != "progen" else self.__add_paths_to_progen(v)
+ return starting_value
- # Return the value of an attribute
- # This function only computes the attribute's value once, then adds it to the instance attributes
- # (in __dict__), so the next time it is returned directly
def __getattr__(self, attrname):
- v = self.__getattr_helper(attrname)
- self.__dict__[attrname] = v
- return v
+ """ Return the value of an attribute. This function only computes the
+ attribute's value once, then adds it to the instance attributes (in
+ __dict__), so the next time it is returned directly
+ """
+ result = self.__getattr_helper(attrname)
+ self.__dict__[attrname] = result
+ return result
- # Add one or more new target(s) represented as a Python dictionary in 'new_targets'
- # It is an error to add a target with a name that already exists.
@staticmethod
def add_py_targets(new_targets):
+ """Add one or more new target(s) represented as a Python dictionary
+ in 'new_targets'. It is an error to add a target with a name that
+ already exists.
+ """
crt_data = Target.get_json_target_data()
- for tk, tv in new_targets.items():
- if crt_data.has_key(tk):
- raise Exception("Attempt to add target '%s' that already exists" % tk)
+ for target_key, target_value in new_targets.items():
+ if crt_data.has_key(target_key):
+ raise Exception(
+ "Attempt to add target '%s' that already exists"
+ % target_key)
# Add target data to the internal target dictionary
- crt_data[tk] = tv
+ crt_data[target_key] = target_value
# Create the new target and add it to the relevant data structures
- new_target = Target(tk)
+ new_target = Target(target_key)
TARGETS.append(new_target)
- TARGET_MAP[tk] = new_target
- TARGET_NAMES.append(tk)
+ TARGET_MAP[target_key] = new_target
+ TARGET_NAMES.append(target_key)
- # Return the target instance starting from the target name
@staticmethod
@cached
- def get_target(name):
- return Target(name)
+ def get_target(target_name):
+ """ Return the target instance starting from the target name """
+ return Target(target_name)
- def __init__(self, name):
- self.name = name
+ def __init__(self, target_name):
+ self.name = target_name
# Compute resolution order once (it will be used later in __getattr__)
self.resolution_order = self.__get_resolution_order(self.name, [])
- # Create also a list with only the names of the targets in the resolution order
- self.resolution_order_names = [t[0] for t in self.resolution_order]
+ # Create also a list with only the names of the targets in the
+ # resolution order
+ self.resolution_order_names = [target[0] for target
+ in self.resolution_order]
@property
def program_cycle_s(self):
+ """Special override for program_cycle_s as it's default value depends
+ upon is_disk_virtual
+ """
try:
return self.__getattr__("program_cycle_s")
except AttributeError:
return 4 if self.is_disk_virtual else 1.5
def get_labels(self):
+ """Get all possible labels for this target"""
labels = [self.name] + CORE_LABELS[self.core] + self.extra_labels
- # Automatically define UVISOR_UNSUPPORTED if the target doesn't specifically
- # define UVISOR_SUPPORTED
- if not "UVISOR_SUPPORTED" in labels:
+ # Automatically define UVISOR_UNSUPPORTED if the target doesn't
+ # specifically define UVISOR_SUPPORTED
+ if "UVISOR_SUPPORTED" not in labels:
labels.append("UVISOR_UNSUPPORTED")
return labels
- # For now, this function only allows "post binary" hooks (hooks that are executed after
- # the binary image is extracted from the executable file)
def init_hooks(self, hook, toolchain_name):
+ """Initialize the post-build hooks for a toolchain. For now, this
+ function only allows "post binary" hooks (hooks that are executed
+ after the binary image is extracted from the executable file)
+ """
+
# If there's no hook, simply return
try:
hook_data = self.post_binary_hook
except AttributeError:
return
- # A hook was found. The hook's name is in the format "classname.functionname"
+ # A hook was found. The hook's name is in the format
+ # "classname.functionname"
temp = hook_data["function"].split(".")
if len(temp) != 2:
- raise HookError("Invalid format for hook '%s' in target '%s' (must be 'class_name.function_name')" % (hook_data["function"], self.name))
+ raise HookError(
+ ("Invalid format for hook '%s' in target '%s'"
+ % (hook_data["function"], self.name)) +
+ " (must be 'class_name.function_name')")
class_name, function_name = temp[0], temp[1]
- # "class_name" must refer to a class in this file, so check if the class exists
+ # "class_name" must refer to a class in this file, so check if the
+ # class exists
mdata = self.get_module_data()
- if not mdata.has_key(class_name) or not inspect.isclass(mdata[class_name]):
- raise HookError("Class '%s' required by '%s' in target '%s' not found in targets.py" % (class_name, hook_data["function"], self.name))
- # "function_name" must refer to a static function inside class "class_name"
+ if not mdata.has_key(class_name) or \
+ not inspect.isclass(mdata[class_name]):
+ raise HookError(
+ ("Class '%s' required by '%s' in target '%s'"
+ % (class_name, hook_data["function"], self.name)) +
+ " not found in targets.py")
+ # "function_name" must refer to a static function inside class
+ # "class_name"
cls = mdata[class_name]
- if (not hasattr(cls, function_name)) or (not inspect.isfunction(getattr(cls, function_name))):
- raise HookError("Static function '%s' required by '%s' in target '%s' not found in class '%s'" % (function_name, hook_data["function"], self.name, class_name))
+ if (not hasattr(cls, function_name)) or \
+ (not inspect.isfunction(getattr(cls, function_name))):
+ raise HookError(
+ ("Static function '%s' " % function_name) +
+ ("required by '%s' " % hook_data["function"]) +
+ ("in target '%s' " % self.name) +
+ ("not found in class '%s'" % class_name))
# Check if the hook specification also has target restrictions
toolchain_restrictions = hook_data.get("toolchains", [])
- if toolchain_restrictions and (toolchain_name not in toolchain_restrictions):
+ if toolchain_restrictions and \
+ (toolchain_name not in toolchain_restrictions):
return
# Finally, hook the requested function
hook.hook_add_binary("post", getattr(cls, function_name))
-########################################################################################################################
+################################################################################
# Target specific code goes in this section
-# This code can be invoked from the target description using the "post_binary_hook" key
+# This code can be invoked from the target description using the
+# "post_binary_hook" key
-class LPCTargetCode:
+class LPCTargetCode(object):
+ """General LPC Target patching code"""
@staticmethod
def lpc_patch(t_self, resources, elf, binf):
+ """Patch an elf file"""
t_self.debug("LPC Patch: %s" % os.path.split(binf)[1])
patch(binf)
-class LPC4088Code:
+class LPC4088Code(object):
+ """Code specific to the LPC4088"""
@staticmethod
def binary_hook(t_self, resources, elf, binf):
+ """Hook to be run after an elf file is built"""
if not os.path.isdir(binf):
# Regular binary file, nothing to do
LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
@@ -274,7 +357,8 @@
outbin.write(data)
outbin.write('\xFF' * (512*1024 - len(data)))
partf.close()
- # Read and append the second part (external flash) in chunks of fixed size
+ # Read and append the second part (external flash) in chunks of fixed
+ # size
chunksize = 128 * 1024
partf = open(os.path.join(binf, "ER_IROM2"), "rb")
while True:
@@ -291,19 +375,23 @@
t_self.debug("Generated custom binary file (internal flash + SPIFI)")
LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
-class TEENSY3_1Code:
+class TEENSY3_1Code(object):
+ """Hooks for the TEENSY3.1"""
@staticmethod
def binary_hook(t_self, resources, elf, binf):
+ """Hook that is run after elf is generated"""
from intelhex import IntelHex
binh = IntelHex()
- binh.loadbin(binf, offset = 0)
+ binh.loadbin(binf, offset=0)
+
+ with open(binf.replace(".bin", ".hex"), "w") as file_desc:
+ binh.tofile(file_desc, format='hex')
- with open(binf.replace(".bin", ".hex"), "w") as f:
- binh.tofile(f, format='hex')
-
-class MTSCode:
+class MTSCode(object):
+ """Generic MTS code"""
@staticmethod
- def _combine_bins_helper(target_name, t_self, resources, elf, binf):
+ def _combine_bins_helper(target_name, binf):
+ """combine bins with the bootloader for a particular target"""
loader = os.path.join(TOOLS_BOOTLOADERS, target_name, "bootloader.bin")
target = binf + ".tmp"
if not os.path.exists(loader):
@@ -330,51 +418,62 @@
@staticmethod
def combine_bins_mts_dot(t_self, resources, elf, binf):
- MTSCode._combine_bins_helper("MTS_MDOT_F411RE", t_self, resources, elf, binf)
+ """A hook for the MTS MDOT"""
+ MTSCode._combine_bins_helper("MTS_MDOT_F411RE", binf)
@staticmethod
def combine_bins_mts_dragonfly(t_self, resources, elf, binf):
- MTSCode._combine_bins_helper("MTS_DRAGONFLY_F411RE", t_self, resources, elf, binf)
+ """A hoof for the MTS Dragonfly"""
+ MTSCode._combine_bins_helper("MTS_DRAGONFLY_F411RE", binf)
-class MCU_NRF51Code:
+class MCU_NRF51Code(object):
+ """NRF51 Hooks"""
@staticmethod
- def binary_hook(t_self, resources, elf, binf):
-
+ def binary_hook(t_self, resources, _, binf):
+ """Hook that merges the soft device with the bin file"""
# Scan to find the actual paths of soft device
sdf = None
- for softdeviceAndOffsetEntry in t_self.target.EXPECTED_SOFTDEVICES_WITH_OFFSETS:
+ for softdevice_and_offset_entry\
+ in t_self.target.EXPECTED_SOFTDEVICES_WITH_OFFSETS:
for hexf in resources.hex_files:
- if hexf.find(softdeviceAndOffsetEntry['name']) != -1:
- t_self.debug("SoftDevice file found %s." % softdeviceAndOffsetEntry['name'])
+ if hexf.find(softdevice_and_offset_entry['name']) != -1:
+ t_self.debug("SoftDevice file found %s."
+ % softdevice_and_offset_entry['name'])
sdf = hexf
- if sdf is not None: break
- if sdf is not None: break
+ if sdf is not None:
+ break
+ if sdf is not None:
+ break
if sdf is None:
t_self.debug("Hex file not found. Aborting.")
return
- # Look for bootloader file that matches this soft device or bootloader override image
+ # Look for bootloader file that matches this soft device or bootloader
+ # override image
blf = None
if t_self.target.MERGE_BOOTLOADER is True:
for hexf in resources.hex_files:
if hexf.find(t_self.target.OVERRIDE_BOOTLOADER_FILENAME) != -1:
- t_self.debug("Bootloader file found %s." % t_self.target.OVERRIDE_BOOTLOADER_FILENAME)
+ t_self.debug("Bootloader file found %s."
+ % t_self.target.OVERRIDE_BOOTLOADER_FILENAME)
blf = hexf
break
- elif hexf.find(softdeviceAndOffsetEntry['boot']) != -1:
- t_self.debug("Bootloader file found %s." % softdeviceAndOffsetEntry['boot'])
+ elif hexf.find(softdevice_and_offset_entry['boot']) != -1:
+ t_self.debug("Bootloader file found %s."
+ % softdevice_and_offset_entry['boot'])
blf = hexf
break
# Merge user code with softdevice
from intelhex import IntelHex
binh = IntelHex()
- binh.loadbin(binf, offset=softdeviceAndOffsetEntry['offset'])
+ binh.loadbin(binf, offset=softdevice_and_offset_entry['offset'])
if t_self.target.MERGE_SOFT_DEVICE is True:
- t_self.debug("Merge SoftDevice file %s" % softdeviceAndOffsetEntry['name'])
+ t_self.debug("Merge SoftDevice file %s"
+ % softdevice_and_offset_entry['name'])
sdh = IntelHex(sdf)
binh.merge(sdh)
@@ -383,13 +482,21 @@
blh = IntelHex(blf)
binh.merge(blh)
- with open(binf.replace(".bin", ".hex"), "w") as f:
- binh.tofile(f, format='hex')
+ with open(binf.replace(".bin", ".hex"), "w") as fileout:
+ binh.tofile(fileout, format='hex')
-########################################################################################################################
+class NCS36510TargetCode(object):
+ """NCS36510 Hooks"""
+ @staticmethod
+ def ncs36510_addfib(t_self, resources, elf, binf):
+ """Hook that runs add_fib_at_start on the built binary"""
+ from tools.add_fib import add_fib_at_start
+ print("binf %s" % binf)
+ add_fib_at_start(binf[:-4])
+################################################################################
# Instantiate all public targets
-TARGETS = [Target.get_target(name) for name, value in Target.get_json_target_data().items() if value.get("public", True)]
+TARGETS = [Target.get_target(name) for name, value
+ in Target.get_json_target_data().items()
+ if value.get("public", True)]
# Map each target name to its unique instance
TARGET_MAP = dict([(t.name, t) for t in TARGETS])
@@ -409,14 +516,17 @@
result[detect_code] = target.name
return result
-# Sets the location of the JSON file that contains the targets
-def set_targets_json_location(location):
+def set_targets_json_location(location=None):
+ """Sets the location of the JSON file that contains the targets"""
# First instruct Target about the new location
Target.set_targets_json_location(location)
- # Then re-initialize TARGETS, TARGET_MAP and TARGET_NAMES
- # The re-initialization does not create new variables, it keeps the old ones instead
- # This ensures compatibility with code that does "from tools.targets import TARGET_NAMES"
- TARGETS[:] = [Target.get_target(name) for name, value in Target.get_json_target_data().items() if value.get("public", True)]
+ # Then re-initialize TARGETS, TARGET_MAP and TARGET_NAMES. The
+ # re-initialization does not create new variables, it keeps the old ones
+ # instead. This ensures compatibility with code that does
+ # "from tools.targets import TARGET_NAMES"
+ TARGETS[:] = [Target.get_target(target) for target, obj
+ in Target.get_json_target_data().items()
+ if obj.get("public", True)]
TARGET_MAP.clear()
- TARGET_MAP.update(dict([(t.name, t) for t in TARGETS]))
+ TARGET_MAP.update(dict([(target.name, target) for target in TARGETS]))
TARGET_NAMES[:] = TARGET_MAP.keys()
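
The "_add"/"_remove" handling in __getattr_cumulative is the subtle part of the targets refactor. Below is a toy illustration of the merge semantics, including the name-to-definition mapping that lets a macro with a value be removed by its bare name. The data is hand-written for illustration rather than loaded from targets.json:

    def merge_macros(inherited, data):
        """Toy version of the cumulative merge for a 'macros' attribute"""
        value = list(inherited)
        value.extend(data.get("macros_add", []))
        # map macro name -> full definition so "MACRO=10" can be
        # removed by giving just "MACRO"
        name_def_map = {}
        for crtv in value:
            name = crtv.split('=')[0] if '=' in crtv else crtv
            name_def_map[name] = crtv
        for element in data.get("macros_remove", []):
            value.remove(name_def_map[element])
        return value

    parent_macros = ["MACRO_A", "MACRO_B=10"]
    child = {"macros_add": ["MACRO_C"], "macros_remove": ["MACRO_B"]}
    print(merge_macros(parent_macros, child))
    # ['MACRO_A', 'MACRO_C'] -- MACRO_B=10 removed by bare name
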
--- a/test.py Mon Aug 29 10:55:42 2016 +0100
+++ b/test.py Mon Aug 29 11:18:36 2016 +0100
@@ -29,7 +29,7 @@
from tools.test_api import test_path_to_name, find_tests, print_tests, build_tests, test_spec_from_test_builds
from tools.options import get_default_options_parser
from tools.build_api import build_project, build_library
-from tools.build_api import print_build_memory_usage_results
+from tools.build_api import print_build_memory_usage
from tools.targets import TARGET_MAP
from tools.utils import mkdir, ToolException, NotSupportedException, args_error
from tools.test_exporters import ReportExporter, ResultExporterType
@@ -177,7 +177,8 @@
macros=options.macros,
verbose=options.verbose,
notify=notify,
- archive=False)
+ archive=False,
+ remove_config_header_file=True)
library_build_success = True
except ToolException, e:
@@ -230,7 +231,7 @@
# Print memory map summary on screen
if build_report:
print
- print print_build_memory_usage_results(build_report)
+ print print_build_memory_usage(build_report)
print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
status = print_report_exporter.report(build_report)
--- a/test/config_test/test12/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test12/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,7 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": [],
- "default_build": "standard"
+ "default_lib": "std"
}
},
"target_overrides": {
--- a/test/config_test/test16/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test16/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,7 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": [],
- "default_build": "standard"
+ "default_lib": "std"
}
},
"macros": ["APP1=10", "APP2", "LIB2_1=5"]
--- a/test/config_test/test21/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test21/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,7 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": [],
- "default_build": "standard"
+ "default_lib": "std"
}
},
"target_overrides": {
--- a/test/config_test/test22/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test22/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,7 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": [],
- "default_build": "standard"
+ "default_lib": "std"
}
},
"target_overrides": {
@@ -13,4 +13,3 @@
}
}
}
-
--- a/test/config_test/test24/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test24/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,7 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": [],
- "default_build": "standard"
+ "default_lib": "std"
}
},
"target_overrides": {
--- a/test/config_test/test26/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test26/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,7 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": [],
- "default_build": "standard"
+ "default_lib": "std"
}
},
"target_overrides": {
--- a/test/config_test/test27/mbed_app.json Mon Aug 29 10:55:42 2016 +0100
+++ b/test/config_test/test27/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -4,8 +4,7 @@
"core": "Cortex-M0",
"extra_labels": [],
"features": ["IPV4"],
- "default_build": "standard"
+ "default_lib": "std"
}
}
}
-
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/config_test/test28/mbed_app.json Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,16 @@
+{
+ "custom_targets": {
+ "test_target": {
+ "core": "Cortex-M0",
+ "extra_labels": [],
+ "features": [],
+ "default_lib": "std"
+ }
+ },
+ "target_overrides": {
+ "*": {
+ "target.features_add": ["UVISOR"],
+ "target.extra_labels_add": ["UVISOR_SUPPORTED"]
+ }
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/config_test/test28/test_data.py Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,8 @@
+# Testing when adding two features
+
+expected_results = {
+ "test_target": {
+ "desc": "test uvisor feature",
+ "expected_features": ["UVISOR"]
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/pylint.py Mon Aug 29 11:18:36 2016 +0100
@@ -0,0 +1,48 @@
+"""A test that all code scores above a 9.25 in pylint"""
+
+import subprocess
+import re
+import os.path
+
+SCORE_REGEXP = re.compile(
+    r'^Your\ code\ has\ been\ rated\ at\ (\-?[0-9\.]+)/10')
+
+TOOLS_ROOT = os.path.dirname(os.path.dirname(__file__))
+
+
+def parse_score(pylint_output):
+    """Parse the score out of pylint's output as a float. If the score is not
+    found, return 0.0.
+    """
+    for line in pylint_output.splitlines():
+        match = re.match(SCORE_REGEXP, line)
+        if match:
+            return float(match.group(1))
+    return 0.0
+
+def execute_pylint(filename):
+    """Execute a pylint process and collect its output
+    """
+    process = subprocess.Popen(
+        ["pylint", filename],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE
+    )
+    stout, sterr = process.communicate()
+    status = process.poll()
+    return status, stout, sterr
+
+FILES = ["build_api.py", "config.py", "colorize.py", "detect_targets.py",
+         "hooks.py", "libraries.py", "memap.py", "options.py", "paths.py",
+         "targets.py", "test/pylint.py"]
+
+if __name__ == "__main__":
+    for python_module in FILES:
+        _, stdout, stderr = execute_pylint(os.path.join(TOOLS_ROOT,
+                                                        python_module))
+        score = parse_score(stdout)
+        if score < 9.25:
+            print(stdout)
+
+
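As a rough usage note for the helpers above (assuming pylint is installed and on PATH; the module and score shown are illustrative):

    status, out, err = execute_pylint(os.path.join(TOOLS_ROOT, "targets.py"))
    print(parse_score(out))   # e.g. 9.37; anything below 9.25 dumps the full report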
--- a/tests.py Mon Aug 29 10:55:42 2016 +0100
+++ b/tests.py Mon Aug 29 11:18:36 2016 +0100
@@ -180,7 +180,7 @@
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["analog_loop"],
- "mcu": ["LPC1768", "LPC2368", "LPC2460", "KL25Z", "K64F", "K22F", "LPC4088", "LPC1549",
+ "mcu": ["LPC1768", "LPC2368", "LPC2460", "KL25Z", "K64F", "K66F", "K22F", "LPC4088", "LPC1549",
"NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303K8", "NUCLEO_F303RE", "NUCLEO_F207ZG",
"NUCLEO_F334R8", "NUCLEO_L053R8", "NUCLEO_L073RZ", "NUCLEO_L152RE",
"NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "NUCLEO_F446ZE",
@@ -328,6 +328,18 @@
"DISCO_F746NG", "DISCO_L476VG", "NUCLEO_L476RG", "NUCLEO_L432KC"]
},
{
+ "id": "MBED_A28", "description": "CAN loopback test",
+ "source_dir": join(TEST_DIR, "mbed", "can_loopback"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 20,
+ "mcu": ["B96B_F446VE",
+ "NUCLEO_F091RC", "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F207ZG",
+ "NUCLEO_F303RE", "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE","NUCLEO_F446ZE",
+ "DISCO_F469NI", "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG",
+ "DISCO_F746NG", "DISCO_L476VG", "NUCLEO_L476RG", "NUCLEO_L432KC"]
+ },
+ {
"id": "MBED_BLINKY", "description": "Blinky",
"source_dir": join(TEST_DIR, "mbed", "blinky"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
@@ -345,6 +357,7 @@
"id": "MBED_BUSOUT", "description": "BusOut",
"source_dir": join(TEST_DIR, "mbed", "bus_out"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "exclude_mcu": ["NUCLEO_L011K4"],
"automated": True,
"duration": 15,
},
@@ -505,6 +518,7 @@
"NRF51_MICROBIT", "NRF51_MICROBIT_B", "NRF51_MICROBIT_BOOT",
"NRF51_MICROBIT_B_BOOT", "NRF51_MICROBIT_B_OTA", "NRF51_MICROBIT_OTA",
"HRM1017", "HRM1017_BOOT", "HRM1701_OTA",
+ "NUCLEO_L011K4",
"TY51822R3", "TY51822R3_BOOT", "TY51822R3_OTA",
"NRF15_DONGLE", "NRF15_DONGLE_BOOT", "NRF15_DONGLE_OTA",
"ARCH_BLE", "ARCH_BLE_BOOT", "ARCH_BLE_OTA",
@@ -655,12 +669,14 @@
"id": "MBED_37", "description": "Serial NC RX",
"source_dir": join(TEST_DIR, "mbed", "serial_nc_rx"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "exclude_mcu": ["NUCLEO_L011K4"],
"automated": True
},
{
"id": "MBED_38", "description": "Serial NC TX",
"source_dir": join(TEST_DIR, "mbed", "serial_nc_tx"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "exclude_mcu": ["NUCLEO_L011K4"],
"automated": True
},
{
@@ -725,13 +741,13 @@
"automated": True,
#"host_test": "wait_us_auto",
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_2", "description": "Mutex resource lock",
@@ -740,14 +756,14 @@
"duration": 20,
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
"NUCLEO_F446ZE", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_3", "description": "Semaphore resource lock",
@@ -756,7 +772,7 @@
"duration": 20,
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
@@ -764,7 +780,7 @@
"NUCLEO_F103RB", "DISCO_F746NG",
"NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_4", "description": "Signals messaging",
@@ -772,7 +788,7 @@
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
@@ -780,7 +796,7 @@
"NUCLEO_F103RB", "DISCO_F746NG",
"NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_5", "description": "Queue messaging",
@@ -788,14 +804,14 @@
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_6", "description": "Mail messaging",
@@ -803,14 +819,14 @@
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_7", "description": "Timer",
@@ -820,14 +836,14 @@
"automated": True,
#"host_test": "wait_us_auto",
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_8", "description": "ISR (Queue)",
@@ -835,14 +851,14 @@
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "KL25Z", "KL05Z", "K64F", "K66F", "KL46Z", "HEXIWEAR",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB", "NUCLEO_F429ZI",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB", "NUCLEO_F207ZG",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_L432KC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F446ZE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
"EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
+ "NRF51822", "NRF51_DK", "SEEED_TINY_BLE", "ARM_BEETLE_SOC", "NUCLEO_F767ZI"],
},
{
"id": "RTOS_9", "description": "SD File write-read",
@@ -850,8 +866,8 @@
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"peripherals": ["SD"],
- "mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z",
- "KL05Z", "K64F", "KL46Z", "RZ_A1H",
+ "mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z", "HEXIWEAR",
+ "KL05Z", "K64F", "K66F", "KL46Z", "RZ_A1H",
"DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE", "NUCLEO_F410RB", "DISCO_F469NI", "NUCLEO_F207ZG"],
},
@@ -1069,6 +1085,7 @@
"id": "EXAMPLE_1", "description": "/dev/null",
"source_dir": join(TEST_DIR, "mbed", "dev_null"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "exclude_mcu": ["NUCLEO_L011K4"],
"automated": True,
#"host_test" : "dev_null_auto",
},
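The new exclude_mcu key reads as a deny-list counterpart to the mcu allow-list. A hedged sketch of the presumed selection semantics (the helper name is hypothetical, not part of this commit):

    def target_runs_test(test, target):
        # presumed semantics: mcu, when present, is an allow-list;
        # exclude_mcu is a deny-list that wins over it
        if "mcu" in test and target not in test["mcu"]:
            return False
        return target not in test.get("exclude_mcu", [])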
--- a/toolchains/__init__.py Mon Aug 29 10:55:42 2016 +0100
+++ b/toolchains/__init__.py Mon Aug 29 11:18:36 2016 +0100
@@ -188,6 +188,23 @@
}
+def check_toolchain_path(function):
+ """Check if the path to toolchain is valid. Exit if not.
+ Use this function as a decorator. Causes a system exit if the path does
+ not exist. Execute the function as normal if the path does exist.
+
+ Positional arguments:
+ function -- the function to decorate
+ """
+ def perform_check(self, *args, **kwargs):
+ if not exists(self.toolchain_path) and not exists(self.toolchain_path+'.exe'):
+ error_string = 'Could not find executable for %s.\n Currently ' \
+ 'set search path: %s'% (self.name, self.toolchain_path)
+ raise Exception(error_string)
+ return function(self, *args, **kwargs)
+ return perform_check
+
+
class mbedToolchain:
# Verbose logging
VERBOSE = True
@@ -230,7 +247,8 @@
self.macros = macros or []
# Macros generated from toolchain and target rules/features
- self.symbols = None
+ self.asm_symbols = None
+ self.cxx_symbols = None
# Labels generated from toolchain and target rules/features (used for selective build)
self.labels = None
@@ -372,36 +390,50 @@
event['toolchain'] = self
return self.notify_fun(event, self.silent)
- def get_symbols(self):
- if self.symbols is None:
- # Target and Toolchain symbols
- labels = self.get_labels()
- self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
- self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
+ def get_symbols(self, for_asm=False):
+ if for_asm:
+ if self.asm_symbols is None:
+ self.asm_symbols = []
+
+ # Cortex CPU symbols
+ if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
+ self.asm_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
- # Cortex CPU symbols
- if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
- self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
+ # Add target's symbols
+ self.asm_symbols += self.target.macros
+ # Add extra symbols passed via 'macros' parameter
+ self.asm_symbols += self.macros
+ return list(set(self.asm_symbols)) # Return only unique symbols
+ else:
+ if self.cxx_symbols is None:
+ # Target and Toolchain symbols
+ labels = self.get_labels()
+ self.cxx_symbols = ["TARGET_%s" % t for t in labels['TARGET']]
+ self.cxx_symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
- # Symbols defined by the on-line build.system
- self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
- if MBED_ORG_USER:
- self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
+ # Cortex CPU symbols
+ if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
+ self.cxx_symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
+
+ # Symbols defined by the on-line build.system
+ self.cxx_symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
+ if MBED_ORG_USER:
+ self.cxx_symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
- # Add target's symbols
- self.symbols += self.target.macros
- # Add target's hardware
- self.symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
- # Add target's features
- self.symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
- # Add extra symbols passed via 'macros' parameter
- self.symbols += self.macros
+ # Add target's symbols
+ self.cxx_symbols += self.target.macros
+ # Add target's hardware
+ self.cxx_symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
+ # Add target's features
+ self.cxx_symbols += ["FEATURE_" + data + "=1" for data in self.target.features]
+ # Add extra symbols passed via 'macros' parameter
+ self.cxx_symbols += self.macros
- # Form factor variables
- if hasattr(self.target, 'supported_form_factors'):
- self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
+ # Form factor variables
+ if hasattr(self.target, 'supported_form_factors'):
+ self.cxx_symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
- return list(set(self.symbols)) # Return only unique symbols
+ return list(set(self.cxx_symbols)) # Return only unique symbols
# Extend the internal list of macros
def add_macros(self, new_macros):
@@ -687,6 +719,7 @@
# THIS METHOD IS BEING CALLED BY THE MBED ONLINE BUILD SYSTEM
# ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
+ @check_toolchain_path
def compile_sources(self, resources, build_path, inc_dirs=None):
# Web IDE progress bar for project build
files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
@@ -796,7 +829,7 @@
except ToolException, err:
if p._taskqueue.queue:
p._taskqueue.queue.clear()
- sleep(0.1)
+ sleep(0.5)
p.terminate()
p.join()
raise ToolException(err)
@@ -885,6 +918,7 @@
else:
raise ToolException(_stderr)
+ @check_toolchain_path
def build_library(self, objects, dir, name):
needed_update = False
lib = self.STD_LIB_NAME % name
@@ -896,6 +930,7 @@
return needed_update
+ @check_toolchain_path
def link_program(self, r, tmp_path, name):
needed_update = False
ext = 'bin'
@@ -1020,7 +1055,7 @@
# Here we return memory statistics structure (constructed after
# call to generate_output) which contains raw data in bytes
# about sections + summary
- return memap.get_memory_summary()
+ return memap.mem_summary
# Set the configuration data
def set_config_data(self, config_data):
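A small sketch of the new decorator in action (toolchain construction elided; the path is deliberately bogus): any decorated entry point now fails fast instead of invoking a missing tool.

    toolchain.toolchain_path = "/nonexistent/armcc"   # simulate a missing install
    try:
        toolchain.compile_sources(resources, build_path)
    except Exception as exc:
        print(exc)   # Could not find executable for ARM. Currently set search path: ...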
--- a/toolchains/arm.py Mon Aug 29 10:55:42 2016 +0100
+++ b/toolchains/arm.py Mon Aug 29 11:18:36 2016 +0100
@@ -15,12 +15,12 @@
limitations under the License.
"""
import re
-from os.path import join, dirname, splitext, basename, exists
+from os.path import join, dirname, splitext, basename
+from distutils.spawn import find_executable
from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
from tools.hooks import hook_tool
from tools.utils import mkdir
-import copy
class ARM(mbedToolchain):
LINKER_EXT = '.sct'
@@ -56,6 +56,11 @@
else:
cpu = target.core
+ if not TOOLCHAIN_PATHS['ARM']:
+ exe = find_executable('armcc')
+ if exe:
+ TOOLCHAIN_PATHS['ARM'] = dirname(dirname(exe))
+
ARM_BIN = join(TOOLCHAIN_PATHS['ARM'], "bin")
ARM_INC = join(TOOLCHAIN_PATHS['ARM'], "include")
@@ -81,6 +86,8 @@
self.ar = join(ARM_BIN, "armar")
self.elf2bin = join(ARM_BIN, "fromelf")
+ self.toolchain_path = TOOLCHAIN_PATHS['ARM']
+
def parse_dependencies(self, dep_path):
dependencies = []
for line in open(dep_path).readlines():
@@ -128,16 +135,17 @@
def get_config_option(self, config_header):
return ['--preinclude=' + config_header]
- def get_compile_options(self, defines, includes):
+ def get_compile_options(self, defines, includes, for_asm=False):
opts = ['-D%s' % d for d in defines]
if self.RESPONSE_FILES:
opts += ['--via', self.get_inc_file(includes)]
else:
opts += ["-I%s" % i for i in includes]
- config_header = self.get_config_header()
- if config_header is not None:
- opts = opts + self.get_config_option(config_header)
+ if not for_asm:
+ config_header = self.get_config_header()
+ if config_header is not None:
+ opts = opts + self.get_config_option(config_header)
return opts
@hook_tool
@@ -148,7 +156,7 @@
tempfile = join(dir, basename(object) + '.E.s')
# Build preprocess assemble command
- cmd_pre = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-E", "-o", tempfile, source]
+ cmd_pre = self.asm + self.get_compile_options(self.get_symbols(True), includes) + ["-E", "-o", tempfile, source]
# Build main assemble command
cmd = self.asm + ["-o", object, tempfile]
--- a/toolchains/gcc.py Mon Aug 29 10:55:42 2016 +0100
+++ b/toolchains/gcc.py Mon Aug 29 11:18:36 2016 +0100
@@ -16,6 +16,7 @@
"""
import re
from os.path import join, basename, splitext, dirname, exists
+from distutils.spawn import find_executable
from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
from tools.hooks import hook_tool
@@ -39,7 +40,7 @@
'c': ["-std=gnu99"],
'cxx': ["-std=gnu++98", "-fno-rtti", "-Wvla"],
'ld': ["-Wl,--gc-sections", "-Wl,--wrap,main",
- "-Wl,--wrap,_malloc_r", "-Wl,--wrap,_free_r", "-Wl,--wrap,_realloc_r"],
+ "-Wl,--wrap,_malloc_r", "-Wl,--wrap,_free_r", "-Wl,--wrap,_realloc_r", "-Wl,--wrap,_calloc_r"],
}
def __init__(self, target, options=None, notify=None, macros=None, silent=False, tool_path="", extra_verbose=False):
@@ -110,6 +111,11 @@
self.ar = join(tool_path, "arm-none-eabi-ar")
self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")
+ if tool_path:
+ self.toolchain_path = main_cc
+ else:
+ self.toolchain_path = find_executable("arm-none-eabi-gcc") or ''
+
def parse_dependencies(self, dep_path):
dependencies = []
buff = open(dep_path).readlines()
@@ -138,7 +144,7 @@
# The warning/error notification is multiline
msg = None
for line in output.splitlines():
- match = GCC.DIAGNOSTIC_PATTERN.match(line)
+ match = GCC.DIAGNOSTIC_PATTERN.search(line)
if match is not None:
if msg is not None:
self.cc_info(msg)
@@ -170,22 +176,23 @@
def get_config_option(self, config_header):
return ['-include', config_header]
- def get_compile_options(self, defines, includes):
+ def get_compile_options(self, defines, includes, for_asm=False):
opts = ['-D%s' % d for d in defines]
if self.RESPONSE_FILES:
opts += ['@%s' % self.get_inc_file(includes)]
else:
opts += ["-I%s" % i for i in includes]
- config_header = self.get_config_header()
- if config_header is not None:
- opts = opts + self.get_config_option(config_header)
+ if not for_asm:
+ config_header = self.get_config_header()
+ if config_header is not None:
+ opts = opts + self.get_config_option(config_header)
return opts
@hook_tool
def assemble(self, source, object, includes):
# Build assemble command
- cmd = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-o", object, source]
+ cmd = self.asm + self.get_compile_options(self.get_symbols(True), includes) + ["-o", object, source]
# Call cmdline hook
cmd = self.hook.get_cmdline_assembler(cmd)
@@ -272,13 +279,13 @@
GCC.__init__(self, target, options, notify, macros, silent, TOOLCHAIN_PATHS['GCC_ARM'], extra_verbose=extra_verbose)
# Use latest gcc nanolib
- if "big-build" in self.options:
+ if "std-lib" in self.options:
use_nano = False
- elif "small-build" in self.options:
+ elif "small-lib" in self.options:
use_nano = True
- elif target.default_build == "standard":
+ elif target.default_lib == "std":
use_nano = False
- elif target.default_build == "small":
+ elif target.default_lib == "small":
use_nano = True
else:
use_nano = False
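The use_nano branch above, restated as a pure function to make the precedence explicit (a sketch, not part of the commit):

    def wants_nano(options, default_lib):
        if "std-lib" in options:        # an explicit option wins over the target
            return False
        if "small-lib" in options:
            return True
        return default_lib == "small"   # "std" (or anything else) keeps full newlib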
--- a/toolchains/iar.py Mon Aug 29 10:55:42 2016 +0100
+++ b/toolchains/iar.py Mon Aug 29 11:18:36 2016 +0100
@@ -17,6 +17,7 @@
import re
from os import remove
from os.path import join, exists, dirname, splitext, exists
+from distutils.spawn import find_executable
from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
from tools.hooks import hook_tool
@@ -50,6 +51,12 @@
cpuchoice = "Cortex-M7"
else:
cpuchoice = target.core
+
+ if not TOOLCHAIN_PATHS['IAR']:
+ exe = find_executable('iccarm')
+ if exe:
+ TOOLCHAIN_PATHS['IAR'] = dirname(dirname(exe))
+
# flags_cmd are used only by our scripts, the project files have them already defined,
# using this flags results in the errors (duplication)
# asm accepts --cpu Core or --fpu FPU, not like c/c++ --cpu=Core
@@ -101,6 +108,8 @@
self.ar = join(IAR_BIN, "iarchive")
self.elf2bin = join(IAR_BIN, "ielftool")
+ self.toolchain_path = TOOLCHAIN_PATHS['IAR']
+
def parse_dependencies(self, dep_path):
return [(self.CHROOT if self.CHROOT else '')+path.strip() for path in open(dep_path).readlines()
if (path and not path.isspace())]
@@ -151,12 +160,7 @@
else:
opts += ["-I%s" % i for i in includes]
- config_header = self.get_config_header()
- if for_asm:
- # The assembler doesn't support '--preinclude', so we need to add
- # the macros directly
- opts = opts + ['-D%s' % d for d in self.get_config_macros()]
- else:
+ if not for_asm:
config_header = self.get_config_header()
if config_header is not None:
opts = opts + self.get_config_option(config_header)
@@ -165,7 +169,7 @@
@hook_tool
def assemble(self, source, object, includes):
# Build assemble command
- cmd = self.asm + self.get_compile_options(self.get_symbols(), includes, for_asm=True) + ["-o", object, source]
+ cmd = self.asm + self.get_compile_options(self.get_symbols(True), includes, True) + ["-o", object, source]
# Call cmdline hook
cmd = self.hook.get_cmdline_assembler(cmd)
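Across all three toolchains the assembler now requests a reduced symbol set via get_symbols(True). Roughly, per the toolchains/__init__.py change above (illustrative, not exhaustive):

    asm_syms = toolchain.get_symbols(for_asm=True)  # CPU symbols + target/user macros
    cxx_syms = toolchain.get_symbols()              # also TARGET_/TOOLCHAIN_ labels,
                                                    # DEVICE_/FEATURE_ defines, timestamp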
--- a/utils.py Mon Aug 29 10:55:42 2016 +0100
+++ b/utils.py Mon Aug 29 11:18:36 2016 +0100
@@ -21,18 +21,26 @@
import math
from os import listdir, remove, makedirs
from shutil import copyfile
-from os.path import isdir, join, exists, split, relpath, splitext, abspath, commonprefix, normpath
+from os.path import isdir, join, exists, split, relpath, splitext, abspath
+from os.path import commonprefix, normpath
from subprocess import Popen, PIPE, STDOUT, call
import json
from collections import OrderedDict
import logging
def compile_worker(job):
+ """Standard task runner used for compiling
+
+ Positional arguments:
+ job - a dict containing a list of commands and the remaining arguments
+ to run_cmd
+ """
results = []
for command in job['commands']:
try:
- _, _stderr, _rc = run_cmd(command, work_dir=job['work_dir'], chroot=job['chroot'])
- except KeyboardInterrupt as e:
+ _, _stderr, _rc = run_cmd(command, work_dir=job['work_dir'],
+ chroot=job['chroot'])
+ except KeyboardInterrupt:
raise ToolException
results.append({
@@ -48,96 +56,143 @@
'results': results
}
-def cmd(l, check=True, verbose=False, shell=False, cwd=None):
- text = l if shell else ' '.join(l)
+def cmd(command, check=True, verbose=False, shell=False, cwd=None):
+ """A wrapper to run a command as a blocking job"""
+ text = command if shell else ' '.join(command)
if verbose:
print text
- rc = call(l, shell=shell, cwd=cwd)
- if check and rc != 0:
- raise Exception('ERROR %d: "%s"' % (rc, text))
+ return_code = call(command, shell=shell, cwd=cwd)
+ if check and return_code != 0:
+ raise Exception('ERROR %d: "%s"' % (return_code, text))
def run_cmd(command, work_dir=None, chroot=None, redirect=False):
+ """Run a command in the forground
+
+ Positional arguments:
+ command - the command to run
+
+ Keyword arguments:
+ work_dir - the working directory to run the command in
+ chroot - the chroot to run the command in
+ redirect - redirect the stderr to a pipe to be used later
+ """
if chroot:
# Conventions managed by the web team for the mbed.org build system
chroot_cmd = [
'/usr/sbin/chroot', '--userspec=33:33', chroot
]
- for c in command:
- chroot_cmd += [c.replace(chroot, '')]
+ for element in command:
+ chroot_cmd += [element.replace(chroot, '')]
- logging.debug("Running command %s"%' '.join(chroot_cmd))
+ logging.debug("Running command %s", ' '.join(chroot_cmd))
command = chroot_cmd
work_dir = None
try:
- p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=work_dir)
- _stdout, _stderr = p.communicate()
- except OSError as e:
+ process = Popen(command, stdout=PIPE,
+ stderr=STDOUT if redirect else PIPE, cwd=work_dir)
+ _stdout, _stderr = process.communicate()
+ except OSError:
print "[OS ERROR] Command: "+(' '.join(command))
raise
- return _stdout, _stderr, p.returncode
+ return _stdout, _stderr, process.returncode
def run_cmd_ext(command):
+ """ A version of run command that checks if the command exists befor running
+
+ Positional arguments:
+ command - the command line you are trying to invoke
+ """
assert is_cmd_valid(command[0])
- p = Popen(command, stdout=PIPE, stderr=PIPE)
- _stdout, _stderr = p.communicate()
- return _stdout, _stderr, p.returncode
+ process = Popen(command, stdout=PIPE, stderr=PIPE)
+ _stdout, _stderr = process.communicate()
+ return _stdout, _stderr, process.returncode
-def is_cmd_valid(cmd):
+def is_cmd_valid(command):
+ """ Verify that a command exists and is executable
+
+ Positional arguments:
+ command - the command to check
+ """
caller = get_caller_name()
- abspath = find_cmd_abspath(cmd)
- if not abspath:
- error("%s: Command '%s' can't be found" % (caller, cmd))
- if not is_exec(abspath):
- error("%s: Command '%s' resolves to file '%s' which is not executable" % (caller, cmd, abspath))
+ cmd_path = find_cmd_abspath(command)
+ if not cmd_path:
+ error("%s: Command '%s' can't be found" % (caller, command))
+ if not is_exec(cmd_path):
+ error("%s: Command '%s' resolves to file '%s' which is not executable"
+ % (caller, command, cmd_path))
return True
def is_exec(path):
+ """A simple check to verify that a path to an executable exists
+
+ Positional arguments:
+ path - the executable
+ """
return os.access(path, os.X_OK) or os.access(path+'.exe', os.X_OK)
-def find_cmd_abspath(cmd):
+def find_cmd_abspath(command):
""" Returns the absolute path to a command.
None is returned if no absolute path was found.
+
+ Positional arguments:
+ command - the command to find the path of
"""
- if exists(cmd) or exists(cmd + '.exe'):
- return os.path.abspath(cmd)
+ if exists(command) or exists(command + '.exe'):
+ return os.path.abspath(command)
if not 'PATH' in os.environ:
- raise Exception("Can't find command path for current platform ('%s')" % sys.platform)
- PATH=os.environ['PATH']
- for path in PATH.split(os.pathsep):
- abspath = '%s/%s' % (path, cmd)
- if exists(abspath) or exists(abspath + '.exe'):
- return abspath
+ raise Exception("Can't find command path for current platform ('%s')"
+ % sys.platform)
+ path_env = os.environ['PATH']
+ for path in path_env.split(os.pathsep):
+ cmd_path = '%s/%s' % (path, command)
+ if exists(cmd_path) or exists(cmd_path + '.exe'):
+ return cmd_path
def mkdir(path):
+ """ a wrapped makedirs that only tries to create a directory if it does not
+ exist already
+
+ Positional arguments:
+ path - the path to maybe create
+ """
if not exists(path):
makedirs(path)
def copy_file(src, dst):
""" Implement the behaviour of "shutil.copy(src, dst)" without copying the
- permissions (this was causing errors with directories mounted with samba)
+ permissions (this was causing errors with directories mounted with samba)
+
+ Positional arguments:
+ src - the source of the copy operation
+ dst - the destination of the copy operation
"""
if isdir(dst):
- _, file = split(src)
- dst = join(dst, file)
+ _, base = split(src)
+ dst = join(dst, base)
copyfile(src, dst)
-def delete_dir_files(dir):
- if not exists(dir):
+def delete_dir_files(directory):
+ """ A function that does rm -rf
+
+ Positional arguments:
+ directory - the directory to remove
+ """
+ if not exists(directory):
return
- for f in listdir(dir):
- file = join(dir, f)
- if not isdir(file):
+ for element in listdir(directory):
+ to_remove = join(directory, element)
+ if not isdir(to_remove):
- remove(file)
+ remove(to_remove)
@@ -145,34 +200,58 @@
"""
When called inside a function, it returns the name
of the caller of that function.
+
+ Keyword arguments:
+ steps - the number of steps up the stack the calling function is
"""
return inspect.stack()[steps][3]
def error(msg):
+ """Fatal error, abort hard
+
+ Positional arguments:
+ msg - the message to print before crashing
+ """
print("ERROR: %s" % msg)
sys.exit(1)
def rel_path(path, base, dot=False):
- p = relpath(path, base)
- if dot and not p.startswith('.'):
- p = './' + p
- return p
+ """Relative path calculation that optionaly always starts with a dot
+
+ Positional arguments:
+ path - the path to make relative
+ base - what to make the path relative to
+
+ Keyword arguments:
+ dot - if True, the path will always start with a './'
+ """
+ final_path = relpath(path, base)
+ if dot and not final_path.startswith('.'):
+ final_path = './' + final_path
+ return final_path
class ToolException(Exception):
+ """A class representing an exception throw by the tools"""
pass
class NotSupportedException(Exception):
+ """A class a toolchain not supporting a particular target"""
pass
class InvalidReleaseTargetException(Exception):
pass
def split_path(path):
- base, file = split(path)
- name, ext = splitext(file)
+ """spilt a file name into it's directory name, base name, and extension
+
+ Positional arguments:
+ path - the file name to split
+ """
+ base, has_ext = split(path)
+ name, ext = splitext(has_ext)
return base, name, ext
@@ -181,12 +260,15 @@
This roughly translates to the number of path separators (os.sep) + 1.
Ex. Given "path/to/dir", this would return 3
Special cases: "." and "/" return 0
+
+ Positional arguments:
+ path - the path to calculate the depth of
"""
normalized_path = normpath(path)
path_depth = 0
head, tail = split(normalized_path)
- while(tail and tail != '.'):
+ while tail and tail != '.':
path_depth += 1
head, tail = split(head)
@@ -194,18 +276,28 @@
def args_error(parser, message):
+ """Abort with an error that was generated by the arguments to a CLI program
+
+ Positional arguments:
+ parser - the ArgumentParser object that parsed the command line
+ message - what went wrong
+ """
print "\n\n%s\n\n" % message
parser.print_help()
sys.exit()
def construct_enum(**enums):
- """ Create your own pseudo-enums """
+ """ Create your own pseudo-enums
+
+ Keyword arguments:
+ * - a member of the Enum you are creating and its value
+ """
return type('Enum', (), enums)
def check_required_modules(required_modules, verbose=True):
- """ Function checks for Python modules which should be "importable" (installed)
+ """ Function checks for Python modules which should be "importable"
before test suite can be used.
@return returns True if all modules are installed already
"""
@@ -214,63 +306,84 @@
for module_name in required_modules:
try:
imp.find_module(module_name)
- except ImportError as e:
+ except ImportError:
# We also test against a rare case: module is an egg file
try:
__import__(module_name)
- except ImportError as e:
+ except ImportError as exc:
not_installed_modules.append(module_name)
if verbose:
- print "Error: %s" % e
+ print "Error: %s" % exc
if verbose:
if not_installed_modules:
- print "Warning: Module(s) %s not installed. Please install required module(s) before using this script."% (', '.join(not_installed_modules))
+ print ("Warning: Module(s) %s not installed. Please install " + \
+ "required module(s) before using this script.")\
+ % (', '.join(not_installed_modules))
if not_installed_modules:
return False
else:
return True
-# Utility function: traverse a dictionary and change all the strings in the dictionary to
-# ASCII from Unicode. Useful when reading ASCII JSON data, because the JSON decoder always
-# returns Unicode string.
-# Based on http://stackoverflow.com/a/13105359
-def dict_to_ascii(input):
- if isinstance(input, dict):
- return OrderedDict([(dict_to_ascii(key), dict_to_ascii(value)) for key, value in input.iteritems()])
- elif isinstance(input, list):
- return [dict_to_ascii(element) for element in input]
- elif isinstance(input, unicode):
- return input.encode('ascii')
+def dict_to_ascii(dictionary):
+ """ Utility function: traverse a dictionary and change all the strings in
+ the dictionary to ASCII from Unicode. Useful when reading ASCII JSON data,
+ because the JSON decoder always returns Unicode strings. Based on
+ http://stackoverflow.com/a/13105359
+
+ Positional arguments:
+ dictionary - The dict that contains some Unicode that should be ASCII
+ """
+ if isinstance(dictionary, dict):
+ return OrderedDict([(dict_to_ascii(key), dict_to_ascii(value))
+ for key, value in dictionary.iteritems()])
+ elif isinstance(dictionary, list):
+ return [dict_to_ascii(element) for element in dictionary]
+ elif isinstance(dictionary, unicode):
+ return dictionary.encode('ascii')
else:
- return input
+ return dictionary
+
+def json_file_to_dict(fname):
+ """ Read a JSON file and return its Python representation, transforming all
+ the strings from Unicode to ASCII. The order of keys in the JSON file is
+ preserved.
-# Read a JSON file and return its Python representation, transforming all the strings from Unicode
-# to ASCII. The order of keys in the JSON file is preserved.
-def json_file_to_dict(fname):
+ Positional arguments:
+ fname - the name of the file to parse
+ """
try:
- with open(fname, "rt") as f:
- return dict_to_ascii(json.load(f, object_pairs_hook=OrderedDict))
+ with open(fname, "r") as file_obj:
+ return dict_to_ascii(json.load(file_obj,
+ object_pairs_hook=OrderedDict))
except (ValueError, IOError):
sys.stderr.write("Error parsing '%s':\n" % fname)
raise
# Wowza, double closure
-def argparse_type(casedness, prefer_hyphen=False) :
- def middle(list, type_name):
- # validate that an argument passed in (as string) is a member of the list of possible
- # arguments. Offer a suggestion if the case of the string, or the hyphens/underscores
- # do not match the expected style of the argument.
+def argparse_type(casedness, prefer_hyphen=False):
+ def middle(lst, type_name):
def parse_type(string):
- if prefer_hyphen: newstring = casedness(string).replace("_","-")
- else: newstring = casedness(string).replace("-","_")
- if string in list:
+ """ validate that an argument passed in (as string) is a member of
+ the list of possible arguments. Offer a suggestion if the case of
+ the string, or the hyphens/underscores do not match the expected
+ style of the argument.
+ """
+ if prefer_hyphen:
+ newstring = casedness(string).replace("_", "-")
+ else:
+ newstring = casedness(string).replace("-", "_")
+ if string in lst:
return string
- elif string not in list and newstring in list:
- raise argparse.ArgumentTypeError("{0} is not a supported {1}. Did you mean {2}?".format(string, type_name, newstring))
+ elif string not in lst and newstring in lst:
+ raise argparse.ArgumentTypeError(
+ "{0} is not a supported {1}. Did you mean {2}?".format(
+ string, type_name, newstring))
else:
- raise argparse.ArgumentTypeError("{0} is not a supported {1}. Supported {1}s are:\n{2}".format(string, type_name, columnate(list)))
+ raise argparse.ArgumentTypeError(
+ "{0} is not a supported {1}. Supported {1}s are:\n{2}".
+ format(string, type_name, columnate(lst)))
return parse_type
return middle
@@ -281,15 +394,19 @@
argparse_lowercase_hyphen_type = argparse_type(str.lower, True)
def argparse_force_type(case):
- def middle(list, type_name):
- # validate that an argument passed in (as string) is a member of the list of possible
- # arguments after converting it's case. Offer a suggestion if the hyphens/underscores
- # do not match the expected style of the argument.
+ """ validate that an argument passed in (as string) is a member of the list
+ of possible arguments after converting its case.
+ """
+ def middle(lst, type_name):
+ """ The parser type generator"""
def parse_type(string):
- for option in list:
+ """ The parser type"""
+ for option in lst:
if case(string) == case(option):
return option
- raise argparse.ArgumentTypeError("{0} is not a supported {1}. Supported {1}s are:\n{2}".format(string, type_name, columnate(list)))
+ raise argparse.ArgumentTypeError(
+ "{0} is not a supported {1}. Supported {1}s are:\n{2}".
+ format(string, type_name, columnate(lst)))
return parse_type
return middle
@@ -297,30 +414,42 @@
argparse_force_uppercase_type = argparse_force_type(str.upper)
argparse_force_lowercase_type = argparse_force_type(str.lower)
-# An argument parser combinator that takes in an argument parser and creates a new parser that
-# accepts a comma separated list of the same thing.
-def argparse_many(fn):
+def argparse_many(func):
+ """ An argument parser combinator that takes in an argument parser and
+ creates a new parser that accepts a comma separated list of the same thing.
+ """
def wrap(string):
- return [fn(s) for s in string.split(",")]
+ """ The actual parser"""
+ return [func(s) for s in string.split(",")]
return wrap
-# An argument parser that verifies that a string passed in corresponds to a file
-def argparse_filestring_type(string) :
- if exists(string) :
+def argparse_filestring_type(string):
+ """ An argument parser that verifies that a string passed in corresponds
+ to a file"""
+ if exists(string):
return string
- else :
- raise argparse.ArgumentTypeError("{0}"" does not exist in the filesystem.".format(string))
+ else:
+ raise argparse.ArgumentTypeError(
+ "{0}"" does not exist in the filesystem.".format(string))
+
+def columnate(strings, separator=", ", chars=80):
+ """ render a list of strings as a in a bunch of columns
-# render a list of strings as a in a bunch of columns
-def columnate(strings, seperator=", ", chars=80):
+ Positional arguments:
+ strings - the strings to columnate
+
+ Keyword arguments:
+ separator - the separation between the columns
+ chars - the maximum width of a row
+ """
col_width = max(len(s) for s in strings)
- total_width = col_width + len(seperator)
+ total_width = col_width + len(separator)
columns = math.floor(chars / total_width)
output = ""
- for i, s in zip(range(len(strings)), strings):
- append = s
+ for i, string in zip(range(len(strings)), strings):
+ append = string
if i != len(strings) - 1:
- append += seperator
+ append += separator
if i % columns == columns - 1:
append += "\n"
else:
@@ -328,13 +457,16 @@
output += append
return output
-# fail if argument provided is a parent of the specified directory
def argparse_dir_not_parent(other):
+ """fail if argument provided is a parent of the specified directory"""
def parse_type(not_parent):
+ """The parser type"""
abs_other = abspath(other)
abs_not_parent = abspath(not_parent)
if abs_not_parent == commonprefix([abs_not_parent, abs_other]):
- raise argparse.ArgumentTypeError("{0} may not be a parent directory of {1}".format(not_parent, other))
+ raise argparse.ArgumentTypeError(
+ "{0} may not be a parent directory of {1}".format(
+ not_parent, other))
else:
return not_parent
return parse_type
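A usage sketch for the argparse helpers above (option names are hypothetical); note that argparse turns the ArgumentTypeError into a usage error and exits:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--build", type=argparse_dir_not_parent("mbed-os"))
    parser.add_argument("--mcu", type=argparse_many(str.upper))
    args = parser.parse_args(["--build", "BUILD/out", "--mcu", "k64f,lpc1768"])
    print(args.mcu)   # ['K64F', 'LPC1768']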