Squashed 'tmk_core/' changes from 7967731..b9e0ea0
b9e0ea0 Merge commit '7fa9d8bdea3773d1195b04d98fcf27cf48ddd81d' as 'tool/mbed/mbed-sdk'
7fa9d8b Squashed 'tool/mbed/mbed-sdk/' content from commit 7c21ce5

git-subtree-dir: tmk_core
git-subtree-split: b9e0ea08cb940de20b3610ecdda18e9d8cd7c552

parent a20ef7052c
commit 1fe4406f37
4198 changed files with 2016457 additions and 0 deletions
745	tool/mbed/mbed-sdk/workspace_tools/toolchains/__init__.py	(new file)
@@ -0,0 +1,745 @@
"""
|
||||
mbed SDK
|
||||
Copyright (c) 2011-2013 ARM Limited
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
from os import stat, walk
|
||||
from copy import copy
|
||||
from time import time, sleep
|
||||
from types import ListType
|
||||
from shutil import copyfile
|
||||
from os.path import join, splitext, exists, relpath, dirname, basename, split
|
||||
from inspect import getmro
|
||||
|
||||
from multiprocessing import Pool, cpu_count
|
||||
from workspace_tools.utils import run_cmd, mkdir, rel_path, ToolException, split_path
|
||||
from workspace_tools.settings import BUILD_OPTIONS, MBED_ORG_USER
|
||||
import workspace_tools.hooks as hooks
|
||||
|
||||
|
||||
# Disables multiprocessing if set to a number higher than the host machine's CPU count
|
||||
CPU_COUNT_MIN = 1
|
||||
|
||||
def print_notify(event, silent=False):
|
||||
""" Default command line notification
|
||||
"""
|
||||
if event['type'] in ['info', 'debug']:
|
||||
print event['message']
|
||||
|
||||
elif event['type'] == 'cc':
|
||||
event['severity'] = event['severity'].title()
|
||||
event['file'] = basename(event['file'])
|
||||
print '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
|
||||
|
||||
elif event['type'] == 'progress':
|
||||
if not silent:
|
||||
print '%s: %s' % (event['action'].title(), basename(event['file']))
|
||||
|
||||
def print_notify_verbose(event, silent=False):
|
||||
""" Default command line notification with more verbose mode
|
||||
"""
|
||||
if event['type'] in ['info', 'debug']:
|
||||
print_notify(event) # standard handle
|
||||
|
||||
elif event['type'] == 'cc':
|
||||
event['severity'] = event['severity'].title()
|
||||
event['file'] = basename(event['file'])
|
||||
event['mcu_name'] = "None"
|
||||
event['toolchain'] = "None"
|
||||
event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
|
||||
event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
|
||||
print '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
|
||||
|
||||
elif event['type'] == 'progress':
|
||||
print_notify(event) # standard handle
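
For orientation, a minimal sketch of the 'cc' event shape these notification handlers expect; the field values below are invented placeholders:

    event = {
        'type': 'cc', 'severity': 'warning', 'file': 'main.cpp', 'line': '12',
        'message': "unused variable 'x'", 'target_name': 'lpc1768', 'toolchain_name': 'GCC_ARM',
    }
    print_notify_verbose(event)
    # prints: [Warning] LPC1768::GCC_ARM::main.cpp@12: unused variable 'x'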
|
||||
|
||||
def compile_worker(job):
|
||||
results = []
|
||||
for command in job['commands']:
|
||||
_, _stderr, _rc = run_cmd(command, job['work_dir'])
|
||||
results.append({
|
||||
'code': _rc,
|
||||
'output': _stderr,
|
||||
'command': command
|
||||
})
|
||||
|
||||
return {
|
||||
'source': job['source'],
|
||||
'object': job['object'],
|
||||
'commands': job['commands'],
|
||||
'results': results
|
||||
}
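
A rough sketch of the job dictionary compile_worker consumes (compile_sources below builds the real ones; the paths and command here are invented):

    job = {
        'source':   'main.c',
        'object':   'BUILD/main.o',
        'commands': [['arm-none-eabi-gcc', '-c', 'main.c', '-o', 'BUILD/main.o']],
        'work_dir': 'BUILD',
        'chroot':   None,
    }
    result = compile_worker(job)
    # result: {'source': 'main.c', 'object': 'BUILD/main.o', 'commands': [...],
    #          'results': [{'code': ..., 'output': ..., 'command': ...}]}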
|
||||
|
||||
class Resources:
|
||||
def __init__(self, base_path=None):
|
||||
self.base_path = base_path
|
||||
|
||||
self.inc_dirs = []
|
||||
self.headers = []
|
||||
|
||||
self.s_sources = []
|
||||
self.c_sources = []
|
||||
self.cpp_sources = []
|
||||
|
||||
self.lib_dirs = set([])
|
||||
self.objects = []
|
||||
self.libraries = []
|
||||
|
||||
# mbed special files
|
||||
self.lib_builds = []
|
||||
self.lib_refs = []
|
||||
|
||||
self.repo_dirs = []
|
||||
self.repo_files = []
|
||||
|
||||
self.linker_script = None
|
||||
|
||||
# Other files
|
||||
self.hex_files = []
|
||||
self.bin_files = []
|
||||
|
||||
def add(self, resources):
|
||||
self.inc_dirs += resources.inc_dirs
|
||||
self.headers += resources.headers
|
||||
|
||||
self.s_sources += resources.s_sources
|
||||
self.c_sources += resources.c_sources
|
||||
self.cpp_sources += resources.cpp_sources
|
||||
|
||||
self.lib_dirs |= resources.lib_dirs
|
||||
self.objects += resources.objects
|
||||
self.libraries += resources.libraries
|
||||
|
||||
self.lib_builds += resources.lib_builds
|
||||
self.lib_refs += resources.lib_refs
|
||||
|
||||
self.repo_dirs += resources.repo_dirs
|
||||
self.repo_files += resources.repo_files
|
||||
|
||||
if resources.linker_script is not None:
|
||||
self.linker_script = resources.linker_script
|
||||
|
||||
self.hex_files += resources.hex_files
|
||||
self.bin_files += resources.bin_files
|
||||
|
||||
def relative_to(self, base, dot=False):
|
||||
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
|
||||
'cpp_sources', 'lib_dirs', 'objects', 'libraries',
|
||||
'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
|
||||
v = [rel_path(f, base, dot) for f in getattr(self, field)]
|
||||
setattr(self, field, v)
|
||||
if self.linker_script is not None:
|
||||
self.linker_script = rel_path(self.linker_script, base, dot)
|
||||
|
||||
def win_to_unix(self):
|
||||
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
|
||||
'cpp_sources', 'lib_dirs', 'objects', 'libraries',
|
||||
'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
|
||||
v = [f.replace('\\', '/') for f in getattr(self, field)]
|
||||
setattr(self, field, v)
|
||||
if self.linker_script is not None:
|
||||
self.linker_script = self.linker_script.replace('\\', '/')
|
||||
|
||||
def __str__(self):
|
||||
s = []
|
||||
|
||||
for (label, resources) in (
|
||||
('Include Directories', self.inc_dirs),
|
||||
('Headers', self.headers),
|
||||
|
||||
('Assembly sources', self.s_sources),
|
||||
('C sources', self.c_sources),
|
||||
('C++ sources', self.cpp_sources),
|
||||
|
||||
('Library directories', self.lib_dirs),
|
||||
('Objects', self.objects),
|
||||
('Libraries', self.libraries),
|
||||
|
||||
('Hex files', self.hex_files),
|
||||
('Bin files', self.bin_files),
|
||||
):
|
||||
if resources:
|
||||
s.append('%s:\n ' % label + '\n '.join(resources))
|
||||
|
||||
if self.linker_script:
|
||||
s.append('Linker Script: ' + self.linker_script)
|
||||
|
||||
return '\n'.join(s)
|
||||
|
||||
|
||||
# Support legacy build conventions: the original mbed build system did not have
|
||||
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
|
||||
# instead relied on a hard-coded list of these directories to be ignored.
|
||||
LEGACY_IGNORE_DIRS = set([
|
||||
'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
|
||||
'ARM', 'GCC_ARM', 'GCC_CR', 'GCC_CS', 'IAR', 'uARM'
|
||||
])
|
||||
LEGACY_TOOLCHAIN_NAMES = {
|
||||
'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
|
||||
'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR', 'GCC_CS': 'GCC_CS',
|
||||
'IAR': 'IAR',
|
||||
}
|
||||
|
||||
|
||||
class mbedToolchain:
|
||||
VERBOSE = True
|
||||
|
||||
CORTEX_SYMBOLS = {
|
||||
"Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0"],
|
||||
"Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS"],
|
||||
"Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
|
||||
"Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3"],
|
||||
"Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4"],
|
||||
"Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1"],
|
||||
"Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7"],
|
||||
"Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1"],
|
||||
"Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
|
||||
}
|
||||
|
||||
GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
|
||||
GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')
|
||||
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
self.target = target
|
||||
self.name = self.__class__.__name__
|
||||
self.hook = hooks.Hook(target, self)
|
||||
self.silent = silent
|
||||
|
||||
self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
|
||||
|
||||
self.notify_fun = notify if notify is not None else print_notify
|
||||
self.options = options if options is not None else []
|
||||
|
||||
self.macros = macros or []
|
||||
self.options.extend(BUILD_OPTIONS)
|
||||
if self.options:
|
||||
self.info("Build Options: %s" % (', '.join(self.options)))
|
||||
|
||||
self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
|
||||
|
||||
self.symbols = None
|
||||
self.labels = None
|
||||
self.has_config = False
|
||||
|
||||
self.build_all = False
|
||||
self.timestamp = time()
|
||||
self.jobs = 1
|
||||
|
||||
self.CHROOT = None
|
||||
|
||||
self.mp_pool = None
|
||||
|
||||
def notify(self, event):
|
||||
""" Little closure for notify functions
|
||||
"""
|
||||
return self.notify_fun(event, self.silent)
|
||||
|
||||
def __exit__(self):
|
||||
if self.mp_pool is not None:
|
||||
self.mp_pool.terminate()
|
||||
|
||||
def goanna_parse_line(self, line):
|
||||
if "analyze" in self.options:
|
||||
return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_symbols(self):
|
||||
if self.symbols is None:
|
||||
# Target and Toolchain symbols
|
||||
labels = self.get_labels()
|
||||
self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
|
||||
self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
|
||||
|
||||
# Config support
|
||||
if self.has_config:
|
||||
self.symbols.append('HAVE_MBED_CONFIG_H')
|
||||
|
||||
# Cortex CPU symbols
|
||||
if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
|
||||
self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
|
||||
|
||||
# Symbols defined by the online build system
|
||||
self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, '__MBED__=1'])
|
||||
if MBED_ORG_USER:
|
||||
self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
|
||||
|
||||
# Add target's symbols
|
||||
self.symbols += self.target.macros
|
||||
# Add extra symbols passed via 'macros' parameter
|
||||
self.symbols += self.macros
|
||||
|
||||
# Form factor variables
|
||||
if hasattr(self.target, 'supported_form_factors'):
|
||||
self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
|
||||
|
||||
return self.symbols
|
||||
|
||||
def get_labels(self):
|
||||
if self.labels is None:
|
||||
toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
|
||||
toolchain_labels.remove('mbedToolchain')
|
||||
self.labels = {
|
||||
'TARGET': self.target.get_labels(),
|
||||
'TOOLCHAIN': toolchain_labels
|
||||
}
|
||||
return self.labels
|
||||
|
||||
def need_update(self, target, dependencies):
|
||||
if self.build_all:
|
||||
return True
|
||||
|
||||
if not exists(target):
|
||||
return True
|
||||
|
||||
target_mod_time = stat(target).st_mtime
|
||||
|
||||
for d in dependencies:
|
||||
|
||||
# Some objects are not provided with a full path, and here we do not have
|
||||
# information about the library paths. Safe option: assume an update is needed
|
||||
if not d or not exists(d):
|
||||
return True
|
||||
|
||||
if stat(d).st_mtime >= target_mod_time:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def scan_resources(self, path):
|
||||
labels = self.get_labels()
|
||||
resources = Resources(path)
|
||||
self.has_config = False
|
||||
|
||||
""" os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
|
||||
When topdown is True, the caller can modify the dirnames list in-place
|
||||
(perhaps using del or slice assignment), and walk() will only recurse into
|
||||
the subdirectories whose names remain in dirnames; this can be used to prune
|
||||
the search, impose a specific order of visiting, or even to inform walk()
|
||||
about directories the caller creates or renames before it resumes walk()
|
||||
again. Modifying dirnames when topdown is False is ineffective, because in
|
||||
bottom-up mode the directories in dirnames are generated before dirpath
|
||||
itself is generated.
|
||||
"""
|
||||
for root, dirs, files in walk(path):
|
||||
# Remove ignored directories
|
||||
for d in copy(dirs):
|
||||
if d == '.hg':
|
||||
dir_path = join(root, d)
|
||||
resources.repo_dirs.append(dir_path)
|
||||
resources.repo_files.extend(self.scan_repository(dir_path))
|
||||
|
||||
if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
|
||||
(d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
|
||||
(d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])):
|
||||
dirs.remove(d)
|
||||
|
||||
# Add root to include paths
|
||||
resources.inc_dirs.append(root)
|
||||
|
||||
for file in files:
|
||||
file_path = join(root, file)
|
||||
_, ext = splitext(file)
|
||||
ext = ext.lower()
|
||||
|
||||
if ext == '.s':
|
||||
resources.s_sources.append(file_path)
|
||||
|
||||
elif ext == '.c':
|
||||
resources.c_sources.append(file_path)
|
||||
|
||||
elif ext == '.cpp':
|
||||
resources.cpp_sources.append(file_path)
|
||||
|
||||
elif ext == '.h' or ext == '.hpp':
|
||||
if basename(file_path) == "mbed_config.h":
|
||||
self.has_config = True
|
||||
resources.headers.append(file_path)
|
||||
|
||||
elif ext == '.o':
|
||||
resources.objects.append(file_path)
|
||||
|
||||
elif ext == self.LIBRARY_EXT:
|
||||
resources.libraries.append(file_path)
|
||||
resources.lib_dirs.add(root)
|
||||
|
||||
elif ext == self.LINKER_EXT:
|
||||
if resources.linker_script is not None:
|
||||
self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
|
||||
resources.linker_script = file_path
|
||||
|
||||
elif ext == '.lib':
|
||||
resources.lib_refs.append(file_path)
|
||||
|
||||
elif ext == '.bld':
|
||||
resources.lib_builds.append(file_path)
|
||||
|
||||
elif file == '.hgignore':
|
||||
resources.repo_files.append(file_path)
|
||||
|
||||
elif ext == '.hex':
|
||||
resources.hex_files.append(file_path)
|
||||
|
||||
elif ext == '.bin':
|
||||
resources.bin_files.append(file_path)
|
||||
|
||||
return resources
|
||||
|
||||
def scan_repository(self, path):
|
||||
resources = []
|
||||
|
||||
for root, dirs, files in walk(path):
|
||||
# Remove ignored directories
|
||||
for d in copy(dirs):
|
||||
if d == '.' or d == '..':
|
||||
dirs.remove(d)
|
||||
|
||||
for file in files:
|
||||
file_path = join(root, file)
|
||||
resources.append(file_path)
|
||||
|
||||
return resources
|
||||
|
||||
def copy_files(self, files_paths, trg_path, rel_path=None):
|
||||
# Handle a single file
|
||||
if type(files_paths) != ListType: files_paths = [files_paths]
|
||||
|
||||
for source in files_paths:
|
||||
if source is None:
|
||||
files_paths.remove(source)
|
||||
|
||||
for source in files_paths:
|
||||
if rel_path is not None:
|
||||
relative_path = relpath(source, rel_path)
|
||||
else:
|
||||
_, relative_path = split(source)
|
||||
|
||||
target = join(trg_path, relative_path)
|
||||
|
||||
if (target != source) and (self.need_update(target, [source])):
|
||||
self.progress("copy", relative_path)
|
||||
mkdir(dirname(target))
|
||||
copyfile(source, target)
|
||||
|
||||
def relative_object_path(self, build_path, base_dir, source):
|
||||
source_dir, name, _ = split_path(source)
|
||||
obj_dir = join(build_path, relpath(source_dir, base_dir))
|
||||
mkdir(obj_dir)
|
||||
return join(obj_dir, name + '.o')
|
||||
|
||||
def compile_sources(self, resources, build_path, inc_dirs=None):
|
||||
# Web IDE progress bar for project build
|
||||
files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
|
||||
self.to_be_compiled = len(files_to_compile)
|
||||
self.compiled = 0
|
||||
|
||||
#for i in self.build_params:
|
||||
# self.debug(i)
|
||||
# self.debug("%s" % self.build_params[i])
|
||||
|
||||
inc_paths = resources.inc_dirs
|
||||
if inc_dirs is not None:
|
||||
inc_paths.extend(inc_dirs)
|
||||
|
||||
objects = []
|
||||
queue = []
|
||||
prev_dir = None
|
||||
|
||||
# The dependency checking for C/C++ is delegated to the compiler
|
||||
base_path = resources.base_path
|
||||
files_to_compile.sort()
|
||||
for source in files_to_compile:
|
||||
_, name, _ = split_path(source)
|
||||
object = self.relative_object_path(build_path, base_path, source)
|
||||
|
||||
# Avoid multiple mkdir() calls on same work directory
|
||||
work_dir = dirname(object)
|
||||
if work_dir is not prev_dir:
|
||||
prev_dir = work_dir
|
||||
mkdir(work_dir)
|
||||
|
||||
# Queue mode (multiprocessing)
|
||||
commands = self.compile_command(source, object, inc_paths)
|
||||
if commands is not None:
|
||||
queue.append({
|
||||
'source': source,
|
||||
'object': object,
|
||||
'commands': commands,
|
||||
'work_dir': work_dir,
|
||||
'chroot': self.CHROOT
|
||||
})
|
||||
else:
|
||||
objects.append(object)
|
||||
|
||||
# Use queues/multiprocessing if cpu count is higher than setting
|
||||
jobs = self.jobs if self.jobs else cpu_count()
|
||||
if jobs > CPU_COUNT_MIN and len(queue) > jobs:
|
||||
return self.compile_queue(queue, objects)
|
||||
else:
|
||||
return self.compile_seq(queue, objects)
|
||||
|
||||
def compile_seq(self, queue, objects):
|
||||
for item in queue:
|
||||
result = compile_worker(item)
|
||||
|
||||
self.compiled += 1
|
||||
self.progress("compile", item['source'], build_update=True)
|
||||
for res in result['results']:
|
||||
self.debug("Command: %s" % ' '.join(res['command']))
|
||||
self.compile_output([
|
||||
res['code'],
|
||||
res['output'],
|
||||
res['command']
|
||||
])
|
||||
objects.append(result['object'])
|
||||
return objects
|
||||
|
||||
def compile_queue(self, queue, objects):
|
||||
jobs_count = int(self.jobs if self.jobs else cpu_count())
|
||||
p = Pool(processes=jobs_count)
|
||||
|
||||
results = []
|
||||
for i in range(len(queue)):
|
||||
results.append(p.apply_async(compile_worker, [queue[i]]))
|
||||
|
||||
itr = 0
|
||||
while True:
|
||||
itr += 1
|
||||
if itr > 30000:
|
||||
p.terminate()
|
||||
p.join()
|
||||
raise ToolException("Compile did not finish in 5 minutes")
|
||||
|
||||
pending = 0
|
||||
for r in results:
|
||||
if r._ready is True:
|
||||
try:
|
||||
result = r.get()
|
||||
results.remove(r)
|
||||
|
||||
self.compiled += 1
|
||||
self.progress("compile", result['source'], build_update=True)
|
||||
for res in result['results']:
|
||||
self.debug("Command: %s" % ' '.join(res['command']))
|
||||
self.compile_output([
|
||||
res['code'],
|
||||
res['output'],
|
||||
res['command']
|
||||
])
|
||||
objects.append(result['object'])
|
||||
except ToolException, err:
|
||||
p.terminate()
|
||||
p.join()
|
||||
raise ToolException(err)
|
||||
else:
|
||||
pending += 1
|
||||
if pending > jobs_count:
|
||||
break
|
||||
|
||||
|
||||
if len(results) == 0:
|
||||
break
|
||||
|
||||
sleep(0.01)
|
||||
|
||||
results = None
|
||||
p.terminate()
|
||||
p.join()
|
||||
|
||||
return objects
|
||||
|
||||
def compile_command(self, source, object, includes):
|
||||
# Check dependencies
|
||||
_, ext = splitext(source)
|
||||
ext = ext.lower()
|
||||
|
||||
if ext == '.c' or ext == '.cpp':
|
||||
base, _ = splitext(object)
|
||||
dep_path = base + '.d'
|
||||
deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
|
||||
if len(deps) == 0 or self.need_update(object, deps):
|
||||
if ext == '.c':
|
||||
return self.compile_c(source, object, includes)
|
||||
else:
|
||||
return self.compile_cpp(source, object, includes)
|
||||
elif ext == '.s':
|
||||
deps = [source]
|
||||
if self.need_update(object, deps):
|
||||
return self.assemble(source, object, includes)
|
||||
else:
|
||||
return False
|
||||
|
||||
return None
|
||||
|
||||
def compile_output(self, output=[]):
|
||||
_rc = output[0]
|
||||
_stderr = output[1]
|
||||
command = output[2]
|
||||
|
||||
# Parse output for Warnings and Errors
|
||||
self.parse_output(_stderr)
|
||||
self.debug("Return: %s"% _rc)
|
||||
for error_line in _stderr.splitlines():
|
||||
self.debug("Output: %s"% error_line)
|
||||
|
||||
# Check return code
|
||||
if _rc != 0:
|
||||
for line in _stderr.splitlines():
|
||||
self.tool_error(line)
|
||||
raise ToolException(_stderr)
|
||||
|
||||
def compile(self, cc, source, object, includes):
|
||||
_, ext = splitext(source)
|
||||
ext = ext.lower()
|
||||
|
||||
command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source]
|
||||
|
||||
if hasattr(self, "get_dep_opt"):
|
||||
base, _ = splitext(object)
|
||||
dep_path = base + '.d'
|
||||
command.extend(self.get_dep_opt(dep_path))
|
||||
|
||||
if hasattr(self, "cc_extra"):
|
||||
command.extend(self.cc_extra(base))
|
||||
|
||||
return [command]
|
||||
|
||||
def compile_c(self, source, object, includes):
|
||||
return self.compile(self.cc, source, object, includes)
|
||||
|
||||
def compile_cpp(self, source, object, includes):
|
||||
return self.compile(self.cppc, source, object, includes)
|
||||
|
||||
def build_library(self, objects, dir, name):
|
||||
lib = self.STD_LIB_NAME % name
|
||||
fout = join(dir, lib)
|
||||
if self.need_update(fout, objects):
|
||||
self.info("Library: %s" % lib)
|
||||
self.archive(objects, fout)
|
||||
|
||||
def link_program(self, r, tmp_path, name):
|
||||
ext = 'bin'
|
||||
if hasattr(self.target, 'OUTPUT_EXT'):
|
||||
ext = self.target.OUTPUT_EXT
|
||||
|
||||
if hasattr(self.target, 'OUTPUT_NAMING'):
|
||||
self.var("binary_naming", self.target.OUTPUT_NAMING)
|
||||
if self.target.OUTPUT_NAMING == "8.3":
|
||||
name = name[0:8]
|
||||
ext = ext[0:3]
|
||||
|
||||
filename = name+'.'+ext
|
||||
elf = join(tmp_path, name + '.elf')
|
||||
bin = join(tmp_path, filename)
|
||||
|
||||
if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
|
||||
self.progress("link", name)
|
||||
self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
|
||||
|
||||
if self.need_update(bin, [elf]):
|
||||
self.progress("elf2bin", name)
|
||||
|
||||
self.binary(r, elf, bin)
|
||||
|
||||
self.var("compile_succeded", True)
|
||||
self.var("binary", filename)
|
||||
|
||||
return bin
|
||||
|
||||
def default_cmd(self, command):
|
||||
_stdout, _stderr, _rc = run_cmd(command)
|
||||
# Print all warnings / errors from stderr to the console output
|
||||
for error_line in _stderr.splitlines():
|
||||
print error_line
|
||||
|
||||
self.debug("Command: %s"% ' '.join(command))
|
||||
self.debug("Return: %s"% _rc)
|
||||
|
||||
for output_line in _stdout.splitlines():
|
||||
self.debug("Output: %s"% output_line)
|
||||
for error_line in _stderr.splitlines():
|
||||
self.debug("Errors: %s"% error_line)
|
||||
|
||||
if _rc != 0:
|
||||
for line in _stderr.splitlines():
|
||||
self.tool_error(line)
|
||||
raise ToolException(_stderr)
|
||||
|
||||
### NOTIFICATIONS ###
|
||||
def info(self, message):
|
||||
self.notify({'type': 'info', 'message': message})
|
||||
|
||||
def debug(self, message):
|
||||
if self.VERBOSE:
|
||||
if type(message) is ListType:
|
||||
message = ' '.join(message)
|
||||
message = "[DEBUG] " + message
|
||||
self.notify({'type': 'debug', 'message': message})
|
||||
|
||||
def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
|
||||
self.notify({'type': 'cc',
|
||||
'severity': severity,
|
||||
'file': file,
|
||||
'line': line,
|
||||
'message': message,
|
||||
'target_name': target_name,
|
||||
'toolchain_name': toolchain_name})
|
||||
|
||||
def progress(self, action, file, build_update=False):
|
||||
msg = {'type': 'progress', 'action': action, 'file': file}
|
||||
if build_update:
|
||||
msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
|
||||
self.notify(msg)
|
||||
|
||||
def tool_error(self, message):
|
||||
self.notify({'type': 'tool_error', 'message': message})
|
||||
|
||||
def var(self, key, value):
|
||||
self.notify({'type': 'var', 'key': key, 'val': value})
|
||||
|
||||
from workspace_tools.settings import ARM_BIN
|
||||
from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH, GCC_CS_PATH, CW_EWL_PATH, CW_GCC_PATH
|
||||
from workspace_tools.settings import IAR_PATH
|
||||
|
||||
TOOLCHAIN_BIN_PATH = {
|
||||
'ARM': ARM_BIN,
|
||||
'uARM': ARM_BIN,
|
||||
'GCC_ARM': GCC_ARM_PATH,
|
||||
'GCC_CS': GCC_CS_PATH,
|
||||
'GCC_CR': GCC_CR_PATH,
|
||||
'GCC_CW_EWL': CW_EWL_PATH,
|
||||
'GCC_CW_NEWLIB': CW_GCC_PATH,
|
||||
'IAR': IAR_PATH
|
||||
}
|
||||
|
||||
from workspace_tools.toolchains.arm import ARM_STD, ARM_MICRO
|
||||
from workspace_tools.toolchains.gcc import GCC_ARM, GCC_CS, GCC_CR
|
||||
from workspace_tools.toolchains.gcc import GCC_CW_EWL, GCC_CW_NEWLIB
|
||||
from workspace_tools.toolchains.iar import IAR
|
||||
|
||||
TOOLCHAIN_CLASSES = {
|
||||
'ARM': ARM_STD,
|
||||
'uARM': ARM_MICRO,
|
||||
'GCC_ARM': GCC_ARM,
|
||||
'GCC_CS': GCC_CS,
|
||||
'GCC_CR': GCC_CR,
|
||||
'GCC_CW_EWL': GCC_CW_EWL,
|
||||
'GCC_CW_NEWLIB': GCC_CW_NEWLIB,
|
||||
'IAR': IAR
|
||||
}
|
||||
|
||||
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
|
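
A minimal usage sketch tying the pieces of this module together, roughly as the workspace_tools build scripts drive it; the target name, paths, and the TARGET_MAP import are assumptions, not part of this file:

    from workspace_tools.targets import TARGET_MAP             # assumed to exist elsewhere in the SDK
    from workspace_tools.toolchains import TOOLCHAIN_CLASSES

    target = TARGET_MAP['LPC1768']                              # hypothetical target
    toolchain = TOOLCHAIN_CLASSES['GCC_ARM'](target, options=['debug-info'])

    resources = toolchain.scan_resources('libraries/mbed')      # hypothetical source tree
    objects = toolchain.compile_sources(resources, 'BUILD')     # assemble/compile into BUILD/
    toolchain.build_library(objects, 'BUILD', 'mbed')           # archive as STD_LIB_NAME % 'mbed'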
187	tool/mbed/mbed-sdk/workspace_tools/toolchains/arm.py	(new file)
@@ -0,0 +1,187 @@
"""
|
||||
mbed SDK
|
||||
Copyright (c) 2011-2013 ARM Limited
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import re
|
||||
from os.path import join
|
||||
|
||||
from workspace_tools.toolchains import mbedToolchain
|
||||
from workspace_tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB
|
||||
from workspace_tools.hooks import hook_tool
|
||||
from workspace_tools.settings import GOANNA_PATH
|
||||
|
||||
class ARM(mbedToolchain):
|
||||
LINKER_EXT = '.sct'
|
||||
LIBRARY_EXT = '.ar'
|
||||
|
||||
STD_LIB_NAME = "%s.ar"
|
||||
DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+): (?P<severity>Warning|Error): (?P<message>.+)')
|
||||
DEP_PATTERN = re.compile('\S+:\s(?P<file>.+)\n')
|
||||
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
mbedToolchain.__init__(self, target, options, notify, macros, silent)
|
||||
|
||||
if target.core == "Cortex-M0+":
|
||||
cpu = "Cortex-M0"
|
||||
elif target.core == "Cortex-M4F":
|
||||
cpu = "Cortex-M4.fp"
|
||||
elif target.core == "Cortex-M7F":
|
||||
cpu = "Cortex-M7.fp.sp"
|
||||
else:
|
||||
cpu = target.core
|
||||
|
||||
main_cc = join(ARM_BIN, "armcc")
|
||||
common = ["-c",
|
||||
"--cpu=%s" % cpu, "--gnu",
|
||||
"-Otime", "--split_sections", "--apcs=interwork",
|
||||
"--brief_diagnostics", "--restrict", "--multibyte_chars"
|
||||
]
|
||||
|
||||
if "save-asm" in self.options:
|
||||
common.extend(["--asm", "--interleave"])
|
||||
|
||||
if "debug-info" in self.options:
|
||||
common.append("-g")
|
||||
common.append("-O0")
|
||||
else:
|
||||
common.append("-O3")
|
||||
|
||||
common_c = [
|
||||
"--md", "--no_depend_system_headers",
|
||||
'-I%s' % ARM_INC
|
||||
]
|
||||
|
||||
self.asm = [main_cc] + common + ['-I%s' % ARM_INC]
|
||||
if not "analyze" in self.options:
|
||||
self.cc = [main_cc] + common + common_c + ["--c99"]
|
||||
self.cppc = [main_cc] + common + common_c + ["--cpp", "--no_rtti"]
|
||||
else:
|
||||
self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + common + common_c + ["--c99"]
|
||||
self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + common + common_c + ["--cpp", "--no_rtti"]
|
||||
|
||||
self.ld = [join(ARM_BIN, "armlink")]
|
||||
self.sys_libs = []
|
||||
|
||||
self.ar = join(ARM_BIN, "armar")
|
||||
self.elf2bin = join(ARM_BIN, "fromelf")
|
||||
|
||||
def remove_option(self, option):
|
||||
for tool in [self.asm, self.cc, self.cppc]:
|
||||
if option in tool:
|
||||
tool.remove(option)
|
||||
|
||||
def assemble(self, source, object, includes):
|
||||
# Preprocess first, then assemble
|
||||
tempfile = object + '.E.s'
|
||||
return [
|
||||
self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source],
|
||||
self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile])
|
||||
]
|
||||
|
||||
def parse_dependencies(self, dep_path):
|
||||
dependencies = []
|
||||
for line in open(dep_path).readlines():
|
||||
match = ARM.DEP_PATTERN.match(line)
|
||||
if match is not None:
|
||||
dependencies.append(match.group('file'))
|
||||
return dependencies
|
||||
|
||||
def parse_output(self, output):
|
||||
for line in output.splitlines():
|
||||
match = ARM.DIAGNOSTIC_PATTERN.match(line)
|
||||
if match is not None:
|
||||
self.cc_info(
|
||||
match.group('severity').lower(),
|
||||
match.group('file'),
|
||||
match.group('line'),
|
||||
match.group('message'),
|
||||
target_name=self.target.name,
|
||||
toolchain_name=self.name
|
||||
)
|
||||
match = self.goanna_parse_line(line)
|
||||
if match is not None:
|
||||
self.cc_info(
|
||||
match.group('severity').lower(),
|
||||
match.group('file'),
|
||||
match.group('line'),
|
||||
match.group('message')
|
||||
)
|
||||
|
||||
def get_dep_opt(self, dep_path):
|
||||
return ["--depend", dep_path]
|
||||
|
||||
def archive(self, objects, lib_path):
|
||||
self.default_cmd([self.ar, '-r', lib_path] + objects)
|
||||
|
||||
def link(self, output, objects, libraries, lib_dirs, mem_map):
|
||||
if len(lib_dirs):
|
||||
args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--list=.link_totals.txt"]
|
||||
else:
|
||||
args = ["-o", output, "--info=totals", "--list=.link_totals.txt"]
|
||||
|
||||
if mem_map:
|
||||
args.extend(["--scatter", mem_map])
|
||||
|
||||
if hasattr(self.target, "link_cmdline_hook"):
|
||||
args = self.target.link_cmdline_hook(self.__class__.__name__, args)
|
||||
|
||||
self.default_cmd(self.ld + args + objects + libraries + self.sys_libs)
|
||||
|
||||
@hook_tool
|
||||
def binary(self, resources, elf, bin):
|
||||
args = [self.elf2bin, '--bin', '-o', bin, elf]
|
||||
|
||||
if hasattr(self.target, "binary_cmdline_hook"):
|
||||
args = self.target.binary_cmdline_hook(self.__class__.__name__, args)
|
||||
|
||||
self.default_cmd(args)
|
||||
|
||||
class ARM_STD(ARM):
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
ARM.__init__(self, target, options, notify, macros, silent)
|
||||
self.cc += ["-D__ASSERT_MSG"]
|
||||
self.cppc += ["-D__ASSERT_MSG"]
|
||||
self.ld.append("--libpath=%s" % ARM_LIB)
|
||||
|
||||
|
||||
class ARM_MICRO(ARM):
|
||||
PATCHED_LIBRARY = False
|
||||
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
ARM.__init__(self, target, options, notify, macros, silent)
|
||||
|
||||
# Compiler
|
||||
self.asm += ["-D__MICROLIB"]
|
||||
self.cc += ["--library_type=microlib", "-D__MICROLIB", "-D__ASSERT_MSG"]
|
||||
self.cppc += ["--library_type=microlib", "-D__MICROLIB", "-D__ASSERT_MSG"]
|
||||
|
||||
# Linker
|
||||
self.ld.append("--library_type=microlib")
|
||||
|
||||
# We had to patch microlib to add C++ support
|
||||
# In later releases this patch should have entered mainline
|
||||
if ARM_MICRO.PATCHED_LIBRARY:
|
||||
self.ld.append("--noscanlib")
|
||||
|
||||
# System Libraries
|
||||
self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])
|
||||
|
||||
if target.core == "Cortex-M3":
|
||||
self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ws", "cpprt_w"]])
|
||||
|
||||
elif target.core in ["Cortex-M0", "Cortex-M0+"]:
|
||||
self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
|
||||
else:
|
||||
self.ld.append("--libpath=%s" % ARM_LIB)
|
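
As a quick check of the diagnostic parsing above, a minimal sketch; the armcc output line is invented but follows the format ARM.DIAGNOSTIC_PATTERN expects:

    import re
    pattern = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+): (?P<severity>Warning|Error): (?P<message>.+)')
    sample = '"main.cpp", line 12: Warning: variable "x" was set but never used'
    m = pattern.match(sample)
    print(m.group('severity') + ' ' + m.group('file') + ':' + m.group('line'))   # Warning main.cpp:12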
257	tool/mbed/mbed-sdk/workspace_tools/toolchains/gcc.py	(new file)
@@ -0,0 +1,257 @@
"""
|
||||
mbed SDK
|
||||
Copyright (c) 2011-2013 ARM Limited
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import re
|
||||
from os.path import join, basename, splitext
|
||||
|
||||
from workspace_tools.toolchains import mbedToolchain
|
||||
from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH, GCC_CS_PATH, CW_EWL_PATH, CW_GCC_PATH
|
||||
from workspace_tools.settings import GOANNA_PATH
|
||||
from workspace_tools.hooks import hook_tool
|
||||
|
||||
class GCC(mbedToolchain):
|
||||
LINKER_EXT = '.ld'
|
||||
LIBRARY_EXT = '.a'
|
||||
|
||||
STD_LIB_NAME = "lib%s.a"
|
||||
CIRCULAR_DEPENDENCIES = True
|
||||
DIAGNOSTIC_PATTERN = re.compile('((?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')
|
||||
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False, tool_path=""):
|
||||
mbedToolchain.__init__(self, target, options, notify, macros, silent)
|
||||
|
||||
if target.core == "Cortex-M0+":
|
||||
cpu = "cortex-m0plus"
|
||||
elif target.core == "Cortex-M4F":
|
||||
cpu = "cortex-m4"
|
||||
else:
|
||||
cpu = target.core.lower()
|
||||
|
||||
self.cpu = ["-mcpu=%s" % cpu]
|
||||
if target.core.startswith("Cortex"):
|
||||
self.cpu.append("-mthumb")
|
||||
|
||||
if target.core == "Cortex-M4F":
|
||||
self.cpu.append("-mfpu=fpv4-sp-d16")
|
||||
self.cpu.append("-mfloat-abi=softfp")
|
||||
|
||||
if target.core == "Cortex-A9":
|
||||
self.cpu.append("-mthumb-interwork")
|
||||
self.cpu.append("-marm")
|
||||
self.cpu.append("-march=armv7-a")
|
||||
self.cpu.append("-mfpu=vfpv3-d16")
|
||||
self.cpu.append("-mfloat-abi=hard")
|
||||
self.cpu.append("-mno-unaligned-access")
|
||||
|
||||
|
||||
# Note: We are using "-O2" instead of "-Os" to avoid this known GCC bug:
|
||||
# http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46762
|
||||
common_flags = ["-c", "-Wall", "-Wextra",
|
||||
"-Wno-unused-parameter", "-Wno-missing-field-initializers",
|
||||
"-fmessage-length=0", "-fno-exceptions", "-fno-builtin",
|
||||
"-ffunction-sections", "-fdata-sections",
|
||||
"-MMD", "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
|
||||
] + self.cpu
|
||||
|
||||
if "save-asm" in self.options:
|
||||
common_flags.append("-save-temps")
|
||||
|
||||
if "debug-info" in self.options:
|
||||
common_flags.append("-g")
|
||||
common_flags.append("-O0")
|
||||
else:
|
||||
common_flags.append("-O2")
|
||||
|
||||
main_cc = join(tool_path, "arm-none-eabi-gcc")
|
||||
main_cppc = join(tool_path, "arm-none-eabi-g++")
|
||||
self.asm = [main_cc, "-x", "assembler-with-cpp"] + common_flags
|
||||
if not "analyze" in self.options:
|
||||
self.cc = [main_cc, "-std=gnu99"] + common_flags
|
||||
self.cppc =[main_cppc, "-std=gnu++98", "-fno-rtti"] + common_flags
|
||||
else:
|
||||
self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "-std=gnu99", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags
|
||||
self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cppc.replace('\\', '/'), "-std=gnu++98", "-fno-rtti", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags
|
||||
|
||||
self.ld = [join(tool_path, "arm-none-eabi-gcc"), "-Wl,--gc-sections", "-Wl,--wrap,main"] + self.cpu
|
||||
self.sys_libs = ["stdc++", "supc++", "m", "c", "gcc"]
|
||||
|
||||
self.ar = join(tool_path, "arm-none-eabi-ar")
|
||||
self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")
|
||||
|
||||
def assemble(self, source, object, includes):
|
||||
return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
|
||||
|
||||
def parse_dependencies(self, dep_path):
|
||||
dependencies = []
|
||||
for line in open(dep_path).readlines()[1:]:
|
||||
file = line.replace('\\\n', '').strip()
|
||||
if file:
|
||||
# GCC might list more than one dependency on a single line, in this case
|
||||
# the dependencies are separated by a space. However, a space might also
|
||||
# indicate an actual space character in a dependency path, but in this case
|
||||
# the space character is prefixed by a backslash.
|
||||
# Temporarily replace all '\ ' with a special char that is not used (\a in this
|
||||
# case) to keep them from being interpreted by 'split' (they will be converted
|
||||
# back later to a space char)
|
||||
file = file.replace('\\ ', '\a')
|
||||
if file.find(" ") == -1:
|
||||
dependencies.append(file.replace('\a', ' '))
|
||||
else:
|
||||
dependencies = dependencies + [f.replace('\a', ' ') for f in file.split(" ")]
|
||||
return dependencies
|
||||
|
||||
def parse_output(self, output):
|
||||
# The warning/error notification is multiline
|
||||
WHERE, WHAT = 0, 1
|
||||
state, file, message = WHERE, None, None
|
||||
for line in output.splitlines():
|
||||
match = self.goanna_parse_line(line)
|
||||
if match is not None:
|
||||
self.cc_info(
|
||||
match.group('severity').lower(),
|
||||
match.group('file'),
|
||||
match.group('line'),
|
||||
match.group('message'),
|
||||
target_name=self.target.name,
|
||||
toolchain_name=self.name
|
||||
)
|
||||
continue
|
||||
|
||||
# Each line should start with the file information: "filepath: ..."
|
||||
# i should point past the file path ^
|
||||
# avoid the first column in Windows (C:\)
|
||||
i = line.find(':', 2)
|
||||
if i == -1: continue
|
||||
|
||||
if state == WHERE:
|
||||
file = line[:i]
|
||||
message = line[i+1:].strip() + ' '
|
||||
state = WHAT
|
||||
|
||||
elif state == WHAT:
|
||||
match = GCC.DIAGNOSTIC_PATTERN.match(line[i+1:])
|
||||
if match is None:
|
||||
state = WHERE
|
||||
continue
|
||||
|
||||
self.cc_info(
|
||||
match.group('severity'),
|
||||
file, match.group('line'),
|
||||
message + match.group('message')
|
||||
)
|
||||
|
||||
def archive(self, objects, lib_path):
|
||||
self.default_cmd([self.ar, "rcs", lib_path] + objects)
|
||||
|
||||
def link(self, output, objects, libraries, lib_dirs, mem_map):
|
||||
libs = []
|
||||
for l in libraries:
|
||||
name, _ = splitext(basename(l))
|
||||
libs.append("-l%s" % name[3:])
|
||||
libs.extend(["-l%s" % l for l in self.sys_libs])
|
||||
|
||||
# NOTE: There is a circular dependency between the mbed library and the clib
|
||||
# We could define a set of weak symbols to satisfy the clib dependencies in "sys.o",
|
||||
# but if an application uses only clib symbols and not mbed symbols, then the final
|
||||
# image is not correctly retargeted
|
||||
if self.CIRCULAR_DEPENDENCIES:
|
||||
libs.extend(libs)
|
||||
|
||||
self.default_cmd(self.hook.get_cmdline_linker(self.ld + ["-T%s" % mem_map, "-o", output] +
|
||||
objects + ["-L%s" % L for L in lib_dirs] + libs))
|
||||
|
||||
@hook_tool
|
||||
def binary(self, resources, elf, bin):
|
||||
self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, "-O", "binary", elf, bin]))
|
||||
|
||||
|
||||
class GCC_ARM(GCC):
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
GCC.__init__(self, target, options, notify, macros, silent, GCC_ARM_PATH)
|
||||
|
||||
# Use latest gcc nanolib
|
||||
self.ld.append("--specs=nano.specs")
|
||||
if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368"]:
|
||||
self.ld.extend(["-u _printf_float", "-u _scanf_float"])
|
||||
elif target.name in ["RZ_A1H", "ARCH_MAX", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F401RE", "NUCLEO_F411RE"]:
|
||||
self.ld.extend(["-u_printf_float", "-u_scanf_float"])
|
||||
|
||||
self.sys_libs.append("nosys")
|
||||
|
||||
|
||||
class GCC_CR(GCC):
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
GCC.__init__(self, target, options, notify, macros, silent, GCC_CR_PATH)
|
||||
|
||||
additional_compiler_flags = [
|
||||
"-D__NEWLIB__", "-D__CODE_RED", "-D__USE_CMSIS", "-DCPP_USE_HEAP",
|
||||
]
|
||||
self.cc += additional_compiler_flags
|
||||
self.cppc += additional_compiler_flags
|
||||
|
||||
# Use latest gcc nanolib
|
||||
self.ld.append("--specs=nano.specs")
|
||||
if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368"]:
|
||||
self.ld.extend(["-u _printf_float", "-u _scanf_float"])
|
||||
self.ld += ["-nostdlib"]
|
||||
|
||||
|
||||
class GCC_CS(GCC):
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
GCC.__init__(self, target, options, notify, macros, silent, GCC_CS_PATH)
|
||||
|
||||
|
||||
class GCC_CW(GCC):
|
||||
ARCH_LIB = {
|
||||
"Cortex-M0+": "armv6-m",
|
||||
}
|
||||
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
GCC.__init__(self, target, options, notify, macros, silent, CW_GCC_PATH)
|
||||
|
||||
|
||||
class GCC_CW_EWL(GCC_CW):
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
GCC_CW.__init__(self, target, options, notify, macros, silent)
|
||||
|
||||
# Compiler
|
||||
common = [
|
||||
'-mfloat-abi=soft',
|
||||
'-nostdinc', '-I%s' % join(CW_EWL_PATH, "EWL_C", "include"),
|
||||
]
|
||||
self.cc += common + [
|
||||
'-include', join(CW_EWL_PATH, "EWL_C", "include", 'lib_c99.prefix')
|
||||
]
|
||||
self.cppc += common + [
|
||||
'-nostdinc++', '-I%s' % join(CW_EWL_PATH, "EWL_C++", "include"),
|
||||
'-include', join(CW_EWL_PATH, "EWL_C++", "include", 'lib_ewl_c++.prefix')
|
||||
]
|
||||
|
||||
# Linker
|
||||
self.sys_libs = []
|
||||
self.CIRCULAR_DEPENDENCIES = False
|
||||
self.ld = [join(CW_GCC_PATH, "arm-none-eabi-g++"),
|
||||
"-Xlinker --gc-sections",
|
||||
"-L%s" % join(CW_EWL_PATH, "lib", GCC_CW.ARCH_LIB[target.core]),
|
||||
"-n", "-specs=ewl_c++.specs", "-mfloat-abi=soft",
|
||||
"-Xlinker --undefined=__pformatter_", "-Xlinker --defsym=__pformatter=__pformatter_",
|
||||
"-Xlinker --undefined=__sformatter", "-Xlinker --defsym=__sformatter=__sformatter",
|
||||
] + self.cpu
|
||||
|
||||
|
||||
class GCC_CW_NEWLIB(GCC_CW):
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
GCC_CW.__init__(self, target, options, notify, macros, silent)
|
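
A small sketch of the escaped-space handling described in parse_dependencies above; the dependency line is invented:

    line = ' capi/My\\ Header.h api/wait_api.h \\\n'             # one continuation line from a .d file
    file = line.replace('\\\n', '').strip()
    file = file.replace('\\ ', '\a')                              # protect escaped spaces from split()
    deps = [f.replace('\a', ' ') for f in file.split(' ')]
    print(deps)                                                   # ['capi/My Header.h', 'api/wait_api.h']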
112	tool/mbed/mbed-sdk/workspace_tools/toolchains/iar.py	(new file)
@@ -0,0 +1,112 @@
"""
|
||||
mbed SDK
|
||||
Copyright (c) 2011-2013 ARM Limited
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import re
|
||||
from os import remove
|
||||
from os.path import join, exists
|
||||
|
||||
from workspace_tools.toolchains import mbedToolchain
|
||||
from workspace_tools.settings import IAR_PATH
|
||||
from workspace_tools.settings import GOANNA_PATH
|
||||
from workspace_tools.hooks import hook_tool
|
||||
|
||||
class IAR(mbedToolchain):
|
||||
LIBRARY_EXT = '.a'
|
||||
LINKER_EXT = '.icf'
|
||||
STD_LIB_NAME = "%s.a"
|
||||
|
||||
DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)",(?P<line>[\d]+)\s+(?P<severity>Warning|Error)(?P<message>.+)')
|
||||
|
||||
def __init__(self, target, options=None, notify=None, macros=None, silent=False):
|
||||
mbedToolchain.__init__(self, target, options, notify, macros, silent)
|
||||
|
||||
c_flags = [
|
||||
"--cpu=%s" % target.core, "--thumb",
|
||||
"--dlib_config", join(IAR_PATH, "inc", "c", "DLib_Config_Full.h"),
|
||||
"-e", # Enable IAR language extension
|
||||
"--no_wrap_diagnostics",
|
||||
# Pa050: No need to be notified about "non-native end of line sequence"
|
||||
# Pa084: Pointless integer comparison -> checks for the values of an enum, but we use values outside of the enum to notify errors (ie: NC).
|
||||
# Pa093: Implicit conversion from float to integer (ie: wait_ms(85.4) -> wait_ms(85))
|
||||
# Pa082: Operation involving two values from two registers (ie: (float)(*obj->MR)/(float)(LPC_PWM1->MR0))
|
||||
"--diag_suppress=Pa050,Pa084,Pa093,Pa082",
|
||||
]
|
||||
|
||||
if "debug-info" in self.options:
|
||||
c_flags.append("-r")
|
||||
c_flags.append("-On")
|
||||
else:
|
||||
c_flags.append("-Oh")
|
||||
|
||||
IAR_BIN = join(IAR_PATH, "bin")
|
||||
main_cc = join(IAR_BIN, "iccarm")
|
||||
self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", target.core]
|
||||
if not "analyze" in self.options:
|
||||
self.cc = [main_cc] + c_flags
|
||||
self.cppc = [main_cc, "--c++", "--no_rtti", "--no_exceptions"] + c_flags
|
||||
else:
|
||||
self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + c_flags
|
||||
self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions"] + c_flags
|
||||
self.ld = join(IAR_BIN, "ilinkarm")
|
||||
self.ar = join(IAR_BIN, "iarchive")
|
||||
self.elf2bin = join(IAR_BIN, "ielftool")
|
||||
|
||||
def parse_output(self, output):
|
||||
for line in output.splitlines():
|
||||
match = IAR.DIAGNOSTIC_PATTERN.match(line)
|
||||
if match is not None:
|
||||
self.cc_info(
|
||||
match.group('severity').lower(),
|
||||
match.group('file'),
|
||||
match.group('line'),
|
||||
match.group('message'),
|
||||
target_name=self.target.name,
|
||||
toolchain_name=self.name
|
||||
)
|
||||
match = self.goanna_parse_line(line)
|
||||
if match is not None:
|
||||
self.cc_info(
|
||||
match.group('severity').lower(),
|
||||
match.group('file'),
|
||||
match.group('line'),
|
||||
match.group('message')
|
||||
)
|
||||
|
||||
def get_dep_opt(self, dep_path):
|
||||
return ["--dependencies", dep_path]
|
||||
|
||||
def cc_extra(self, base):
|
||||
return ["-l", base + '.s']
|
||||
|
||||
def parse_dependencies(self, dep_path):
|
||||
return [path.strip() for path in open(dep_path).readlines()
|
||||
if (path and not path.isspace())]
|
||||
|
||||
def assemble(self, source, object, includes):
|
||||
return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
|
||||
|
||||
def archive(self, objects, lib_path):
|
||||
if exists(lib_path):
|
||||
remove(lib_path)
|
||||
self.default_cmd([self.ar, lib_path] + objects)
|
||||
|
||||
def link(self, output, objects, libraries, lib_dirs, mem_map):
|
||||
args = [self.ld, "-o", output, "--config", mem_map, "--skip_dynamic_initialization"]
|
||||
self.default_cmd(self.hook.get_cmdline_linker(args + objects + libraries))
|
||||
|
||||
@hook_tool
|
||||
def binary(self, resources, elf, bin):
|
||||
self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, '--bin', elf, bin]))
|
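
A brief sketch of how the base-class compile() in __init__.py folds in the IAR-specific hooks defined here (get_dep_opt and cc_extra); all paths, symbols, and flags are invented placeholders:

    source, object, base = 'main.c', 'BUILD/main.o', 'BUILD/main'
    command = (['iccarm', '--cpu=Cortex-M3', '--thumb'] +         # abridged self.cc
               ['-DTARGET_LPC1768', '-D__CORTEX_M3'] +            # -D for each get_symbols() entry
               ['-IBUILD', '-Imbed/api'] +                        # -I for each include path
               ['-o', object, source] +
               ['--dependencies', base + '.d'] +                  # appended via get_dep_opt()
               ['-l', base + '.s'])                               # appended via cc_extra()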