From 2d8cf737e44ff13d491072a45cf5a05594c387b5 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 24 Apr 2018 13:19:57 -0500 Subject: [PATCH 01/44] Move resource scanning into its own module --- tools/build_api.py | 42 +-- tools/config/__init__.py | 6 +- tools/resources/__init__.py | 577 +++++++++++++++++++++++++++++++++++ tools/toolchains/__init__.py | 497 +----------------------------- 4 files changed, 582 insertions(+), 540 deletions(-) create mode 100644 tools/resources/__init__.py diff --git a/tools/build_api.py b/tools/build_api.py index c861532987f..cd254078f0b 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -42,6 +42,7 @@ MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, BUILD_DIR) +from .resources import scan_resources from .targets import TARGET_NAMES, TARGET_MAP from .libraries import Library from .toolchains import TOOLCHAIN_CLASSES @@ -440,46 +441,6 @@ def merge_region_list(region_list, destination, notify, padding=b'\xFF'): (merged.maxaddr() - merged.minaddr() + 1)) merged.tofile(destination, format=format.strip(".")) -def scan_resources(src_paths, toolchain, dependencies_paths=None, - inc_dirs=None, base_path=None, collect_ignores=False): - """ Scan resources using initialized toolcain - - Positional arguments - src_paths - the paths to source directories - toolchain - valid toolchain object - dependencies_paths - dependency paths that we should scan for include dirs - inc_dirs - additional include directories which should be added to - the scanner resources - """ - - # Scan src_path - resources = toolchain.scan_resources(src_paths[0], base_path=base_path, - collect_ignores=collect_ignores) - for path in src_paths[1:]: - resources.add(toolchain.scan_resources(path, base_path=base_path, - collect_ignores=collect_ignores)) - - # Scan dependency paths for include dirs - if dependencies_paths is not None: - for path in dependencies_paths: - lib_resources = toolchain.scan_resources(path) - resources.inc_dirs.extend(lib_resources.inc_dirs) - - # Add additional include directories if passed - if inc_dirs: - if isinstance(inc_dirs, list): - resources.inc_dirs.extend(inc_dirs) - else: - resources.inc_dirs.append(inc_dirs) - - # Load resources into the config system which might expand/modify resources - # based on config data - resources = toolchain.config.load_resources(resources) - - # Set the toolchain's configuration data - toolchain.set_config_data(toolchain.config.get_config_data()) - - return resources

def build_project(src_paths, build_path, target, toolchain_name, libraries_paths=None, linker_script=None, clean=False, @@ -557,6 +518,7 @@ def build_project(src_paths, build_path, target, toolchain_name, try: # Call unified scan_resources resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) + print(resources) # Change linker script if specified if linker_script is not None: diff --git a/tools/config/__init__.py b/tools/config/__init__.py index 57958f8eba1..5155777f41d 100644 --- a/tools/config/__init__.py +++ b/tools/config/__init__.py @@ -1091,9 +1091,7 @@ def load_resources(self, resources): if features == prev_features: break - for feature in features: - if feature in resources.features: - resources.add(resources.features[feature]) + resources.add_features(features) prev_features = features self.validate_config() @@ -1103,8 +1101,6 @@ def load_resources(self, resources): "rtos" in self.lib_config_data): raise NotSupportedException("Target does not support mbed OS 5") - return resources - @staticmethod def 
config_to_header(config, fname=None): """ Convert the configuration data to the content of a C header file, diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py new file mode 100644 index 00000000000..10602bb04cb --- /dev/null +++ b/tools/resources/__init__.py @@ -0,0 +1,577 @@ +# mbed SDK +# Copyright (c) 2011-2013 ARM Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The scanning rules and Resources object. + +A project in Mbed OS contains metadata in the file system as directory names. +These directory names adhere to a set of rules referred to as scanning rules. +The following is an English description of the scanning rules: + +Directory names starting with "TEST_", "TARGET_", "TOOLCHAIN_" and "FEATURE_" +are excluded from a build unless one of the following is true: + * The suffix after "TARGET_" is a target label (see target.labels). + * The suffix after "TOOLCHAIN_" is a toolchain label, defined by the + inheritance hierarchy of the toolchain class. + * The suffix after "FEATURE_" is a member of `target.features`. + + +""" + +from __future__ import print_function, division, absolute_import + +import fnmatch +import re +from copy import copy +from itertools import chain +from os import walk +from os.path import (join, splitext, dirname, relpath, basename, split, normcase) + +from ..toolchains import TOOLCHAINS + +# Support legacy build conventions: the original mbed build system did not have +# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but +# instead relied on a fixed list of these directories to ignore. 
+LEGACY_IGNORE_DIRS = set([ + 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z', + 'ARM', 'uARM', 'IAR', + 'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB', + 'ARMC6' +]) +LEGACY_TOOLCHAIN_NAMES = { + 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM', + 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR', + 'IAR': 'IAR', + 'ARMC6': 'ARMC6', +} + +class LazyDict(object): + def __init__(self): + self.eager = {} + self.lazy = {} + + def add_lazy(self, key, thunk): + if key in self.eager: + del self.eager[key] + self.lazy[key] = thunk + + def __getitem__(self, key): + if (key not in self.eager + and key in self.lazy): + self.eager[key] = self.lazy[key]() + del self.lazy[key] + return self.eager[key] + + def __setitem__(self, key, value): + self.eager[key] = value + + def __delitem__(self, key): + if key in self.eager: + del self.eager[key] + else: + del self.lazy[key] + + def __contains__(self, key): + return key in self.eager or key in self.lazy + + def __iter__(self): + return chain(iter(self.eager), iter(self.lazy)) + + def __len__(self): + return len(self.eager) + len(self.lazy) + + def __str__(self): + return "Lazy{%s}" % ( + ", ".join("%r: %r" % (k, v) for k, v in + chain(self.eager.items(), ((k, "not evaluated") + for k in self.lazy)))) + + def update(self, other): + if isinstance(other, LazyDict): + self.eager.update(other.eager) + self.lazy.update(other.lazy) + else: + self.eager.update(other) + + def items(self): + """Warning: This forces the evaluation of all of the items in this LazyDict + that are iterated over.""" + for k, v in self.eager.items(): + yield k, v + for k in self.lazy.keys(): + yield k, self[k] + + def apply(self, fn): + """Delay the application of a computation to all items of the lazy dict. + Does no computation now. 
Instead the computation is performed when a + consumer attempts to access a value in this LazyDict""" + new_lazy = {} + for k, f in self.lazy.items(): + def closure(f=f): + return fn(f()) + new_lazy[k] = closure + for k, v in self.eager.items(): + def closure(v=v): + return fn(v) + new_lazy[k] = closure + self.lazy = new_lazy + self.eager = {} +class Resources: + def __init__(self, base_path=None, collect_ignores=False): + self.base_path = base_path + self.collect_ignores = collect_ignores + + self.file_basepath = {} + + self.inc_dirs = [] + self.headers = [] + + self.s_sources = [] + self.c_sources = [] + self.cpp_sources = [] + + self.lib_dirs = set([]) + self.objects = [] + self.libraries = [] + + # mbed special files + self.lib_builds = [] + self.lib_refs = [] + + self.repo_dirs = [] + self.repo_files = [] + + self.linker_script = None + + # Other files + self.hex_files = [] + self.bin_files = [] + self.json_files = [] + + # Features + self.features = LazyDict() + self.ignored_dirs = [] + + self.labels = {} + + # Pre-mbed 2.0 ignore dirs + self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) + + # Ignore patterns from .mbedignore files + self.ignore_patterns = [] + self._ignore_regex = re.compile("$^") + + def __add__(self, resources): + if resources is None: + return self + else: + return self.add(resources) + + def __radd__(self, resources): + if resources is None: + return self + else: + return self.add(resources) + + def ignore_dir(self, directory): + if self.collect_ignores: + self.ignored_dirs.append(directory) + + def add(self, resources): + for f, p in resources.file_basepath.items(): + self.file_basepath[f] = p + + self.inc_dirs += resources.inc_dirs + self.headers += resources.headers + + self.s_sources += resources.s_sources + self.c_sources += resources.c_sources + self.cpp_sources += resources.cpp_sources + + self.lib_dirs |= resources.lib_dirs + self.objects += resources.objects + self.libraries += resources.libraries + + self.lib_builds += resources.lib_builds + self.lib_refs += resources.lib_refs + + self.repo_dirs += resources.repo_dirs + self.repo_files += resources.repo_files + + if resources.linker_script is not None: + self.linker_script = resources.linker_script + + self.hex_files += resources.hex_files + self.bin_files += resources.bin_files + self.json_files += resources.json_files + + self.features.update(resources.features) + self.ignored_dirs += resources.ignored_dirs + + return self + + def rewrite_basepath(self, file_name, export_path, loc): + """ Replace the basepath of filename with export_path + + Positional arguments: + file_name - the absolute path to a file + export_path - the final destination of the file after export + """ + new_f = join(loc, relpath(file_name, self.file_basepath[file_name])) + self.file_basepath[new_f] = export_path + return new_f + + def subtract_basepath(self, export_path, loc=""): + """ Rewrite all of the basepaths with the export_path + + Positional arguments: + export_path - the final destination of the resources with respect to the + generated project files + """ + keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', + 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script', + 'lib_dirs'] + for key in keys: + vals = getattr(self, key) + if isinstance(vals, set): + vals = list(vals) + if isinstance(vals, list): + new_vals = [] + for val in vals: + new_vals.append(self.rewrite_basepath( + val, export_path, loc)) + if isinstance(getattr(self, key), set): + setattr(self, key, set(new_vals)) + else: + 
setattr(self, key, new_vals) + elif vals: + setattr(self, key, self.rewrite_basepath( + vals, export_path, loc)) + def closure(res, export_path=export_path, loc=loc): + res.subtract_basepath(export_path, loc) + return res + self.features.apply(closure) + + def _collect_duplicates(self, dupe_dict, dupe_headers): + for filename in self.s_sources + self.c_sources + self.cpp_sources: + objname, _ = splitext(basename(filename)) + dupe_dict.setdefault(objname, set()) + dupe_dict[objname] |= set([filename]) + for filename in self.headers: + headername = basename(filename) + dupe_headers.setdefault(headername, set()) + dupe_headers[headername] |= set([headername]) + for res in self.features.values(): + res._collect_duplicates(dupe_dict, dupe_headers) + return dupe_dict, dupe_headers + + def detect_duplicates(self, toolchain): + """Detect all potential ambiguities in filenames and report them with + a toolchain notification + + Positional Arguments: + toolchain - used for notifications + """ + count = 0 + dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict()) + for objname, filenames in dupe_dict.items(): + if len(filenames) > 1: + count += 1 + toolchain.notify.tool_error( + "Object file %s.o is not unique! It could be made from: %s"\ + % (objname, " ".join(filenames))) + for headername, locations in dupe_headers.items(): + if len(locations) > 1: + count += 1 + toolchain.notify.tool_error( + "Header file %s is not unique! It could be: %s" %\ + (headername, " ".join(locations))) + return count + + + def relative_to(self, base, dot=False): + for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', + 'cpp_sources', 'lib_dirs', 'objects', 'libraries', + 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', + 'hex_files', 'bin_files', 'json_files']: + v = [rel_path(f, base, dot) for f in getattr(self, field)] + setattr(self, field, v) + + def to_apply(feature, base=base, dot=dot): + feature.relative_to(base, dot) + self.features.apply(to_apply) + + if self.linker_script is not None: + self.linker_script = rel_path(self.linker_script, base, dot) + + def win_to_unix(self): + for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', + 'cpp_sources', 'lib_dirs', 'objects', 'libraries', + 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', + 'hex_files', 'bin_files', 'json_files']: + v = [f.replace('\\', '/') for f in getattr(self, field)] + setattr(self, field, v) + + def to_apply(feature): + feature.win_to_unix() + self.features.apply(to_apply) + + if self.linker_script is not None: + self.linker_script = self.linker_script.replace('\\', '/') + + def __str__(self): + s = [] + + for (label, resources) in ( + ('Include Directories', self.inc_dirs), + ('Headers', self.headers), + + ('Assembly sources', self.s_sources), + ('C sources', self.c_sources), + ('C++ sources', self.cpp_sources), + + ('Library directories', self.lib_dirs), + ('Objects', self.objects), + ('Libraries', self.libraries), + + ('Hex files', self.hex_files), + ('Bin files', self.bin_files), + + ('Features', self.features), + ): + if resources: + s.append('%s:\n ' % label + '\n '.join(resources)) + + if self.linker_script: + s.append('Linker Script: ' + self.linker_script) + + return '\n'.join(s) + + + def _add_labels(self, prefix, labels): + self.labels.setdefault(prefix, []) + self.labels[prefix].extend(labels) + + def add_toolchain_labels(self, toolchain): + for prefix, value in toolchain.get_labels().items(): + self._add_labels(prefix, value) + + def get_labels(self): + """ + """ + + def is_ignored(self, file_path): + """Check if 
file path is ignored by any .mbedignore thus far""" + return self._ignore_regex.match(normcase(file_path)) + + def add_ignore_patterns(self, root, base_path, patterns): + """Add a series of patterns to the ignored paths + + Positional arguments: + root - the directory containing the ignore file + base_path - the location that the scan started from + patterns - the list of patterns we will ignore in the future + """ + real_base = relpath(root, base_path) + if real_base == ".": + self.ignore_patterns.extend(normcase(p) for p in patterns) + else: + self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns) + if self.ignore_patterns: + self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns)) + + + def add_features(self, features): + for feat in features: + self.features[feat] + + # A helper function for scan_resources. add_directory traverses *path* (assumed to be a + # directory) and heeds the ".mbedignore" files along the way. add_directory calls _add_file + # on every file it considers adding to the resources object. + def add_directory(self, path, base_path, exclude_paths=None): + """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) + When topdown is True, the caller can modify the dirnames list in-place + (perhaps using del or slice assignment), and walk() will only recurse into + the subdirectories whose names remain in dirnames; this can be used to prune + the search, impose a specific order of visiting, or even to inform walk() + about directories the caller creates or renames before it resumes walk() + again. Modifying dirnames when topdown is False is ineffective, because in + bottom-up mode the directories in dirnames are generated before dirpath + itself is generated. + """ + if base_path is None: + base_path = path + print("%s %s %s" % (path, base_path, exclude_paths)) + for root, dirs, files in walk(path, followlinks=True): + # Check if folder contains .mbedignore + if ".mbedignore" in files: + with open (join(root,".mbedignore"), "r") as f: + lines=f.readlines() + lines = [l.strip() for l in lines] # Strip whitespaces + lines = [l for l in lines if l != ""] # Strip empty lines + lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines + # Append root path to glob patterns and append patterns to ignore_patterns + self.add_ignore_patterns(root, base_path, lines) + + # Skip the whole folder if ignored, e.g. .mbedignore containing '*' + root_path =join(relpath(root, base_path)) + if self.is_ignored(join(root_path,"")): + self.ignore_dir(root_path) + dirs[:] = [] + continue + + for d in copy(dirs): + dir_path = join(root, d) + # Add internal repo folders/files. This is needed for exporters + if d == '.hg' or d == '.git': + self.repo_dirs.append(dir_path) + + if ((d.startswith('.') or d in self.legacy_ignore_dirs) or + # Ignore targets that do not match the TARGET in extra_labels list + (d.startswith('TARGET_') and d[7:] not in self.labels['TARGET']) or + # Ignore toolchain that do not match the current TOOLCHAIN + (d.startswith('TOOLCHAIN_') and d[10:] not in self.labels['TOOLCHAIN']) or + # Ignore .mbedignore files + self.is_ignored(join(relpath(root, base_path), d,"")) or + # Ignore TESTS dir + (d == 'TESTS')): + print("ignoring %s" % dir_path) + self.ignore_dir(dir_path) + dirs.remove(d) + elif d.startswith('FEATURE_'): + # Recursively scan features but ignore them in the current scan. 
+ # These are dynamically added by the config system if the conditions are matched + def closure (dir_path=dir_path, base_path=base_path): + return self.add_directory(dir_path, base_path=base_path) + print("lazying %s" % dir_path) + self.features.add_lazy(d[8:], closure) + self.ignore_dir(dir_path) + dirs.remove(d) + elif exclude_paths: + for exclude_path in exclude_paths: + rel_path = relpath(dir_path, exclude_path) + if not (rel_path.startswith('..')): + print("excluding %s" % dir_path) + self.ignore_dir(dir_path) + dirs.remove(d) + break + + # Add root to include paths + root = root.rstrip("/") + self.inc_dirs.append(root) + self.file_basepath[root] = base_path + + for file in files: + file_path = join(root, file) + self._add_file(file_path, base_path) + + # A helper function for both scan_resources and add_directory. _add_file adds one file + # (*file_path*) to the resources object based on the file type. + def _add_file(self, file_path, base_path, exclude_paths=None): + + if (self.is_ignored(relpath(file_path, base_path)) or + basename(file_path).startswith(".")): + self.ignore_dir(relpath(file_path, base_path)) + return + + self.file_basepath[file_path] = base_path + _, ext = splitext(file_path) + ext = ext.lower() + + if ext == '.s': + self.s_sources.append(file_path) + + elif ext == '.c': + self.c_sources.append(file_path) + + elif ext == '.cpp' or ext == '.cc': + self.cpp_sources.append(file_path) + + elif ext == '.h' or ext == '.hpp' or ext == '.hh': + self.headers.append(file_path) + + elif ext == '.o': + self.objects.append(file_path) + + elif ext in ('.a', '.ar'): + self.libraries.append(file_path) + self.lib_dirs.add(dirname(file_path)) + + elif ext in ('.sct', '.icf', '.ld'): + if self.linker_script is not None: + self.notify.info("Warning: Multiple linker scripts detected: %s and %s" % (self.linker_script, file_path)) + self.linker_script = file_path + + elif ext == '.lib': + self.lib_refs.append(file_path) + + elif ext == '.bld': + self.lib_builds.append(file_path) + + elif basename(file_path) == '.hgignore': + self.repo_files.append(file_path) + + elif basename(file_path) == '.gitignore': + self.repo_files.append(file_path) + + elif ext == '.hex': + self.hex_files.append(file_path) + + elif ext == '.bin': + self.bin_files.append(file_path) + + elif ext == '.json': + self.json_files.append(file_path) + + +def scan_resources(src_paths, toolchain, dependencies_paths=None, + inc_dirs=None, base_path=None, collect_ignores=False): + """ Scan resources using initialized toolchain + + Positional arguments + src_paths - the paths to source directories + toolchain - valid toolchain object + dependencies_paths - dependency paths that we should scan for include dirs + inc_dirs - additional include directories which should be added to + the scanner resources + """ + + resources = Resources(base_path, collect_ignores) + resources.add_toolchain_labels(toolchain) + for path in src_paths: + resources.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) + + # Scan dependency paths for include dirs + if dependencies_paths is not None: + for path in dependencies_paths: + lib_resources = toolchain.scan_resources(path) + resources.inc_dirs.extend(lib_resources.inc_dirs) + + # Add additional include directories if passed + if inc_dirs: + if isinstance(inc_dirs, list): + resources.inc_dirs.extend(inc_dirs) + else: + resources.inc_dirs.append(inc_dirs) + + # Load resources into the config system which might expand/modify resources + # based on config data + 
toolchain.config.load_resources(resources) + + # Set the toolchain's configuration data + toolchain.set_config_data(toolchain.config.get_config_data()) + + return resources diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index d0ed37850b3..3a340fbb518 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -25,7 +25,6 @@ from shutil import copyfile from os.path import (join, splitext, exists, relpath, dirname, basename, split, abspath, isfile, isdir, normcase) -from itertools import chain from inspect import getmro from copy import deepcopy from collections import namedtuple @@ -33,7 +32,6 @@ from distutils.spawn import find_executable from multiprocessing import Pool, cpu_count from hashlib import md5 -import fnmatch from ..utils import (run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path, compile_worker) @@ -48,314 +46,6 @@ CPU_COUNT_MIN = 1 CPU_COEF = 1 -class LazyDict(object): - def __init__(self): - self.eager = {} - self.lazy = {} - - def add_lazy(self, key, thunk): - if key in self.eager: - del self.eager[key] - self.lazy[key] = thunk - - def __getitem__(self, key): - if (key not in self.eager - and key in self.lazy): - self.eager[key] = self.lazy[key]() - del self.lazy[key] - return self.eager[key] - - def __setitem__(self, key, value): - self.eager[key] = value - - def __delitem__(self, key): - if key in self.eager: - del self.eager[key] - else: - del self.lazy[key] - - def __contains__(self, key): - return key in self.eager or key in self.lazy - - def __iter__(self): - return chain(iter(self.eager), iter(self.lazy)) - - def __len__(self): - return len(self.eager) + len(self.lazy) - - def __str__(self): - return "Lazy{%s}" % ( - ", ".join("%r: %r" % (k, v) for k, v in - chain(self.eager.items(), ((k, "not evaluated") - for k in self.lazy)))) - - def update(self, other): - if isinstance(other, LazyDict): - self.eager.update(other.eager) - self.lazy.update(other.lazy) - else: - self.eager.update(other) - - def items(self): - """Warning: This forces the evaluation all of the items in this LazyDict - that are iterated over.""" - for k, v in self.eager.items(): - yield k, v - for k in self.lazy.keys(): - yield k, self[k] - - def apply(self, fn): - """Delay the application of a computation to all items of the lazy dict. - Does no computation now. 
Instead the comuptation is performed when a - consumer attempts to access a value in this LazyDict""" - new_lazy = {} - for k, f in self.lazy.items(): - def closure(f=f): - return fn(f()) - new_lazy[k] = closure - for k, v in self.eager.items(): - def closure(v=v): - return fn(v) - new_lazy[k] = closure - self.lazy = new_lazy - self.eager = {} - -class Resources: - def __init__(self, base_path=None, collect_ignores=False): - self.base_path = base_path - self.collect_ignores = collect_ignores - - self.file_basepath = {} - - self.inc_dirs = [] - self.headers = [] - - self.s_sources = [] - self.c_sources = [] - self.cpp_sources = [] - - self.lib_dirs = set([]) - self.objects = [] - self.libraries = [] - - # mbed special files - self.lib_builds = [] - self.lib_refs = [] - - self.repo_dirs = [] - self.repo_files = [] - - self.linker_script = None - - # Other files - self.hex_files = [] - self.bin_files = [] - self.json_files = [] - - # Features - self.features = LazyDict() - self.ignored_dirs = [] - - def __add__(self, resources): - if resources is None: - return self - else: - return self.add(resources) - - def __radd__(self, resources): - if resources is None: - return self - else: - return self.add(resources) - - def ignore_dir(self, directory): - if self.collect_ignores: - self.ignored_dirs.append(directory) - - def add(self, resources): - self.file_basepath.update(resources.file_basepath) - - self.inc_dirs += resources.inc_dirs - self.headers += resources.headers - - self.s_sources += resources.s_sources - self.c_sources += resources.c_sources - self.cpp_sources += resources.cpp_sources - - self.lib_dirs |= resources.lib_dirs - self.objects += resources.objects - self.libraries += resources.libraries - - self.lib_builds += resources.lib_builds - self.lib_refs += resources.lib_refs - - self.repo_dirs += resources.repo_dirs - self.repo_files += resources.repo_files - - if resources.linker_script is not None: - self.linker_script = resources.linker_script - - self.hex_files += resources.hex_files - self.bin_files += resources.bin_files - self.json_files += resources.json_files - - self.features.update(resources.features) - self.ignored_dirs += resources.ignored_dirs - - return self - - def rewrite_basepath(self, file_name, export_path, loc): - """ Replace the basepath of filename with export_path - - Positional arguments: - file_name - the absolute path to a file - export_path - the final destination of the file after export - """ - new_f = join(loc, relpath(file_name, self.file_basepath[file_name])) - self.file_basepath[new_f] = export_path - return new_f - - def subtract_basepath(self, export_path, loc=""): - """ Rewrite all of the basepaths with the export_path - - Positional arguments: - export_path - the final destination of the resources with respect to the - generated project files - """ - keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', - 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script', - 'lib_dirs'] - for key in keys: - vals = getattr(self, key) - if isinstance(vals, set): - vals = list(vals) - if isinstance(vals, list): - new_vals = [] - for val in vals: - new_vals.append(self.rewrite_basepath( - val, export_path, loc)) - if isinstance(getattr(self, key), set): - setattr(self, key, set(new_vals)) - else: - setattr(self, key, new_vals) - elif vals: - setattr(self, key, self.rewrite_basepath( - vals, export_path, loc)) - def closure(res, export_path=export_path, loc=loc): - res.subtract_basepath(export_path, loc) - return res - self.features.apply(closure) 
- - def _collect_duplicates(self, dupe_dict, dupe_headers): - for filename in self.s_sources + self.c_sources + self.cpp_sources: - objname, _ = splitext(basename(filename)) - dupe_dict.setdefault(objname, set()) - dupe_dict[objname] |= set([filename]) - for filename in self.headers: - headername = basename(filename) - dupe_headers.setdefault(headername, set()) - dupe_headers[headername] |= set([headername]) - return dupe_dict, dupe_headers - - def detect_duplicates(self, toolchain): - """Detect all potential ambiguities in filenames and report them with - a toolchain notification - - Positional Arguments: - toolchain - used for notifications - """ - count = 0 - dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict()) - for objname, filenames in dupe_dict.items(): - if len(filenames) > 1: - count+=1 - toolchain.notify.tool_error( - "Object file %s.o is not unique! It could be made from: %s"\ - % (objname, " ".join(filenames))) - for headername, locations in dupe_headers.items(): - if len(locations) > 1: - count+=1 - toolchain.notify.tool_error( - "Header file %s is not unique! It could be: %s" %\ - (headername, " ".join(locations))) - return count - - - def relative_to(self, base, dot=False): - for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', - 'cpp_sources', 'lib_dirs', 'objects', 'libraries', - 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', - 'hex_files', 'bin_files', 'json_files']: - v = [rel_path(f, base, dot) for f in getattr(self, field)] - setattr(self, field, v) - - def to_apply(feature, base=base, dot=dot): - feature.relative_to(base, dot) - self.features.apply(to_apply) - - if self.linker_script is not None: - self.linker_script = rel_path(self.linker_script, base, dot) - - def win_to_unix(self): - for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', - 'cpp_sources', 'lib_dirs', 'objects', 'libraries', - 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', - 'hex_files', 'bin_files', 'json_files']: - v = [f.replace('\\', '/') for f in getattr(self, field)] - setattr(self, field, v) - - def to_apply(feature): - feature.win_to_unix() - self.features.apply(to_apply) - - if self.linker_script is not None: - self.linker_script = self.linker_script.replace('\\', '/') - - def __str__(self): - s = [] - - for (label, resources) in ( - ('Include Directories', self.inc_dirs), - ('Headers', self.headers), - - ('Assembly sources', self.s_sources), - ('C sources', self.c_sources), - ('C++ sources', self.cpp_sources), - - ('Library directories', self.lib_dirs), - ('Objects', self.objects), - ('Libraries', self.libraries), - - ('Hex files', self.hex_files), - ('Bin files', self.bin_files), - - ('Features', self.features), - ): - if resources: - s.append('%s:\n ' % label + '\n '.join(resources)) - - if self.linker_script: - s.append('Linker Script: ' + self.linker_script) - - return '\n'.join(s) - -# Support legacy build conventions: the original mbed build system did not have -# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but -# had the knowledge of a list of these directories to be ignored. 
-LEGACY_IGNORE_DIRS = set([ - 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z', - 'ARM', 'uARM', 'IAR', - 'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB', - 'ARMC6' -]) -LEGACY_TOOLCHAIN_NAMES = { - 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM', - 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR', - 'IAR': 'IAR', - 'ARMC6': 'ARMC6', -} - - class mbedToolchain: # Verbose logging VERBOSE = True @@ -440,12 +130,6 @@ def __init__(self, target, notify=None, macros=None, build_profile=None, # Number of concurrent build jobs. 0 means auto (based on host system cores) self.jobs = 0 - # Ignore patterns from .mbedignore files - self.ignore_patterns = [] - self._ignore_regex = re.compile("$^") - - # Pre-mbed 2.0 ignore dirs - self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]]) # Output notify function # This function is passed all events, and expected to handle notification of the @@ -584,185 +268,6 @@ def need_update(self, target, dependencies): return False - def is_ignored(self, file_path): - """Check if file path is ignored by any .mbedignore thus far""" - return self._ignore_regex.match(normcase(file_path)) - - def add_ignore_patterns(self, root, base_path, patterns): - """Add a series of patterns to the ignored paths - - Positional arguments: - root - the directory containing the ignore file - base_path - the location that the scan started from - patterns - the list of patterns we will ignore in the future - """ - real_base = relpath(root, base_path) - if real_base == ".": - self.ignore_patterns.extend(normcase(p) for p in patterns) - else: - self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns) - if self.ignore_patterns: - self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns)) - - # Create a Resources object from the path pointed to by *path* by either traversing a - # a directory structure, when *path* is a directory, or adding *path* to the resources, - # when *path* is a file. - # The parameter *base_path* is used to set the base_path attribute of the Resources - # object and the parameter *exclude_paths* is used by the directory traversal to - # exclude certain paths from the traversal. - def scan_resources(self, path, exclude_paths=None, base_path=None, - collect_ignores=False): - self.progress("scan", path) - - resources = Resources(path, collect_ignores=collect_ignores) - if not base_path: - if isfile(path): - base_path = dirname(path) - else: - base_path = path - resources.base_path = base_path - - if isfile(path): - self._add_file(path, resources, base_path, exclude_paths=exclude_paths) - else: - self._add_dir(path, resources, base_path, exclude_paths=exclude_paths) - return resources - - # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a - # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file - # on every file it considers adding to the resources object. - def _add_dir(self, path, resources, base_path, exclude_paths=None): - """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) - When topdown is True, the caller can modify the dirnames list in-place - (perhaps using del or slice assignment), and walk() will only recurse into - the subdirectories whose names remain in dirnames; this can be used to prune - the search, impose a specific order of visiting, or even to inform walk() - about directories the caller creates or renames before it resumes walk() - again. 
Modifying dirnames when topdown is False is ineffective, because in - bottom-up mode the directories in dirnames are generated before dirpath - itself is generated. - """ - labels = self.get_labels() - for root, dirs, files in walk(path, followlinks=True): - # Check if folder contains .mbedignore - if ".mbedignore" in files: - with open (join(root,".mbedignore"), "r") as f: - lines=f.readlines() - lines = [l.strip() for l in lines] # Strip whitespaces - lines = [l for l in lines if l != ""] # Strip empty lines - lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines - # Append root path to glob patterns and append patterns to ignore_patterns - self.add_ignore_patterns(root, base_path, lines) - - # Skip the whole folder if ignored, e.g. .mbedignore containing '*' - root_path =join(relpath(root, base_path)) - if (self.is_ignored(join(root_path,"")) or - self.build_dir == root_path): - resources.ignore_dir(root_path) - dirs[:] = [] - continue - - for d in copy(dirs): - dir_path = join(root, d) - # Add internal repo folders/files. This is needed for exporters - if d == '.hg' or d == '.git': - resources.repo_dirs.append(dir_path) - - if ((d.startswith('.') or d in self.legacy_ignore_dirs) or - # Ignore targets that do not match the TARGET in extra_labels list - (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or - # Ignore toolchain that do not match the current TOOLCHAIN - (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or - # Ignore .mbedignore files - self.is_ignored(join(relpath(root, base_path), d,"")) or - # Ignore TESTS dir - (d == 'TESTS')): - resources.ignore_dir(dir_path) - dirs.remove(d) - elif d.startswith('FEATURE_'): - # Recursively scan features but ignore them in the current scan. - # These are dynamically added by the config system if the conditions are matched - def closure (dir_path=dir_path, base_path=base_path): - return self.scan_resources(dir_path, base_path=base_path, - collect_ignores=resources.collect_ignores) - resources.features.add_lazy(d[8:], closure) - resources.ignore_dir(dir_path) - dirs.remove(d) - elif exclude_paths: - for exclude_path in exclude_paths: - rel_path = relpath(dir_path, exclude_path) - if not (rel_path.startswith('..')): - resources.ignore_dir(dir_path) - dirs.remove(d) - break - - # Add root to include paths - root = root.rstrip("/") - resources.inc_dirs.append(root) - resources.file_basepath[root] = base_path - - for file in files: - file_path = join(root, file) - self._add_file(file_path, resources, base_path) - - # A helper function for both scan_resources and _add_dir. _add_file adds one file - # (*file_path*) to the resources object based on the file type. 
- def _add_file(self, file_path, resources, base_path, exclude_paths=None): - - if (self.is_ignored(relpath(file_path, base_path)) or - basename(file_path).startswith(".")): - resources.ignore_dir(relpath(file_path, base_path)) - return - - resources.file_basepath[file_path] = base_path - _, ext = splitext(file_path) - ext = ext.lower() - - if ext == '.s': - resources.s_sources.append(file_path) - - elif ext == '.c': - resources.c_sources.append(file_path) - - elif ext == '.cpp' or ext == '.cc': - resources.cpp_sources.append(file_path) - - elif ext == '.h' or ext == '.hpp' or ext == '.hh': - resources.headers.append(file_path) - - elif ext == '.o': - resources.objects.append(file_path) - - elif ext == self.LIBRARY_EXT: - resources.libraries.append(file_path) - resources.lib_dirs.add(dirname(file_path)) - - elif ext == self.LINKER_EXT: - if resources.linker_script is not None: - self.notify.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path)) - resources.linker_script = file_path - - elif ext == '.lib': - resources.lib_refs.append(file_path) - - elif ext == '.bld': - resources.lib_builds.append(file_path) - - elif basename(file_path) == '.hgignore': - resources.repo_files.append(file_path) - - elif basename(file_path) == '.gitignore': - resources.repo_files.append(file_path) - - elif ext == '.hex': - resources.hex_files.append(file_path) - - elif ext == '.bin': - resources.bin_files.append(file_path) - - elif ext == '.json': - resources.json_files.append(file_path) - def scan_repository(self, path): resources = [] @@ -1246,6 +751,8 @@ def add_regions(self): # Set the configuration data def set_config_data(self, config_data): self.config_data = config_data + # new configuration data can change labels, so clear the cache + self.labels = None self.add_regions() # Creates the configuration header if needed: From 519e33866788d5fa5f50030a381308505118551d Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 24 Apr 2018 14:05:46 -0500 Subject: [PATCH 02/44] Rework export resource scanning --- tools/build_api.py | 5 +-- tools/export/__init__.py | 15 ++++--- tools/resources/__init__.py | 79 ++++++++++++++++++------------------- 3 files changed, 50 insertions(+), 49 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index cd254078f0b..267b7e43963 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -42,7 +42,7 @@ MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, BUILD_DIR) -from .resources import scan_resources +from .resources import Resources from .targets import TARGET_NAMES, TARGET_MAP from .libraries import Library from .toolchains import TOOLCHAIN_CLASSES @@ -517,8 +517,7 @@ def build_project(src_paths, build_path, target, toolchain_name, try: # Call unified scan_resources - resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) - print(resources) + resources = Resources().scan_with_toolchain(src_paths, toolchain, inc_dirs) # Change linker script if specified if linker_script is not None: diff --git a/tools/export/__init__.py b/tools/export/__init__.py index f0b46c45ce2..80efa6d0d4c 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -25,8 +25,8 @@ from shutil import rmtree, copyfile import zipfile -from ..build_api import prepare_toolchain, scan_resources -from ..toolchains import Resources +from ..resources import Resources +from ..build_api import prepare_toolchain from ..targets import TARGET_NAMES from . 
import (lpcxpresso, ds5_5, iar, makefile, embitz, coide, kds, simplicity, atmelstudio, mcuxpresso, sw4stm32, e2studio, zip, cmsis, uvision, @@ -275,10 +275,13 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None, if name is None: name = basename(normpath(abspath(src_paths[0]))) - resource_dict = {loc: sum((toolchain.scan_resources(p, collect_ignores=True) - for p in path), - Resources()) - for loc, path in src_paths.items()} + resource_dict = {} + for loc, path in src_paths.items(): + res = Resources(collect_ignores=True) + res.add_toolchain_labels(toolchain) + for p in path: + res.add_directory(p, None) + resource_dict[loc] = res resources = Resources() for loc, res in resource_dict.items(): diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 10602bb04cb..d1f30ce3cad 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -129,7 +129,7 @@ def closure(v=v): self.lazy = new_lazy self.eager = {} -class Resources: +class Resources(object): def __init__(self, base_path=None, collect_ignores=False): self.base_path = base_path self.collect_ignores = collect_ignores @@ -414,7 +414,6 @@ def add_directory(self, path, base_path, exclude_paths=None): """ if base_path is None: base_path = path - print("%s %s %s" % (path, base_path, exclude_paths)) for root, dirs, files in walk(path, followlinks=True): # Check if folder contains .mbedignore if ".mbedignore" in files: @@ -448,7 +447,6 @@ def add_directory(self, path, base_path, exclude_paths=None): self.is_ignored(join(relpath(root, base_path), d,"")) or # Ignore TESTS dir (d == 'TESTS')): - print("ignoring %s" % dir_path) self.ignore_dir(dir_path) dirs.remove(d) elif d.startswith('FEATURE_'): @@ -456,7 +454,6 @@ def add_directory(self, path, base_path, exclude_paths=None): # These are dynamically added by the config system if the conditions are matched def closure (dir_path=dir_path, base_path=base_path): return self.add_directory(dir_path, base_path=base_path) - print("lazying %s" % dir_path) self.features.add_lazy(d[8:], closure) self.ignore_dir(dir_path) dirs.remove(d) @@ -464,7 +461,6 @@ def closure (dir_path=dir_path, base_path=base_path): for exclude_path in exclude_paths: rel_path = relpath(dir_path, exclude_path) if not (rel_path.startswith('..')): - print("excluding %s" % dir_path) self.ignore_dir(dir_path) dirs.remove(d) break @@ -537,41 +533,44 @@ def _add_file(self, file_path, base_path, exclude_paths=None): self.json_files.append(file_path) -def scan_resources(src_paths, toolchain, dependencies_paths=None, - inc_dirs=None, base_path=None, collect_ignores=False): - """ Scan resources using initialized toolchain - - Positional arguments - src_paths - the paths to source directories - toolchain - valid toolchain object - dependencies_paths - dependency paths that we should scan for include dirs - inc_dirs - additional include directories which should be added to - the scanner resources - """ - - resources = Resources(base_path, collect_ignores) - resources.add_toolchain_labels(toolchain) - for path in src_paths: - resources.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) - - # Scan dependency paths for include dirs - if dependencies_paths is not None: - for path in dependencies_paths: - lib_resources = toolchain.scan_resources(path) - resources.inc_dirs.extend(lib_resources.inc_dirs) - - # Add additional include directories if passed - if inc_dirs: - if isinstance(inc_dirs, list): - resources.inc_dirs.extend(inc_dirs) - else: - 
resources.inc_dirs.append(inc_dirs) + def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, + inc_dirs=None, base_path=None, exclude=True): + """ Scan resources using initialized toolchain + + Positional arguments + src_paths - the paths to source directories + toolchain - valid toolchain object + dependencies_paths - dependency paths that we should scan for include dirs + inc_dirs - additional include directories which should be added to + the scanner resources + """ + + self.add_toolchain_labels(toolchain) + for path in src_paths: + if exclude: + self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) + else: + self.add_directory(path, base_path) + + # Scan dependency paths for include dirs + if dependencies_paths is not None: + for path in dependencies_paths: + lib_self = self.__class__(self.base_path, self.collect_ignores)\ + .scan_with_toolchain([path], toolchain) + self.inc_dirs.extend(lib_self.inc_dirs) + + # Add additional include directories if passed + if inc_dirs: + if isinstance(inc_dirs, list): + self.inc_dirs.extend(inc_dirs) + else: + self.inc_dirs.append(inc_dirs) + + # Load self into the config system which might expand/modify self + # based on config data + toolchain.config.load_resources(self) + + # Set the toolchain's configuration data + toolchain.set_config_data(toolchain.config.get_config_data()) - return resources + return self From 87146cd66af216d4dfe9b15e4c82fe58d224d4c8 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 24 Apr 2018 14:47:33 -0500 Subject: [PATCH 03/44] Cleanup and reduced scanning --- tools/build_api.py | 2 +- tools/resources/__init__.py | 28 ++++++++++++++++++---------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 267b7e43963..74c4e9a3dbd 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -517,7 +517,7 @@ def build_project(src_paths, build_path, target, toolchain_name, try: # Call unified scan_resources - resources = Resources().scan_with_toolchain(src_paths, toolchain, inc_dirs) + resources = Resources().scan_with_toolchain(src_paths, toolchain, inc_dirs=inc_dirs) # Change linker script if specified if linker_script is not None: diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index d1f30ce3cad..ce1ac42c7de 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -37,7 +37,8 @@ from copy import copy from itertools import chain from os import walk -from os.path import (join, splitext, dirname, relpath, basename, split, normcase) +from os.path import (join, splitext, dirname, relpath, basename, split, normcase, + abspath, exists) from ..toolchains import TOOLCHAINS @@ -51,8 +52,10 @@ 'ARMC6' ]) LEGACY_TOOLCHAIN_NAMES = { - 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM', - 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR', + 'ARM_STD':'ARM', + 'ARM_MICRO': 'uARM', + 'GCC_ARM': 'GCC_ARM', + 'GCC_CR': 'GCC_CR', 'IAR': 'IAR', 'ARMC6': 'ARMC6', } @@ -368,6 +371,8 @@ def _add_labels(self, prefix, labels): def add_toolchain_labels(self, toolchain): for prefix, value in toolchain.get_labels().items(): self._add_labels(prefix, value) + self.legacy_ignore_dirs -= set( + [toolchain.target.name, LEGACY_TOOLCHAIN_NAMES[toolchain.name]]) def get_labels(self): """ @@ -544,19 
+549,22 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, inc_dirs - additional include directories which should be added to the scanner resources """ - + print(src_paths) self.add_toolchain_labels(toolchain) for path in src_paths: - if exclude: - self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) - else: - self.add_directory(path, base_path) + if exists(path): + toolchain.progress("scan", abspath(path)) + if exclude: + self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) + else: + self.add_directory(path, base_path) # Scan dependency paths for include dirs if dependencies_paths is not None: - for path in dependencies_paths: + toolchain.progress("dep", dependencies_paths) + for dep in dependencies_paths: lib_self = self.__class__(self.base_path, self.collect_ignores)\ - .scan_with_toolchain([path], toolchain) + .scan_with_toolchain([dep], toolchain) self.inc_dirs.extend(lib_self.inc_dirs) # Add additional include directories if passed From 3e6e9e8c2676619457b9a6c9e873d58a5fa1889a Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 24 Apr 2018 15:01:14 -0500 Subject: [PATCH 04/44] Remove unused git hook --- tools/git_hooks/find_duplicates.py | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100755 tools/git_hooks/find_duplicates.py diff --git a/tools/git_hooks/find_duplicates.py b/tools/git_hooks/find_duplicates.py deleted file mode 100755 index 40531994e2a..00000000000 --- a/tools/git_hooks/find_duplicates.py +++ /dev/null @@ -1,26 +0,0 @@ -from os import walk -from os.path import join, abspath, dirname, basename, splitext -import sys - -ROOT = abspath(join(dirname(__file__), "..", "..")) -sys.path.insert(0, ROOT) - -from tools.toolchains.gcc import GCC_ARM -from tools.targets import TARGET_MAP -from argparse import ArgumentParser - -if __name__ == "__main__": - parser = ArgumentParser("Find duplicate file names within a directory structure") - parser.add_argument("dirs", help="Directories to search for duplicate file names" - , nargs="*") - parser.add_argument("--silent", help="Supress printing of filenames, just return number of duplicates", action="store_true") - args = parser.parse_args() - - toolchain = GCC_ARM(TARGET_MAP["K64F"]) - - resources = sum([toolchain.scan_resources(d) for d in args.dirs], None) - - scanned_files = {} - - exit(resources.detect_duplicates(toolchain)) - From 7c4230285488feb63c51b6b2b1057545a82dc557 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 24 Apr 2018 15:01:24 -0500 Subject: [PATCH 05/44] Update test builder to use new resources --- tools/build_api.py | 9 +++------ tools/resources/__init__.py | 6 +++++- tools/test_api.py | 12 +++++------- 3 files changed, 13 insertions(+), 14 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 74c4e9a3dbd..a9a3f570ed0 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -137,8 +137,7 @@ def get_config(src_paths, target, toolchain_name, app_config=None): toolchain = prepare_toolchain(src_paths, None, target, toolchain_name, app_config=app_config) - # Scan src_path for config files - scan_resources(src_paths, toolchain) + res = Resources().scan_with_toolchain(src_paths, toolchain, exclude=False) if toolchain.config.has_regions: _ = list(toolchain.config.regions) @@ -676,10 +675,8 @@ def build_library(src_paths, build_path, target, toolchain_name, try: # Call unified scan_resources - resources = scan_resources(src_paths, toolchain, - dependencies_paths=dependencies_paths, - 
inc_dirs=inc_dirs) - + resources = Resources().scan_with_toolchain( + src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs) # Copy headers, objects and static libraries - all files needed for # static lib diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index ce1ac42c7de..5db02c505a3 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -168,7 +168,11 @@ def __init__(self, base_path=None, collect_ignores=False): self.features = LazyDict() self.ignored_dirs = [] - self.labels = {} + self.labels = { + "TARGET": [], + "TOOLCHAIN": [], + "FEATURE": [] + } # Pre-mbed 2.0 ignore dirs self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) diff --git a/tools/test_api.py b/tools/test_api.py index 5b569f2f379..fe5053f55da 100644 --- a/tools/test_api.py +++ b/tools/test_api.py @@ -65,8 +65,8 @@ from tools.build_api import create_result from tools.build_api import add_result_to_report from tools.build_api import prepare_toolchain -from tools.build_api import scan_resources from tools.build_api import get_config +from tools.resources import Resources from tools.libraries import LIBRARIES, LIBRARY_MAP from tools.options import extract_profile from tools.toolchains import TOOLCHAIN_PATHS @@ -2082,12 +2082,9 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None): # List of common folders: (predicate function, path) tuple commons = [] - # Prepare the toolchain - toolchain = prepare_toolchain([base_dir], None, target_name, toolchain_name, - app_config=app_config) - # Scan the directory for paths to probe for 'TESTS' folders - base_resources = scan_resources([base_dir], toolchain) + base_resources = Resources() + base_resources.add_directory(base_dir, None) dirs = base_resources.inc_dirs for directory in dirs: @@ -2096,7 +2093,8 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None): # If the directory contains a subdirectory called 'TESTS', scan it for test cases if 'TESTS' in subdirs: walk_base_dir = join(directory, 'TESTS') - test_resources = toolchain.scan_resources(walk_base_dir, base_path=base_dir) + test_resources = Resources() + test_resources.add_directory(walk_base_dir, base_dir) # Loop through all subdirectories for d in test_resources.inc_dirs: From 4c7cf21feb26cc13d1bfe0c20107008636a5e95f Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Fri, 4 May 2018 13:06:49 -0500 Subject: [PATCH 06/44] Require notifier in resources --- tools/build_api.py | 8 +++++--- tools/resources/__init__.py | 6 +++--- tools/test_api.py | 4 ++-- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index a9a3f570ed0..31944e910f9 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -43,6 +43,7 @@ MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, BUILD_DIR) from .resources import Resources +from .notifier.mock import MockNotifier from .targets import TARGET_NAMES, TARGET_MAP from .libraries import Library from .toolchains import TOOLCHAIN_CLASSES @@ -137,7 +138,7 @@ def get_config(src_paths, target, toolchain_name, app_config=None): toolchain = prepare_toolchain(src_paths, None, target, toolchain_name, app_config=app_config) - res = Resources().scan_with_toolchain(src_paths, toolchain, exclude=False) + res = Resources(MockNotifier()).scan_with_toolchain(src_paths, toolchain, exclude=False) if toolchain.config.has_regions: _ = list(toolchain.config.regions) @@ -516,7 +517,8 @@ def build_project(src_paths, build_path, target, toolchain_name, try: # Call unified scan_resources - 
resources = Resources().scan_with_toolchain(src_paths, toolchain, inc_dirs=inc_dirs) + resources = Resources(notify).scan_with_toolchain( + src_paths, toolchain, inc_dirs=inc_dirs) # Change linker script if specified if linker_script is not None: @@ -675,7 +677,7 @@ def build_library(src_paths, build_path, target, toolchain_name, try: # Call unified scan_resources - resources = Resources().scan_with_toolchain( + resources = Resources(notify).scan_with_toolchain( src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs) # Copy headers, objects and static libraries - all files needed for diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 5db02c505a3..b29a2aa1a97 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -133,7 +133,8 @@ def closure(v=v): self.eager = {} class Resources(object): - def __init__(self, base_path=None, collect_ignores=False): + def __init__(self, notify, base_path=None, collect_ignores=False): + self.notify = notify self.base_path = base_path self.collect_ignores = collect_ignores @@ -421,6 +422,7 @@ def add_directory(self, path, base_path, exclude_paths=None): bottom-up mode the directories in dirnames are generated before dirpath itself is generated. """ + self.notify.progress("scan", abspath(path)) if base_path is None: base_path = path for root, dirs, files in walk(path, followlinks=True): @@ -553,11 +555,9 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, inc_dirs - additional include directories which should be added to the scanner resources """ - print(src_paths) self.add_toolchain_labels(toolchain) for path in src_paths: if exists(path): - toolchain.progress("scan", abspath(path)) if exclude: self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) else: diff --git a/tools/test_api.py b/tools/test_api.py index fe5053f55da..123c07daab7 100644 --- a/tools/test_api.py +++ b/tools/test_api.py @@ -2083,7 +2083,7 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None): commons = [] # Scan the directory for paths to probe for 'TESTS' folders - base_resources = Resources() + base_resources = Resources(MockNotifier()) base_resources.add_directory(base_dir, None) dirs = base_resources.inc_dirs @@ -2093,7 +2093,7 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None): # If the directory contains a subdirectory called 'TESTS', scan it for test cases if 'TESTS' in subdirs: walk_base_dir = join(directory, 'TESTS') - test_resources = Resources() + test_resources = Resources(MockNotifier()) test_resources.add_directory(walk_base_dir, base_dir) # Loop through all subdirectories From 361fc65f08536e54d0269e10a36992e44f1cf6a5 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Fri, 4 May 2018 13:40:40 -0500 Subject: [PATCH 07/44] Allow missing toolchain parameter to get_config --- tools/build_api.py | 24 ++++++++++++++---------- tools/get_config.py | 4 +--- tools/resources/__init__.py | 15 +++++++++++++++ tools/test_api.py | 2 +- 4 files changed, 31 insertions(+), 14 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 31944e910f9..30ede6c4e80 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -122,7 +122,7 @@ def add_result_to_report(report, result): result_wrap = {0: result} report[target][toolchain][id_name].append(result_wrap) -def get_config(src_paths, target, toolchain_name, app_config=None): +def get_config(src_paths, target, toolchain_name=None, app_config=None): """Get the configuration object for a 
target-toolchain combination Positional arguments: @@ -134,16 +134,20 @@ def get_config(src_paths, target, toolchain_name, app_config=None): if not isinstance(src_paths, list): src_paths = [src_paths] - # Pass all params to the unified prepare_resources() - toolchain = prepare_toolchain(src_paths, None, target, toolchain_name, - app_config=app_config) - - res = Resources(MockNotifier()).scan_with_toolchain(src_paths, toolchain, exclude=False) - if toolchain.config.has_regions: - _ = list(toolchain.config.regions) + res = Resources(MockNotifier()) + if toolchain_name: + toolchain = prepare_toolchain(src_paths, None, target, toolchain_name, + app_config=app_config) + config = toolchain.config + res.scan_with_toolchain(src_paths, toolchain, exclude=False) + else: + config = Config(target, src_paths, app_config=app_config) + res.scan_with_config(src_paths, config, exclude=False) + if config.has_regions: + _ = list(config.regions) - cfg, macros = toolchain.config.get_config_data() - features = toolchain.config.get_features() + cfg, macros = config.get_config_data() + features = config.get_features() return cfg, macros, features def is_official_target(target_name, version): diff --git a/tools/get_config.py b/tools/get_config.py index 9a3bb0155b2..1cc55c67efb 100644 --- a/tools/get_config.py +++ b/tools/get_config.py @@ -54,9 +54,7 @@ target = extract_mcus(parser, options)[0] # Toolchain - if options.tool is None: - args_error(parser, "argument -t/--toolchain is required") - toolchain = options.tool[0] + toolchain = options.tool[0] if options.tool is not None else None options.prefix = options.prefix or [""] diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index b29a2aa1a97..33884509bcb 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -373,6 +373,9 @@ def _add_labels(self, prefix, labels): self.labels.setdefault(prefix, []) self.labels[prefix].extend(labels) + def add_target_labels(self, target): + self._add_labels("TARGET_", target.labels) + def add_toolchain_labels(self, toolchain): for prefix, value in toolchain.get_labels().items(): self._add_labels(prefix, value) @@ -586,3 +589,15 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, toolchain.set_config_data(toolchain.config.get_config_data()) return self + + def scan_with_config(self, src_paths, config, exclude=True, base_path=None): + if config.target: + self.add_target_labels(config.target) + for path in src_paths: + if exists(path): + if exclude: + self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) + else: + self.add_directory(path, base_path) + config.load_resources(self) + return self diff --git a/tools/test_api.py b/tools/test_api.py index 123c07daab7..e2f6bfa2558 100644 --- a/tools/test_api.py +++ b/tools/test_api.py @@ -2228,7 +2228,7 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name, else: target_name = target target = TARGET_MAP[target_name] - cfg, _, _ = get_config(base_source_paths, target_name, toolchain_name, app_config=app_config) + cfg, _, _ = get_config(base_source_paths, target, app_config=app_config) baud_rate = 9600 if 'platform.stdio-baud-rate' in cfg: From 0ad5df379c9f484ee6558270c9735e118089a2ad Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 6 Jun 2018 15:34:44 -0500 Subject: [PATCH 08/44] Create fully incremental scans without lazy dicts --- tools/resources/__init__.py | 128 ++++++------------------------------ 1 file changed, 20 insertions(+), 108 deletions(-) diff --git 
a/tools/resources/__init__.py b/tools/resources/__init__.py index 33884509bcb..454d3d3e5d3 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -60,83 +60,13 @@ 'ARMC6': 'ARMC6', } -class LazyDict(object): - def __init__(self): - self.eager = {} - self.lazy = {} - - def add_lazy(self, key, thunk): - if key in self.eager: - del self.eager[key] - self.lazy[key] = thunk - - def __getitem__(self, key): - if (key not in self.eager - and key in self.lazy): - self.eager[key] = self.lazy[key]() - del self.lazy[key] - return self.eager[key] - - def __setitem__(self, key, value): - self.eager[key] = value - - def __delitem__(self, key): - if key in self.eager: - del self.eager[key] - else: - del self.lazy[key] - - def __contains__(self, key): - return key in self.eager or key in self.lazy - - def __iter__(self): - return chain(iter(self.eager), iter(self.lazy)) - - def __len__(self): - return len(self.eager) + len(self.lazy) - - def __str__(self): - return "Lazy{%s}" % ( - ", ".join("%r: %r" % (k, v) for k, v in - chain(self.eager.items(), ((k, "not evaluated") - for k in self.lazy)))) - - def update(self, other): - if isinstance(other, LazyDict): - self.eager.update(other.eager) - self.lazy.update(other.lazy) - else: - self.eager.update(other) - - def items(self): - """Warning: This forces the evaluation all of the items in this LazyDict - that are iterated over.""" - for k, v in self.eager.items(): - yield k, v - for k in self.lazy.keys(): - yield k, self[k] - - def apply(self, fn): - """Delay the application of a computation to all items of the lazy dict. - Does no computation now. Instead the comuptation is performed when a - consumer attempts to access a value in this LazyDict""" - new_lazy = {} - for k, f in self.lazy.items(): - def closure(f=f): - return fn(f()) - new_lazy[k] = closure - for k, v in self.eager.items(): - def closure(v=v): - return fn(v) - new_lazy[k] = closure - self.lazy = new_lazy - self.eager = {} class Resources(object): def __init__(self, notify, base_path=None, collect_ignores=False): self.notify = notify self.base_path = base_path self.collect_ignores = collect_ignores + self._label_paths = [] self.file_basepath = {} @@ -165,8 +95,6 @@ def __init__(self, notify, base_path=None, collect_ignores=False): self.bin_files = [] self.json_files = [] - # Features - self.features = LazyDict() self.ignored_dirs = [] self.labels = { @@ -226,8 +154,8 @@ def add(self, resources): self.bin_files += resources.bin_files self.json_files += resources.json_files - self.features.update(resources.features) self.ignored_dirs += resources.ignored_dirs + self._label_paths += resources._label_paths return self @@ -271,7 +199,6 @@ def subtract_basepath(self, export_path, loc=""): def closure(res, export_path=export_path, loc=loc): res.subtract_basepath(export_path, loc) return res - self.features.apply(closure) def _collect_duplicates(self, dupe_dict, dupe_headers): for filename in self.s_sources + self.c_sources + self.cpp_sources: @@ -282,8 +209,6 @@ def _collect_duplicates(self, dupe_dict, dupe_headers): headername = basename(filename) dupe_headers.setdefault(headername, set()) dupe_headers[headername] |= set([headername]) - for res in self.features.values(): - res._collect_duplicates(dupe_dict, dupe_headers) return dupe_dict, dupe_headers def detect_duplicates(self, toolchain): @@ -318,10 +243,6 @@ def relative_to(self, base, dot=False): v = [rel_path(f, base, dot) for f in getattr(self, field)] setattr(self, field, v) - def to_apply(feature, base=base, dot=dot): - 
feature.relative_to(base, dot) - self.features.apply(to_apply) - if self.linker_script is not None: self.linker_script = rel_path(self.linker_script, base, dot) @@ -333,10 +254,6 @@ def win_to_unix(self): v = [f.replace('\\', '/') for f in getattr(self, field)] setattr(self, field, v) - def to_apply(feature): - feature.win_to_unix() - self.features.apply(to_apply) - if self.linker_script is not None: self.linker_script = self.linker_script.replace('\\', '/') @@ -357,8 +274,6 @@ def __str__(self): ('Hex files', self.hex_files), ('Bin files', self.bin_files), - - ('Features', self.features), ): if resources: s.append('%s:\n ' % label + '\n '.join(resources)) @@ -372,6 +287,13 @@ def __str__(self): def _add_labels(self, prefix, labels): self.labels.setdefault(prefix, []) self.labels[prefix].extend(labels) + prefixed_labels = set("%s_%s" % (prefix, label) for label in labels) + for path, base_path in self._label_paths: + if basename(path) in prefixed_labels: + self.add_directory(path, base_path) + self._label_paths = [(p, b) for p, b in self._label_paths + if basename(p) not in prefixed_labels] + def add_target_labels(self, target): self._add_labels("TARGET_", target.labels) @@ -408,8 +330,7 @@ def add_ignore_patterns(self, root, base_path, patterns): def add_features(self, features): - for feat in features: - self.features[feat] + self._add_labels("FEATURE", features) # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file @@ -448,27 +369,17 @@ def add_directory(self, path, base_path, exclude_paths=None): for d in copy(dirs): dir_path = join(root, d) - # Add internal repo folders/files. This is needed for exporters if d == '.hg' or d == '.git': self.repo_dirs.append(dir_path) - - if ((d.startswith('.') or d in self.legacy_ignore_dirs) or - # Ignore targets that do not match the TARGET in extra_labels list - (d.startswith('TARGET_') and d[7:] not in self.labels['TARGET']) or - # Ignore toolchain that do not match the current TOOLCHAIN + if ((d.startswith('TARGET_') and d[7:] not in self.labels['TARGET']) or (d.startswith('TOOLCHAIN_') and d[10:] not in self.labels['TOOLCHAIN']) or - # Ignore .mbedignore files - self.is_ignored(join(relpath(root, base_path), d,"")) or - # Ignore TESTS dir - (d == 'TESTS')): - self.ignore_dir(dir_path) - dirs.remove(d) - elif d.startswith('FEATURE_'): - # Recursively scan features but ignore them in the current scan. 
- # These are dynamically added by the config system if the conditions are matched - def closure (dir_path=dir_path, base_path=base_path): - return self.add_directory(dir_path, base_path=base_path) - self.features.add_lazy(d[8:], closure) + (d.startswith('FEATURE_') and d[8:] not in self.labels['FEATURE'])): + self._label_paths.append((dir_path, base_path)) + self.ignore_dir(dir_path) + dirs.remove(d) + elif ((d.startswith('.') or d in self.legacy_ignore_dirs) or + self.is_ignored(join(relpath(root, base_path), d,"")) or + (d == 'TESTS')): self.ignore_dir(dir_path) dirs.remove(d) elif exclude_paths: @@ -523,7 +434,8 @@ def _add_file(self, file_path, base_path, exclude_paths=None): elif ext in ('.sct', '.icf', '.ld'): if self.linker_script is not None: self.notify.info("Warning: Multiple linker scripts detected: %s and %s" % (self.linker_script, file_path)) - self.linker_script = file_path + else: + self.linker_script = file_path elif ext == '.lib': self.lib_refs.append(file_path) From 8dab7417aebb14b8f3b73ba679a2882e525066ea Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 10:45:53 -0500 Subject: [PATCH 09/44] Cleanup comments and code near add_directory --- tools/resources/__init__.py | 54 ++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 454d3d3e5d3..76321029f8b 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -14,10 +14,10 @@ # limitations under the License. """ -The scanning rules and Resources object. +# The scanning rules and Resources object. A project in Mbed OS contains metadata in the file system as directory names. -These directory names adhere to a set of rules referred to as canning rules. +These directory names adhere to a set of rules referred to as scanning rules. The following are the English version of the scanning rules: Directory names starting with "TEST_", "TARGET_", "TOOLCHAIN_" and "FEATURE_" @@ -294,9 +294,11 @@ def _add_labels(self, prefix, labels): self._label_paths = [(p, b) for p, b in self._label_paths if basename(p) not in prefixed_labels] - def add_target_labels(self, target): - self._add_labels("TARGET_", target.labels) + self._add_labels("TARGET", target.labels) + + def add_features(self, features): + self._add_labels("FEATURE", features) def add_toolchain_labels(self, toolchain): for prefix, value in toolchain.get_labels().items(): @@ -328,39 +330,37 @@ def add_ignore_patterns(self, root, base_path, patterns): if self.ignore_patterns: self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns)) + def _not_current_label(self, dirname, label_type): + return (dirname.startswith(label_type + "_") and + dirname[len(label_type) + 1:] not in self.labels[label_type]) - def add_features(self, features): - self._add_labels("FEATURE", features) + def add_directory(self, path, base_path=None, exclude_paths=None): + """ Scan a directory and include its resources in this resources object - # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a - # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file - # on every file it considers adding to the resources object.
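For orientation, a minimal standalone sketch of the pruning rule that the new _not_current_label predicate encodes; the label values below are invented for illustration and do not come from a real target:

    # Sketch of the label-pruning predicate introduced in PATCH 09.
    # The `labels` dict mimics Resources.labels; the values are assumed examples.
    labels = {"TARGET": ["K64F"], "TOOLCHAIN": ["GCC_ARM"], "FEATURE": []}

    def not_current_label(dirname, label_type):
        # A directory is set aside when it carries a label prefix whose
        # suffix is not among the currently active labels of that type.
        return (dirname.startswith(label_type + "_") and
                dirname[len(label_type) + 1:] not in labels[label_type])

    assert not not_current_label("TARGET_K64F", "TARGET")   # scanned now
    assert not_current_label("TARGET_LPC1768", "TARGET")    # deferred/ignored
    assert not_current_label("FEATURE_BLE", "FEATURE")      # deferred until add_features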
- def add_directory(self, path, base_path, exclude_paths=None): - """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) - When topdown is True, the caller can modify the dirnames list in-place - (perhaps using del or slice assignment), and walk() will only recurse into - the subdirectories whose names remain in dirnames; this can be used to prune - the search, impose a specific order of visiting, or even to inform walk() - about directories the caller creates or renames before it resumes walk() - again. Modifying dirnames when topdown is False is ineffective, because in - bottom-up mode the directories in dirnames are generated before dirpath - itself is generated. + Positional arguments: + path - the path to search for resources + + Keyword arguments + base_path - If this is part of an incremental scan, include the origin + directory root of the scan here + exclude_paths - A list of paths that are to be excluded from a build """ self.notify.progress("scan", abspath(path)) + if base_path is None: base_path = path + if self.collect_ignores and path in self.ignored_dirs: + self.ignored_dirs.remove(path) + for root, dirs, files in walk(path, followlinks=True): # Check if folder contains .mbedignore if ".mbedignore" in files: with open (join(root,".mbedignore"), "r") as f: lines=f.readlines() - lines = [l.strip() for l in lines] # Strip whitespaces - lines = [l for l in lines if l != ""] # Strip empty lines - lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines - # Append root path to glob patterns and append patterns to ignore_patterns + lines = [l.strip() for l in lines + if l.strip() != "" and not l.startswith("#")] self.add_ignore_patterns(root, base_path, lines) - # Skip the whole folder if ignored, e.g. .mbedignore containing '*' root_path =join(relpath(root, base_path)) if self.is_ignored(join(root_path,"")): self.ignore_dir(root_path) @@ -371,9 +371,9 @@ def add_directory(self, path, base_path, exclude_paths=None): dir_path = join(root, d) if d == '.hg' or d == '.git': self.repo_dirs.append(dir_path) - if ((d.startswith('TARGET_') and d[7:] not in self.labels['TARGET']) or - (d.startswith('TOOLCHAIN_') and d[10:] not in self.labels['TOOLCHAIN']) or - (d.startswith('FEATURE_') and d[8:] not in self.labels['FEATURE'])): + + if (any(self._not_current_label(d, t) for t + in ['TARGET', 'TOOLCHAIN', 'FEATURE'])): self._label_paths.append((dir_path, base_path)) self.ignore_dir(dir_path) dirs.remove(d) From 786c2289582edaa816643ab45e9cccc02f762331 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 10:47:36 -0500 Subject: [PATCH 10/44] Cleanup legacy target+toolchain consts --- tools/resources/__init__.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 76321029f8b..c1e5bea0097 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -46,10 +46,28 @@ # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but # had the knowledge of a list of these directories to be ignored. 
LEGACY_IGNORE_DIRS = set([ - 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z', - 'ARM', 'uARM', 'IAR', - 'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB', - 'ARMC6' + # Legacy Targets + 'LPC11U24', + 'LPC1768', + 'LPC2368', + 'LPC4088', + 'LPC812', + 'KL25Z', + + # Legacy Toolchains + 'ARM', + 'uARM', + 'IAR', + 'GCC_ARM', + 'GCC_CS', + 'GCC_CR', + 'GCC_CW', + 'GCC_CW_EWL', + 'GCC_CW_NEWLIB', + 'ARMC6', + + # Tests, here for simplicity + 'TESTS', ]) LEGACY_TOOLCHAIN_NAMES = { 'ARM_STD':'ARM', @@ -378,8 +396,7 @@ def add_directory(self, path, base_path=None, exclude_paths=None): self.ignore_dir(dir_path) dirs.remove(d) elif ((d.startswith('.') or d in self.legacy_ignore_dirs) or - self.is_ignored(join(relpath(root, base_path), d,"")) or - (d == 'TESTS')): + self.is_ignored(join(relpath(root, base_path), d,""))): self.ignore_dir(dir_path) dirs.remove(d) elif exclude_paths: From 226082999a2b44adc449053511a8bf50469c1a5b Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 10:56:10 -0500 Subject: [PATCH 11/44] Remove base_path argument from several methods --- tools/resources/__init__.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index c1e5bea0097..d53a411e459 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -477,23 +477,26 @@ def _add_file(self, file_path, base_path, exclude_paths=None): def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, - inc_dirs=None, base_path=None, exclude=True): + inc_dirs=None, exclude=True): """ Scan resources using initialized toolchain Positional arguments src_paths - the paths to source directories toolchain - valid toolchain object + + Keyword arguments dependencies_paths - dependency paths that we should scan for include dirs inc_dirs - additional include directories which should be added to - the scanner resources + the scanner resources + exclude - Exclude the toolchain's build directory from the resources """ self.add_toolchain_labels(toolchain) for path in src_paths: if exists(path): if exclude: - self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) + self.add_directory(path, exclude_paths=[toolchain.build_dir]) else: - self.add_directory(path, base_path) + self.add_directory(path) # Scan dependency paths for include dirs if dependencies_paths is not None: @@ -519,14 +522,11 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, return self - def scan_with_config(self, src_paths, config, exclude=True, base_path=None): + def scan_with_config(self, src_paths, config): if config.target: self.add_target_labels(config.target) for path in src_paths: if exists(path): - if exclude: - self.add_directory(path, base_path, exclude_paths=[toolchain.build_dir]) - else: - self.add_directory(path, base_path) + self.add_directory(path) config.load_resources(self) return self From 73e890fdc6cf695a8df3917b92988c2a7d334c13 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 11:04:51 -0500 Subject: [PATCH 12/44] Simplify exclusion logic --- tools/resources/__init__.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index d53a411e459..0fddf6fefeb 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -369,6 +369,9 @@ def add_directory(self, path, base_path=None, exclude_paths=None): base_path = path
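A minimal sketch of the scanning entry point as it stands after PATCH 11, mirroring how build_api drives it; the target, toolchain name, and paths are assumed example values:

    # Sketch only: the post-PATCH-11 call shape, with assumed arguments.
    from tools.build_api import prepare_toolchain
    from tools.notifier.mock import MockNotifier
    from tools.resources import Resources

    notify = MockNotifier()
    toolchain = prepare_toolchain(["."], "BUILD", "K64F", "GCC_ARM", notify=notify)

    # base_path no longer threads through the scan; exclude=True skips
    # the toolchain's build directory.
    resources = Resources(notify).scan_with_toolchain(["."], toolchain, exclude=True)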
if self.collect_ignores and path in self.ignored_dirs: self.ignored_dirs.remove(path) + if exclude_paths: + self.add_ignore_patterns( + path, base_path, [join(e, "*") for e in exclude_paths]) for root, dirs, files in walk(path, followlinks=True): # Check if folder contains .mbedignore if ".mbedignore" in files: @@ -395,17 +398,10 @@ def add_directory(self, path, base_path=None, exclude_paths=None): self._label_paths.append((dir_path, base_path)) self.ignore_dir(dir_path) dirs.remove(d) - elif ((d.startswith('.') or d in self.legacy_ignore_dirs) or - self.is_ignored(join(relpath(root, base_path), d,""))): + elif (d.startswith('.') or d in self.legacy_ignore_dirs or + self.is_ignored(join(root_path, d, ""))): self.ignore_dir(dir_path) dirs.remove(d) - elif exclude_paths: - for exclude_path in exclude_paths: - rel_path = relpath(dir_path, exclude_path) - if not (rel_path.startswith('..')): - self.ignore_dir(dir_path) - dirs.remove(d) - break # Add root to include paths root = root.rstrip("/") @@ -416,9 +412,10 @@ def add_directory(self, path, base_path=None, exclude_paths=None): file_path = join(root, file) self._add_file(file_path, base_path) - # A helper function for both scan_resources and _add_dir. _add_file adds one file - # (*file_path*) to the resources object based on the file type. - def _add_file(self, file_path, base_path, exclude_paths=None): + def _add_file(self, file_path, base_path): + """ Add a single file into the resources object that was found by + scanning starting at base_path + """ if (self.is_ignored(relpath(file_path, base_path)) or basename(file_path).startswith(".")): From 9d8c925f7620853a7dc922c53d83d55511132653 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 11:13:59 -0500 Subject: [PATCH 13/44] Remove unused function --- tools/resources/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 0fddf6fefeb..517af68dabb 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -324,10 +324,6 @@ def add_toolchain_labels(self, toolchain): self.legacy_ignore_dirs -= set( [toolchain.target.name, LEGACY_TOOLCHAIN_NAMES[toolchain.name]]) - def get_labels(self): - """ - """ - def is_ignored(self, file_path): """Check if file path is ignored by any .mbedignore thus far""" return self._ignore_regex.match(normcase(file_path)) From 1f6862b41fe668ab208f58dd956781142754b1b6 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 11:17:36 -0500 Subject: [PATCH 14/44] Remove needless None-passing It's now the default --- tools/export/__init__.py | 2 +- tools/test_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/export/__init__.py b/tools/export/__init__.py index 80efa6d0d4c..b395e88f396 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -280,7 +280,7 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None, res = Resources(collect_ignores=True) res.add_toolchain_labels(toolchain) for p in path: - res.add_directory(p, None) + res.add_directory(p) resource_dict[loc] = res resources = Resources() diff --git a/tools/test_api.py b/tools/test_api.py index e2f6bfa2558..5ae7d0fdc8a 100644 --- a/tools/test_api.py +++ b/tools/test_api.py @@ -2084,7 +2084,7 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None): # Scan the directory for paths to probe for 'TESTS' folders base_resources = Resources(MockNotifier()) - base_resources.add_directory(base_dir, None) + base_resources.add_directory(base_dir)
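With base_path now defaulting to the scanned path itself, probing a tree for TESTS directories needs only a notifier, as the find_tests change above shows; the directory name below is an assumed example:

    # Sketch of a toolchain-free scan, as find_tests now performs it.
    from tools.notifier.mock import MockNotifier
    from tools.resources import Resources

    res = Resources(MockNotifier())
    res.add_directory("mbed-os-example")  # base_path defaults to the path itself
    print(res.inc_dirs)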
dirs = base_resources.inc_dirs for directory in dirs: From 9326386dafac830298f321cf97362d80931a5eba Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 12:57:28 -0500 Subject: [PATCH 15/44] Switch to Resources in build_api and tests --- tools/build_api.py | 62 +++++++------------------- tools/resources/__init__.py | 2 +- tools/test/build_api/build_api_test.py | 25 ++++++----- 3 files changed, 30 insertions(+), 59 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 30ede6c4e80..4f70409e234 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -142,7 +142,7 @@ def get_config(src_paths, target, toolchain_name=None, app_config=None): res.scan_with_toolchain(src_paths, toolchain, exclude=False) else: config = Config(target, src_paths, app_config=app_config) - res.scan_with_config(src_paths, config, exclude=False) + res.scan_with_config(src_paths, config) if config.has_regions: _ = list(config.regions) @@ -520,7 +520,6 @@ def build_project(src_paths, build_path, target, toolchain_name, vendor_label) try: - # Call unified scan_resources resources = Resources(notify).scan_with_toolchain( src_paths, toolchain, inc_dirs=inc_dirs) @@ -680,7 +679,6 @@ def build_library(src_paths, build_path, target, toolchain_name, raise Exception(error_msg) try: - # Call unified scan_resources resources = Resources(notify).scan_with_toolchain( src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs) @@ -783,7 +781,6 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None, build_path = lib.build_dir dependencies_paths = lib.dependencies inc_dirs = lib.inc_dirs - inc_dirs_ext = lib.inc_dirs_ext if not isinstance(src_paths, list): src_paths = [src_paths] @@ -791,7 +788,7 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None, # The first path will give the name to the library name = basename(src_paths[0]) - if report != None: + if report is not None: start = time() id_name = name.upper() description = name @@ -831,48 +828,22 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None, ignore=ignore) notify.info("Building library %s (%s, %s)" % - (name.upper(), target.name, toolchain_name)) + (name.upper(), target.name, toolchain_name)) # Take into account the library configuration (MBED_CONFIG_FILE) config = toolchain.config config.add_config_files([MBED_CONFIG_FILE]) # Scan Resources - resources = [] - for src_path in src_paths: - resources.append(toolchain.scan_resources(src_path)) - - # Add extra include directories / files which are required by library - # This files usually are not in the same directory as source files so - # previous scan will not include them - if inc_dirs_ext is not None: - for inc_ext in inc_dirs_ext: - resources.append(toolchain.scan_resources(inc_ext)) - - # Dependencies Include Paths - dependencies_include_dir = [] - if dependencies_paths is not None: - for path in dependencies_paths: - lib_resources = toolchain.scan_resources(path) - dependencies_include_dir.extend(lib_resources.inc_dirs) - dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs)) - - if inc_dirs: - dependencies_include_dir.extend(inc_dirs) - - # Add other discovered configuration data to the configuration object - for res in resources: - config.load_resources(res) - toolchain.set_config_data(toolchain.config.get_config_data()) - + resources = Resources(notify).scan_with_toolchain( + src_paths + (lib.inc_dirs_ext or []), toolchain, + inc_dirs=inc_dirs, dependencies_paths=dependencies_paths) # Copy Headers - for 
resource in resources: - toolchain.copy_files(resource.headers, build_path, - resources=resource) + toolchain.copy_files(resources.headers, build_path, + resources=resources) - dependencies_include_dir.extend( - toolchain.scan_resources(build_path).inc_dirs) + dependencies_include_dir = Resources(notify).scan_with_toolchain([build_path], toolchain).inc_dirs # Compile Sources objects = [] @@ -973,7 +944,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, # mbed notify.info("Building library %s (%s, %s)" % - ('MBED', target.name, toolchain_name)) + ('MBED', target.name, toolchain_name)) # Common Headers toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES) @@ -982,16 +953,16 @@ for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS), (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM), (MBED_HAL, MBED_LIBRARIES_HAL)]: - resources = toolchain.scan_resources(dir) + resources = Resources(notify).scan_with_toolchain([dir], toolchain) toolchain.copy_files(resources.headers, dest) library_incdirs.append(dest) - cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH) + cmsis_implementation = Resources(notify).scan_with_toolchain([MBED_CMSIS_PATH], toolchain) toolchain.copy_files(cmsis_implementation.headers, build_target) toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain) toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain) - hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH) + hal_implementation = Resources(notify).scan_with_toolchain([MBED_TARGETS_PATH], toolchain) toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries + @@ -999,15 +970,14 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, build_target, resources=hal_implementation) toolchain.copy_files(hal_implementation.linker_script, build_toolchain) toolchain.copy_files(hal_implementation.bin_files, build_toolchain) - incdirs = toolchain.scan_resources(build_target).inc_dirs + incdirs = Resources(notify).scan_with_toolchain([build_target], toolchain).inc_dirs objects = toolchain.compile_sources(cmsis_implementation + hal_implementation, library_incdirs + incdirs + [tmp_path]) toolchain.copy_files(objects, build_toolchain) # Common Sources - mbed_resources = None - for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]: - mbed_resources += toolchain.scan_resources(dir) + mbed_resources = Resources(notify).scan_with_toolchain( + [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain) objects = toolchain.compile_sources(mbed_resources, library_incdirs + incdirs) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 517af68dabb..79fbf2b9d8d 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -495,7 +495,7 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, if dependencies_paths is not None: toolchain.progress("dep", dependencies_paths) for dep in dependencies_paths: - lib_self = self.__class__(self.base_path, self.collect_ignores)\ + lib_self = self.__class__(self.notify, collect_ignores=self.collect_ignores)\ .scan_with_toolchain([dep], toolchain) self.inc_dirs.extend(lib_self.inc_dirs) diff --git a/tools/test/build_api/build_api_test.py b/tools/test/build_api/build_api_test.py index f0093f5e4b1..86ffde8edda 100644 --- a/tools/test/build_api/build_api_test.py +++ b/tools/test/build_api/build_api_test.py @@ -18,8 +18,8 @@ import unittest from collections import namedtuple from
mock import patch, MagicMock -from tools.build_api import prepare_toolchain, build_project, build_library,\ - scan_resources +from tools.build_api import prepare_toolchain, build_project, build_library +from tools.resources import Resources from tools.toolchains import TOOLCHAINS from tools.notifier.mock import MockNotifier @@ -65,9 +65,10 @@ def tearDown(self): def test_always_complete_build(self, *_): notify = MockNotifier() toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target, - self.toolchain_name, notify=notify) + self.toolchain_name, notify=notify) - res = scan_resources(self.src_paths, toolchain) + res = Resources(MockNotifier()).scan_with_toolchain( + self.src_paths, toolchain) toolchain.RESPONSE_FILES=False toolchain.config_processed = True @@ -116,7 +117,7 @@ def test_prepare_toolchain_no_app_config(self, mock_config_init): mock_config_init.assert_called_once_with(self.target, self.src_paths, app_config=None) - @patch('tools.build_api.scan_resources') + @patch('tools.build_api.Resources') @patch('tools.build_api.mkdir') @patch('os.path.exists') @patch('tools.build_api.prepare_toolchain') @@ -127,7 +128,7 @@ def test_build_project_app_config(self, mock_prepare_toolchain, mock_exists, _, :param mock_prepare_toolchain: mock of function prepare_toolchain :param mock_exists: mock of function os.path.exists :param _: mock of function mkdir (not tested) - :param __: mock of function scan_resources (not tested) + :param __: mock of class Resources (not tested) :return: """ notify = MockNotifier() @@ -146,7 +147,7 @@ def test_build_project_app_config(self, mock_prepare_toolchain, mock_exists, _, self.assertEqual(args[1]['app_config'], app_config, "prepare_toolchain was called with an incorrect app_config") - @patch('tools.build_api.scan_resources') + @patch('tools.build_api.Resources') @patch('tools.build_api.mkdir') @patch('os.path.exists') @patch('tools.build_api.prepare_toolchain') @@ -157,7 +158,7 @@ def test_build_project_no_app_config(self, mock_prepare_toolchain, mock_exists, :param mock_prepare_toolchain: mock of function prepare_toolchain :param mock_exists: mock of function os.path.exists :param _: mock of function mkdir (not tested) - :param __: mock of function scan_resources (not tested) + :param __: mock of class Resources (not tested) :return: """ notify = MockNotifier() @@ -176,7 +177,7 @@ def test_build_project_no_app_config(self, mock_prepare_toolchain, mock_exists, self.assertEqual(args[1]['app_config'], None, "prepare_toolchain was called with an incorrect app_config") - @patch('tools.build_api.scan_resources') + @patch('tools.build_api.Resources') @patch('tools.build_api.mkdir') @patch('os.path.exists') @patch('tools.build_api.prepare_toolchain') @@ -187,7 +188,7 @@ def test_build_library_app_config(self, mock_prepare_toolchain, mock_exists, _, :param mock_prepare_toolchain: mock of function prepare_toolchain :param mock_exists: mock of function os.path.exists :param _: mock of function mkdir (not tested) - :param __: mock of function scan_resources (not tested) + :param __: mock of class Resources (not tested) :return: """ notify = MockNotifier() @@ -203,7 +204,7 @@ def test_build_library_app_config(self, mock_prepare_toolchain, mock_exists, _, self.assertEqual(args[1]['app_config'], app_config, "prepare_toolchain was called with an incorrect app_config") - @patch('tools.build_api.scan_resources') + @patch('tools.build_api.Resources') @patch('tools.build_api.mkdir') @patch('os.path.exists') @patch('tools.build_api.prepare_toolchain') @@ -214,7 
+215,7 @@ def test_build_library_no_app_config(self, mock_prepare_toolchain, mock_exists, :param mock_prepare_toolchain: mock of function prepare_toolchain :param mock_exists: mock of function os.path.exists :param _: mock of function mkdir (not tested) - :param __: mock of function scan_resources (not tested) + :param __: mock of class Resources (not tested) :return: """ notify = MockNotifier() From a57942a813194fb2d88813829eaae6458e120d98 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 12:57:56 -0500 Subject: [PATCH 16/44] Remove test about unused parameter in find_tests --- tools/test/test_api/test_api_test.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/tools/test/test_api/test_api_test.py b/tools/test/test_api/test_api_test.py index 255081cfe43..827472ad311 100644 --- a/tools/test/test_api/test_api_test.py +++ b/tools/test/test_api/test_api_test.py @@ -33,29 +33,3 @@ def setUp(self): self.base_dir = 'base_dir' self.target = "K64F" self.toolchain_name = "ARM" - -@pytest.mark.parametrize("base_dir", ["base_dir"]) -@pytest.mark.parametrize("target", ["K64F"]) -@pytest.mark.parametrize("toolchain_name", ["ARM"]) -@pytest.mark.parametrize("app_config", ["app_config", None]) -def test_find_tests_app_config(base_dir, target, toolchain_name, app_config): - """ - Test find_tests for correct use of app_config - - :param base_dir: dummy value for the test base directory - :param target: the target to "test" for - :param toolchain_name: the toolchain to use for "testing" - :param app_config: Application configuration parameter to find tests - """ - set_targets_json_location() - with patch('tools.test_api.scan_resources') as mock_scan_resources,\ - patch('tools.test_api.prepare_toolchain') as mock_prepare_toolchain: - mock_scan_resources().inc_dirs.return_value = [] - - find_tests(base_dir, target, toolchain_name, app_config=app_config) - - args = mock_prepare_toolchain.call_args - assert 'app_config' in args[1],\ - "prepare_toolchain was not called with app_config" - assert args[1]['app_config'] == app_config,\ - "prepare_toolchain was called with an incorrect app_config" From 6ca218544d7e0e7bf0c94f94a862709ec7627ad3 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 13:27:05 -0500 Subject: [PATCH 17/44] Correct resources usage in tests --- tools/resources/__init__.py | 9 +++------ tools/test/toolchains/api_test.py | 16 ++++++++++------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 79fbf2b9d8d..ea9150c7cae 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -229,25 +229,22 @@ def _collect_duplicates(self, dupe_dict, dupe_headers): dupe_headers[headername] |= set([headername]) return dupe_dict, dupe_headers - def detect_duplicates(self, toolchain): + def detect_duplicates(self): """Detect all potential ambiguities in filenames and report them with a toolchain notification - - Positional Arguments: - toolchain - used for notifications """ count = 0 dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict()) for objname, filenames in dupe_dict.items(): if len(filenames) > 1: count+=1 - toolchain.tool_error( + self.notify.tool_error( "Object file %s.o is not unique! It could be made from: %s"\ % (objname, " ".join(filenames))) for headername, locations in dupe_headers.items(): if len(locations) > 1: count+=1 - toolchain.tool_error( + self.notify.tool_error( "Header file %s is not unique! 
It could be: %s" %\ (headername, " ".join(locations))) return count diff --git a/tools/test/toolchains/api_test.py b/tools/test/toolchains/api_test.py index 6fd9d6ff454..6a8da396efa 100644 --- a/tools/test/toolchains/api_test.py +++ b/tools/test/toolchains/api_test.py @@ -11,9 +11,13 @@ "..")) sys.path.insert(0, ROOT) -from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES,\ - Resources, TOOLCHAIN_PATHS, mbedToolchain -from tools.targets import TARGET_MAP +from tools.toolchains import ( + TOOLCHAIN_CLASSES, + TOOLCHAIN_PATHS, + mbedToolchain, +) +from tools.resources import LEGACY_TOOLCHAIN_NAMES, Resources +from tools.targets import TARGET_MAP, set_targets_json_location from tools.notifier.mock import MockNotifier ALPHABET = [char for char in printable if char not in [u'.', u'/', u'\\']] @@ -111,6 +115,7 @@ def test_toolchain_profile_c(profile, source_file): filename = deepcopy(source_file) filename[-1] += ".c" to_compile = os.path.join(*filename) + set_targets_json_location() with patch('os.mkdir') as _mkdir: for _, tc_class in TOOLCHAIN_CLASSES.items(): toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile, @@ -241,12 +246,11 @@ def test_detect_duplicates(filenames): s_sources = [os.path.join(name, "dupe.s") for name in filenames] cpp_sources = [os.path.join(name, "dupe.cpp") for name in filenames] notify = MockNotifier() - toolchain = TOOLCHAIN_CLASSES["ARM"](TARGET_MAP["K64F"], notify=notify) - res = Resources() + res = Resources(notify) res.c_sources = c_sources res.s_sources = s_sources res.cpp_sources = cpp_sources - assert res.detect_duplicates(toolchain) == 1,\ + assert res.detect_duplicates() == 1,\ "Not Enough duplicates found" notification = notify.messages[0] From 06f3fca6cfda04ef1e8d6b6799af09944214c7b3 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 11 Jun 2018 13:40:44 -0500 Subject: [PATCH 18/44] Correct the _only_ detect_duplicates call --- tools/build_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/build_api.py b/tools/build_api.py index 4f70409e234..e43ee6fbd7a 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -561,7 +561,7 @@ def build_project(src_paths, build_path, target, toolchain_name, map_html = join(build_path, name + "_map.html") memap_instance.generate_output('html', stats_depth, map_html) - resources.detect_duplicates(toolchain) + resources.detect_duplicates() if report != None: end = time() From de913e1ea283adead7f2fed2b7271c9abe397d59 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 12 Jun 2018 15:00:56 -0500 Subject: [PATCH 19/44] Improve resources API to include more use cases --- tools/build_api.py | 4 +- tools/config/__init__.py | 22 +- tools/export/__init__.py | 100 ++++---- tools/export/exporters.py | 24 +- tools/export/uvision/__init__.py | 9 +- tools/project.py | 24 +- tools/resources/__init__.py | 379 +++++++++++++++---------------- tools/toolchains/__init__.py | 37 ++- 8 files changed, 295 insertions(+), 304 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index e43ee6fbd7a..809e67f4d11 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -42,7 +42,7 @@ MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, BUILD_DIR) -from .resources import Resources +from .resources import Resources, FileType from .notifier.mock import MockNotifier from .targets import TARGET_NAMES, TARGET_MAP from .libraries import Library @@ -529,7 +529,7 @@ def build_project(src_paths, build_path, target, toolchain_name, # Compile Sources 
objects = toolchain.compile_sources(resources, resources.inc_dirs) - resources.objects.extend(objects) + resources.add_files_to_type(FileType.OBJECT, objects) # Link Program if toolchain.config.has_regions: diff --git a/tools/config/__init__.py b/tools/config/__init__.py index 5155777f41d..e1ad2be77d9 100644 --- a/tools/config/__init__.py +++ b/tools/config/__init__.py @@ -30,6 +30,7 @@ from jinja2.environment import Environment from jsonschema import Draft4Validator, RefResolver +from ..resources import FileType from ..utils import (json_file_to_dict, intelhex_offset, integer, NotSupportedException) from ..arm_pack_manager import Cache @@ -61,6 +62,14 @@ BOOTLOADER_OVERRIDES = ROM_OVERRIDES | RAM_OVERRIDES +ALLOWED_FEATURES = [ + "UVISOR", "BLE", "CLIENT", "IPV4", "LWIP", "COMMON_PAL", "STORAGE", + "NANOSTACK","CRYPTOCELL310", + # Nanostack configurations + "LOWPAN_BORDER_ROUTER", "LOWPAN_HOST", "LOWPAN_ROUTER", "NANOSTACK_FULL", + "THREAD_BORDER_ROUTER", "THREAD_END_DEVICE", "THREAD_ROUTER", + "ETHERNET_HOST", +] # Base class for all configuration exceptions class ConfigException(Exception): @@ -396,13 +405,6 @@ class Config(object): __unused_overrides = set(["target.bootloader_img", "target.restrict_size", "target.mbed_app_start", "target.mbed_app_size"]) - # Allowed features in configurations - __allowed_features = [ - "UVISOR", "BLE", "CLIENT", "IPV4", "LWIP", "COMMON_PAL", "STORAGE", "NANOSTACK","CRYPTOCELL310", - # Nanostack configurations - "LOWPAN_BORDER_ROUTER", "LOWPAN_HOST", "LOWPAN_ROUTER", "NANOSTACK_FULL", "THREAD_BORDER_ROUTER", "THREAD_END_DEVICE", "THREAD_ROUTER", "ETHERNET_HOST" - ] - @classmethod def find_app_config(cls, top_level_dirs): app_config_location = None @@ -1043,7 +1045,7 @@ def get_features(self): .update_target(self.target) for feature in self.target.features: - if feature not in self.__allowed_features: + if feature not in ALLOWED_FEATURES: raise ConfigException( "Feature '%s' is not a supported features" % feature) @@ -1084,7 +1086,9 @@ def load_resources(self, resources): while True: # Add/update the configuration with any .json files found while # scanning - self.add_config_files(resources.json_files) + self.add_config_files( + f.path for f in resources.get_file_refs(FileType.JSON) + ) # Add features while we find new ones features = set(self.get_features()) diff --git a/tools/export/__init__.py b/tools/export/__init__.py index b395e88f396..99609634dd0 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -18,14 +18,15 @@ from __future__ import print_function, division, absolute_import import sys -from os.path import join, abspath, dirname, exists +from os.path import join, abspath, dirname, exists, isfile from os.path import basename, relpath, normpath, splitext from os import makedirs, walk import copy from shutil import rmtree, copyfile import zipfile -from ..resources import Resources +from ..resources import Resources, FileType, FileRef +from ..config import ALLOWED_FEATURES from ..build_api import prepare_toolchain from ..targets import TARGET_NAMES from . 
import (lpcxpresso, ds5_5, iar, makefile, embitz, coide, kds, simplicity, @@ -161,22 +162,23 @@ def generate_project_files(resources, export_path, target, name, toolchain, ide, return files, exporter -def _inner_zip_export(resources, inc_repos): - for loc, res in resources.items(): - to_zip = ( - res.headers + res.s_sources + res.c_sources +\ - res.cpp_sources + res.libraries + res.hex_files + \ - [res.linker_script] + res.bin_files + res.objects + \ - res.json_files + res.lib_refs + res.lib_builds) - if inc_repos: - for directory in res.repo_dirs: - for root, _, files in walk(directory): - for repo_file in files: - source = join(root, repo_file) - to_zip.append(source) - res.file_basepath[source] = res.base_path - to_zip += res.repo_files - yield loc, to_zip +def _inner_zip_export(resources, prj_files, inc_repos): + to_zip = sum((resources.get_file_refs(ftype) for ftype + in Resources.ALL_FILE_TYPES), + []) + to_zip.extend(FileRef(basename(pfile), pfile) for pfile in prj_files) + for dest, source in resources.get_file_refs(FileType.BLD_REF): + target_dir, _ = splitext(dest) + dest = join(target_dir, ".bld", "bldrc") + to_zip.append(FileRef(dest, source)) + if inc_repos: + for dest, source in resources.get_file_refs(FileType.REPO_DIR): + for root, _, files in walk(source): + for repo_file in files: + file_source = join(root, repo_file) + file_dest = join(dest, relpath(file_source, source)) + to_zip.append(FileRef(file_dest, file_source)) + return to_zip def zip_export(file_name, prefix, resources, project_files, inc_repos, notify): """Create a zip file from an exported project. @@ -188,32 +190,19 @@ def zip_export(file_name, prefix, resources, project_files, inc_repos, notify): project_files - a list of extra files to be added to the root of the prefix directory """ - to_zip_list = list(_inner_zip_export(resources, inc_repos)) - total_files = sum(len(to_zip) for _, to_zip in to_zip_list) - total_files += len(project_files) + to_zip_list = sorted(set(_inner_zip_export( + resources, project_files, inc_repos))) + total_files = len(to_zip_list) zipped = 0 with zipfile.ZipFile(file_name, "w") as zip_file: - for prj_file in project_files: - zip_file.write(prj_file, join(prefix, basename(prj_file))) - for loc, to_zip in to_zip_list: - res = resources[loc] - for source in to_zip: - if source: - zip_file.write( - source, - join(prefix, loc, - relpath(source, res.file_basepath[source]))) - notify.progress("Zipping", source, - 100 * (zipped / total_files)) - zipped += 1 - for lib, res in resources.items(): - for source in res.lib_builds: - target_dir, _ = splitext(source) - dest = join(prefix, loc, - relpath(target_dir, res.file_basepath[source]), - ".bld", "bldrc") - zip_file.write(source, dest) - + for dest, source in to_zip_list: + if source and isfile(source): + zip_file.write(source, join(prefix, dest)) + zipped += 1 + notify.progress("Zipping", source, + 100 * (zipped / total_files)) + else: + zipped += 1 def export_project(src_paths, export_path, target, ide, libraries_paths=None, @@ -275,26 +264,16 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None, if name is None: name = basename(normpath(abspath(src_paths[0]))) - resource_dict = {} + resources = Resources(notify, collect_ignores=True) for loc, path in src_paths.items(): - res = Resources(collect_ignores=True) - res.add_toolchain_labels(toolchain) + resources.add_toolchain_labels(toolchain) for p in path: - res.add_directory(p) - resource_dict[loc] = res - resources = Resources() - - for loc, res in
resource_dict.items(): - temp = copy.deepcopy(res) - temp.subtract_basepath(".", loc) - resources.add(temp) - + resources.add_directory(p, into_path=loc) toolchain.build_dir = export_path toolchain.config.load_resources(resources) toolchain.set_config_data(toolchain.config.get_config_data()) config_header = toolchain.get_config_header() - resources.headers.append(config_header) - resources.file_basepath[config_header] = dirname(config_header) + resources.add_file_ref(FileType.HEADER, basename(config_header), config_header) # Change linker script if specified if linker_script is not None: @@ -303,16 +282,13 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None, files, exporter = generate_project_files(resources, export_path, target, name, toolchain, ide, macros=macros) - files.append(config_header) if zip_proj: - for resource in resource_dict.values(): - for label, res in resource.features.items(): - resource.add(res) + resources.add_features(ALLOWED_FEATURES) if isinstance(zip_proj, basestring): - zip_export(join(export_path, zip_proj), name, resource_dict, + zip_export(join(export_path, zip_proj), name, resources, files + list(exporter.static_files), inc_repos, notify) else: - zip_export(zip_proj, name, resource_dict, + zip_export(zip_proj, name, resources, files + list(exporter.static_files), inc_repos, notify) else: for static_file in exporter.static_files: diff --git a/tools/export/exporters.py b/tools/export/exporters.py index ee900ed8eca..4b849b63d15 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -10,6 +10,7 @@ from tools.targets import TARGET_MAP from tools.utils import mkdir +from tools.resources import FileType class TargetNotSupportedException(Exception): @@ -87,12 +88,8 @@ def get_toolchain(self): return self.TOOLCHAIN def add_config(self): - """Add the containgin directory of mbed_config.h to include dirs""" - config = self.toolchain.get_config_header() - if config: - self.resources.inc_dirs.append( - dirname(relpath(config, - self.resources.file_basepath[config]))) + """Add the containing directory of mbed_config.h to include dirs""" + pass @property def flags(self): @@ -116,11 +113,15 @@ def flags(self): flags['c_flags'] += c_defines flags['cxx_flags'] += c_defines if config_header: - config_header = relpath(config_header, - self.resources.file_basepath[config_header]) - flags['c_flags'] += self.toolchain.get_config_option(config_header) + def is_config_header(f): + return f.path == config_header + config_header= filter( + is_config_header, self.resources.get_file_refs(FileType.HEADER) + )[0] + flags['c_flags'] += self.toolchain.get_config_option( + config_header.name) flags['cxx_flags'] += self.toolchain.get_config_option( - config_header) + config_header.name) return flags def get_source_paths(self): @@ -181,8 +182,7 @@ def make_key(self, src): Positional Arguments: src - the src's location """ - rel_path = relpath(src, self.resources.file_basepath[src]) - path_list = os.path.normpath(rel_path).split(os.sep) + path_list = os.path.normpath(src).split(os.sep) assert len(path_list) >= 1 if len(path_list) == 1: key = self.project_name diff --git a/tools/export/uvision/__init__.py b/tools/export/uvision/__init__.py index fad3b472cef..50996218d55 100644 --- a/tools/export/uvision/__init__.py +++ b/tools/export/uvision/__init__.py @@ -2,7 +2,7 @@ from builtins import str import os -from os.path import sep, normpath, join, exists +from os.path import sep, normpath, join, exists, dirname import ntpath import copy from collections 
import namedtuple @@ -10,6 +10,7 @@ from subprocess import Popen, PIPE import re +from tools.resources import FileType from tools.arm_pack_manager import Cache from tools.targets import TARGET_MAP from tools.export.exporters import Exporter, apply_supported_whitelist @@ -228,10 +229,10 @@ def generate(self): self.resources.inc_dirs).encode('utf-8'), 'device': DeviceUvision(self.target), } - sct_file = self.resources.linker_script + sct_name, sct_path = self.resources.get_file_refs(FileType.LD_SCRIPT)[0] ctx['linker_script'] = self.toolchain.correct_scatter_shebang( - sct_file, self.resources.file_basepath[sct_file]) - if ctx['linker_script'] != sct_file: + sct_path, dirname(sct_name)) + if ctx['linker_script'] != sct_path: self.generated_files.append(ctx['linker_script']) core = ctx['device'].core ctx['cputype'] = core.rstrip("FD") diff --git a/tools/project.py b/tools/project.py index b9f9e3f63fd..86492809f6b 100644 --- a/tools/project.py +++ b/tools/project.py @@ -55,10 +55,11 @@ def resolve_exporter_alias(ide): def setup_project( ide, target, - program=None, - source_dir=None, - build=None, - export_path=None + zip, + program, + source_dir, + build, + export_path, ): """Generate a name, if not provided, and find dependencies @@ -82,7 +83,10 @@ def setup_project( project_name = TESTS[program] else: project_name = basename(normpath(realpath(source_dir[0]))) - src_paths = {relpath(path, project_dir): [path] for path in source_dir} + if zip: + src_paths = {path.strip(".\\/"): [path] for path in source_dir} + else: + src_paths = {relpath(path, project_dir): [path] for path in source_dir} lib_paths = None else: test = Test(program) @@ -124,6 +128,7 @@ def export(target, ide, build=None, src=None, macros=None, project_id=None, project_dir, name, src, lib = setup_project( ide, target, + bool(zip_proj), program=project_id, source_dir=src, build=build, @@ -289,6 +294,13 @@ def get_args(argv): default=None ) + parser.add_argument( + "-z", + action="store_true", + default=None, + dest="zip", + ) + parser.add_argument( "--ignore", dest="ignore", @@ -352,7 +364,7 @@ def main(): src=options.source_dir, macros=options.macros, project_id=options.program, - zip_proj=not bool(options.source_dir), + zip_proj=not bool(options.source_dir) or options.zip, build_profile=profile, app_config=options.app_config, export_path=options.build_dir, diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index ea9150c7cae..94aae53c178 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -34,14 +34,13 @@ import fnmatch import re +from collections import namedtuple, defaultdict from copy import copy from itertools import chain from os import walk from os.path import (join, splitext, dirname, relpath, basename, split, normcase, abspath, exists) -from ..toolchains import TOOLCHAINS - # Support legacy build conventions: the original mbed build system did not have # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but # had the knowledge of a list of these directories to be ignored. 
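The hunks that follow replace the per-category lists on Resources with a single table of FileRef entries keyed by FileType. A minimal sketch of the consumer-side API they introduce; the header name and path are assumed examples:

    # Sketch of the FileRef/FileType API introduced below (PATCH 19).
    from tools.notifier.mock import MockNotifier
    from tools.resources import FileType, Resources

    res = Resources(MockNotifier())
    res.add_file_ref(FileType.HEADER, "mbed_config.h", "BUILD/export/mbed_config.h")
    for name, path in res.get_file_refs(FileType.HEADER):
        print(name, "->", path)   # project-relative name -> on-disk location
    print(res.headers)            # the old list attribute survives as a property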
@@ -79,39 +78,53 @@ } +FileRef = namedtuple("FileRef", "name path") + +class FileType(object): + C_SRC = "c" + CPP_SRC = "c++" + ASM_SRC = "s" + HEADER = "header" + INC_DIR = "inc" + LIB_DIR = "libdir" + LIB = "lib" + OBJECT = "o" + HEX = "hex" + BIN = "bin" + JSON = "json" + LD_SCRIPT = "ld" + LIB_REF = "libref" + BLD_REF = "bldref" + REPO_DIR = "repodir" + + def __init__(self): + raise NotImplementedError + class Resources(object): - def __init__(self, notify, base_path=None, collect_ignores=False): + ALL_FILE_TYPES = [ + FileType.C_SRC, + FileType.CPP_SRC, + FileType.ASM_SRC, + FileType.HEADER, + FileType.INC_DIR, + FileType.LIB_DIR, + FileType.LIB, + FileType.OBJECT, + FileType.HEX, + FileType.BIN, + FileType.JSON, + FileType.LD_SCRIPT, + FileType.LIB_REF, + FileType.BLD_REF, + FileType.REPO_DIR, + ] + + def __init__(self, notify, collect_ignores=False): self.notify = notify - self.base_path = base_path self.collect_ignores = collect_ignores + self._file_refs = defaultdict(list) self._label_paths = [] - self.file_basepath = {} - - self.inc_dirs = [] - self.headers = [] - - self.s_sources = [] - self.c_sources = [] - self.cpp_sources = [] - - self.lib_dirs = set([]) - self.objects = [] - self.libraries = [] - - # mbed special files - self.lib_builds = [] - self.lib_refs = [] - - self.repo_dirs = [] - self.repo_files = [] - - self.linker_script = None - - # Other files - self.hex_files = [] - self.bin_files = [] - self.json_files = [] self.ignored_dirs = [] @@ -122,12 +135,13 @@ def __init__(self, notify, base_path=None, collect_ignores=False): } # Pre-mbed 2.0 ignore dirs - self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) + self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS # Ignore patterns from .mbedignore files self.ignore_patterns = [] self._ignore_regex = re.compile("$^") + def __add__(self, resources): if resources is None: return self @@ -145,79 +159,15 @@ def ignore_dir(self, directory): self.ignored_dirs.append(directory) def add(self, resources): - for f,p in resources.file_basepath.items(): - self.file_basepath[f] = p - - self.inc_dirs += resources.inc_dirs - self.headers += resources.headers - - self.s_sources += resources.s_sources - self.c_sources += resources.c_sources - self.cpp_sources += resources.cpp_sources - + for file_type in self.ALL_FILE_TYPES: + self._file_refs[file_type].extend(resources._file_refs[file_type]) self.lib_dirs |= resources.lib_dirs - self.objects += resources.objects - self.libraries += resources.libraries - - self.lib_builds += resources.lib_builds - self.lib_refs += resources.lib_refs - - self.repo_dirs += resources.repo_dirs - self.repo_files += resources.repo_files - - if resources.linker_script is not None: - self.linker_script = resources.linker_script - - self.hex_files += resources.hex_files - self.bin_files += resources.bin_files - self.json_files += resources.json_files self.ignored_dirs += resources.ignored_dirs self._label_paths += resources._label_paths return self - def rewrite_basepath(self, file_name, export_path, loc): - """ Replace the basepath of filename with export_path - - Positional arguments: - file_name - the absolute path to a file - export_path - the final destination of the file after export - """ - new_f = join(loc, relpath(file_name, self.file_basepath[file_name])) - self.file_basepath[new_f] = export_path - return new_f - - def subtract_basepath(self, export_path, loc=""): - """ Rewrite all of the basepaths with the export_path - - Positional arguments: - export_path - the final destination of the resources with
respect to the - generated project files - """ - keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', - 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script', - 'lib_dirs'] - for key in keys: - vals = getattr(self, key) - if isinstance(vals, set): - vals = list(vals) - if isinstance(vals, list): - new_vals = [] - for val in vals: - new_vals.append(self.rewrite_basepath( - val, export_path, loc)) - if isinstance(getattr(self, key), set): - setattr(self, key, set(new_vals)) - else: - setattr(self, key, new_vals) - elif vals: - setattr(self, key, self.rewrite_basepath( - vals, export_path, loc)) - def closure(res, export_path=export_path, loc=loc): - res.subtract_basepath(export_path, loc) - return res - def _collect_duplicates(self, dupe_dict, dupe_headers): for filename in self.s_sources + self.c_sources + self.cpp_sources: objname, _ = splitext(basename(filename)) @@ -249,52 +199,41 @@ def detect_duplicates(self): (headername, " ".join(locations))) return count - def relative_to(self, base, dot=False): - for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', - 'cpp_sources', 'lib_dirs', 'objects', 'libraries', - 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', - 'hex_files', 'bin_files', 'json_files']: - v = [rel_path(f, base, dot) for f in getattr(self, field)] - setattr(self, field, v) - - if self.linker_script is not None: - self.linker_script = rel_path(self.linker_script, base, dot) + for file_type in self.ALL_FILE_TYPES: + v = [f._replace(name=rel_path(f, base, dot)) for + f in self.get_file_refs(file_type)] + self._file_refs[file_type] = v def win_to_unix(self): - for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', - 'cpp_sources', 'lib_dirs', 'objects', 'libraries', - 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', - 'hex_files', 'bin_files', 'json_files']: - v = [f.replace('\\', '/') for f in getattr(self, field)] - setattr(self, field, v) - - if self.linker_script is not None: - self.linker_script = self.linker_script.replace('\\', '/') + for file_type in self.ALL_FILE_TYPES: + v = [f._replace(name=f.replace('\\', '/')) for + f in self.get_file_refs(file_type)] + self._file_refs[file_type] = v def __str__(self): s = [] - for (label, resources) in ( - ('Include Directories', self.inc_dirs), - ('Headers', self.headers), + for (label, file_type) in ( + ('Include Directories', FileType.INC_DIR), + ('Headers', FileType.HEADER), - ('Assembly sources', self.s_sources), - ('C sources', self.c_sources), - ('C++ sources', self.cpp_sources), + ('Assembly sources', FileType.ASM_SRC), + ('C sources', FileType.C_SRC), + ('C++ sources', FileType.CPP_SRC), - ('Library directories', self.lib_dirs), - ('Objects', self.objects), - ('Libraries', self.libraries), + ('Library directories', FileType.LIB_DIR), + ('Objects', FileType.OBJECT), + ('Libraries', FileType.LIB), - ('Hex files', self.hex_files), - ('Bin files', self.bin_files), + ('Hex files', FileType.HEX), + ('Bin files', FileType.BIN), + ('Linker script', FileType.LD_SCRIPT) ): + resources = self.get_file_refs(file_type) if resources: - s.append('%s:\n ' % label + '\n '.join(resources)) - - if self.linker_script: - s.append('Linker Script: ' + self.linker_script) + s.append('%s:\n ' % label + '\n '.join( + "%s -> %s" % (name, path) for name, path in resources)) return '\n'.join(s) @@ -303,10 +242,10 @@ def _add_labels(self, prefix, labels): self.labels.setdefault(prefix, []) self.labels[prefix].extend(labels) prefixed_labels = set("%s_%s" % (prefix, label) for label in labels) - for path, 
base_path in self._label_paths: + for path, base_path, into_path in self._label_paths: if basename(path) in prefixed_labels: - self.add_directory(path, base_path) - self._label_paths = [(p, b) for p, b in self._label_paths + self.add_directory(path, base_path, into_path) + self._label_paths = [(p, b, i) for p, b, i in self._label_paths if basename(p) not in prefixed_labels] def add_target_labels(self, target): @@ -345,7 +284,83 @@ def _not_current_label(self, dirname, label_type): return (dirname.startswith(label_type + "_") and dirname[len(label_type) + 1:] not in self.labels[label_type]) - def add_directory(self, path, base_path=None, exclude_paths=None): + def add_file_ref(self, file_type, file_name, file_path): + ref = FileRef(file_name, file_path) + self._file_refs[file_type].append(ref) + + def get_file_refs(self, file_type): + """Return a list of FileRef for every file of the given type""" + return self._file_refs[file_type] + + def get_file_names(self, file_type): + return [f.name for f in self.get_file_refs(file_type)] + + def add_files_to_type(self, file_type, files): + self._file_refs[file_type].extend(FileRef(f, f) for f in files) + + @property + def inc_dirs(self): + return self.get_file_names(FileType.INC_DIR) + + @property + def headers(self): + return self.get_file_names(FileType.HEADER) + + @property + def s_sources(self): + return self.get_file_names(FileType.ASM_SRC) + + @property + def c_sources(self): + return self.get_file_names(FileType.C_SRC) + + @property + def cpp_sources(self): + return self.get_file_names(FileType.CPP_SRC) + + @property + def lib_dirs(self): + return self.get_file_names(FileType.LIB_DIR) + + @property + def objects(self): + return self.get_file_names(FileType.OBJECT) + + @property + def libraries(self): + return self.get_file_names(FileType.LIB) + + @property + def lib_builds(self): + return self.get_file_names(FileType.BLD_REF) + + @property + def lib_refs(self): + return self.get_file_names(FileType.LIB_REF) + + @property + def linker_script(self): + return self.get_file_names(FileType.LD_SCRIPT)[0] + + @property + def hex_files(self): + return self.get_file_names(FileType.HEX) + + @property + def bin_files(self): + return self.get_file_names(FileType.BIN) + + @property + def json_files(self): + return self.get_file_names(FileType.JSON) + + def add_directory( + self, + path, + base_path=None, + into_path=None, + exclude_paths=None, + ): """ Scan a directory and include its resources in this resources object Positional arguments: path - the path to search for resources Keyword arguments base_path - If this is part of an incremental scan, include the origin directory root of the scan here + into_path - Pretend that scanned files are within the specified + directory within a project instead of using their actual path exclude_paths - A list of paths that are to be excluded from a build """ self.notify.progress("scan", abspath(path)) if base_path is None: base_path = path + if into_path is None: + into_path = path if self.collect_ignores and path in self.ignored_dirs: self.ignored_dirs.remove(path) if exclude_paths: @@ -384,11 +403,12 @@ def add_directory(self, path, base_path=None, exclude_paths=None): for d in copy(dirs): dir_path = join(root, d) if d == '.hg' or d == '.git': - self.repo_dirs.append(dir_path) + fake_path = join(into_path, relpath(dir_path, base_path)) + self.add_file_ref(FileType.REPO_DIR, fake_path, dir_path) if (any(self._not_current_label(d, t) for t in ['TARGET', 'TOOLCHAIN',
'FEATURE'])): - self._label_paths.append((dir_path, base_path)) + self._label_paths.append((dir_path, base_path, into_path)) self.ignore_dir(dir_path) dirs.remove(d) elif (d.startswith('.') or d in self.legacy_ignore_dirs or @@ -398,14 +418,35 @@ def add_directory(self, path, base_path=None, exclude_paths=None): # Add root to include paths root = root.rstrip("/") - self.inc_dirs.append(root) - self.file_basepath[root] = base_path + fake_root = join(into_path, relpath(root, base_path)) + self.add_file_ref(FileType.INC_DIR, fake_root, root) for file in files: file_path = join(root, file) - self._add_file(file_path, base_path) - - def _add_file(self, file_path, base_path): + self._add_file(file_path, base_path, into_path) + + _EXT = { + ".c": FileType.C_SRC, + ".cc": FileType.CPP_SRC, + ".cpp": FileType.CPP_SRC, + ".s": FileType.ASM_SRC, + ".h": FileType.HEADER, + ".hh": FileType.HEADER, + ".hpp": FileType.HEADER, + ".o": FileType.OBJECT, + ".hex": FileType.HEX, + ".bin": FileType.BIN, + ".json": FileType.JSON, + ".a": FileType.LIB, + ".ar": FileType.LIB, + ".sct": FileType.LD_SCRIPT, + ".ld": FileType.LD_SCRIPT, + ".icf": FileType.LD_SCRIPT, + ".lib": FileType.LIB_REF, + ".bld": FileType.BLD_REF, + } + + def _add_file(self, file_path, base_path, into_path): """ Add a single file into the resources object that was found by scanning starting at base_path """ @@ -415,55 +456,13 @@ def _add_file(self, file_path, base_path): self.ignore_dir(relpath(file_path, base_path)) return - self.file_basepath[file_path] = base_path + fake_path = join(into_path, relpath(file_path, base_path)) _, ext = splitext(file_path) - ext = ext.lower() - - if ext == '.s': - self.s_sources.append(file_path) - - elif ext == '.c': - self.c_sources.append(file_path) - - elif ext == '.cpp' or ext == '.cc': - self.cpp_sources.append(file_path) - - elif ext == '.h' or ext == '.hpp' or ext == '.hh': - self.headers.append(file_path) - - elif ext == '.o': - self.objects.append(file_path) - - elif ext in ('.a', '.ar'): - self.libraries.append(file_path) - self.lib_dirs.add(dirname(file_path)) - - elif ext in ('.sct', '.icf', '.ld'): - if self.linker_script is not None: - self.notify.info("Warning: Multiple linker scripts detected: %s and %s" % (self.linker_script, file_path)) - else: - self.linker_script = file_path - - elif ext == '.lib': - self.lib_refs.append(file_path) - - elif ext == '.bld': - self.lib_builds.append(file_path) - - elif basename(file_path) == '.hgignore': - self.repo_files.append(file_path) - - elif basename(file_path) == '.gitignore': - self.repo_files.append(file_path) - - elif ext == '.hex': - self.hex_files.append(file_path) - - elif ext == '.bin': - self.bin_files.append(file_path) - - elif ext == '.json': - self.json_files.append(file_path) + try: + file_type = self._EXT[ext.lower()] + self.add_file_ref(file_type, fake_path, file_path) + except KeyError: + pass def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index 3a340fbb518..34ed460fff1 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -38,6 +38,7 @@ from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK from ..
import hooks from ..notifier.term import TerminalNotifier +from ..resources import FileType from ..memap import MemapParser from ..config import ConfigException @@ -284,7 +285,7 @@ def scan_repository(self, path): return resources - def copy_files(self, files_paths, trg_path, resources=None, rel_path=None): + def copy_files(self, files_paths, trg_path, resources=None): # Handle a single file if not isinstance(files_paths, list): files_paths = [files_paths] @@ -294,12 +295,7 @@ def copy_files(self, files_paths, trg_path, resources=None, rel_path=None): files_paths.remove(source) for source in files_paths: - if resources is not None and source in resources.file_basepath: - relative_path = relpath(source, resources.file_basepath[source]) - elif rel_path is not None: - relative_path = relpath(source, rel_path) - else: - _, relative_path = split(source) + _, relative_path = split(source) target = join(trg_path, relative_path) @@ -310,10 +306,10 @@ def copy_files(self, files_paths, trg_path, resources=None, rel_path=None): # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY - def relative_object_path(self, build_path, base_dir, source): - source_dir, name, _ = split_path(source) + def relative_object_path(self, build_path, file_ref): + source_dir, name, _ = split_path(file_ref.name) - obj_dir = relpath(join(build_path, relpath(source_dir, base_dir))) + obj_dir = relpath(join(build_path, source_dir)) if obj_dir is not self.prev_dir: self.prev_dir = obj_dir mkdir(obj_dir) @@ -368,7 +364,11 @@ def get_arch_file(self, objects): # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY def compile_sources(self, resources, inc_dirs=None): # Web IDE progress bar for project build - files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources + files_to_compile = ( + resources.get_file_refs(FileType.ASM_SRC) + + resources.get_file_refs(FileType.C_SRC) + + resources.get_file_refs(FileType.CPP_SRC) + ) self.to_be_compiled = len(files_to_compile) self.compiled = 0 @@ -399,11 +399,10 @@ def compile_sources(self, resources, inc_dirs=None): # Sort compile queue for consistency files_to_compile.sort() for source in files_to_compile: - object = self.relative_object_path( - self.build_dir, resources.file_basepath[source], source) + object = self.relative_object_path(self.build_dir, source) # Queue mode (multiprocessing) - commands = self.compile_command(source, object, inc_paths) + commands = self.compile_command(source.path, object, inc_paths) if commands is not None: queue.append({ 'source': source, @@ -429,7 +428,7 @@ def compile_seq(self, queue, objects): result = compile_worker(item) self.compiled += 1 - self.progress("compile", item['source'], build_update=True) + self.progress("compile", item['source'].name, build_update=True) for res in result['results']: self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source']) self.compile_output([ @@ -467,7 +466,7 @@ def compile_queue(self, queue, objects): results.remove(r) self.compiled += 1 - self.progress("compile", result['source'], build_update=True) + self.progress("compile", result['source'].name, build_update=True) for res in result['results']: self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source']) self.compile_output([ @@ -628,15 +627,15 @@ def link_program(self, r, tmp_path, name): bin = None if ext == 'elf' else full_path map = join(tmp_path, name + '.map') - r.objects = 
sorted(set(r.objects)) + objects = sorted(set(r.objects)) config_file = ([self.config.app_config_location] if self.config.app_config_location else []) - dependencies = r.objects + r.libraries + [r.linker_script] + config_file + dependencies = objects + r.libraries + [r.linker_script] + config_file dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) if self.need_update(elf, dependencies): needed_update = True self.progress("link", name) - self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script) + self.link(elf, objects, r.libraries, r.lib_dirs, r.linker_script) if bin and self.need_update(bin, [elf]): needed_update = True From 0294f62c74aef75c6f52e040f978c5132e225c54 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 13 Jun 2018 13:46:27 -0500 Subject: [PATCH 20/44] Put all src_paths into the root --- tools/resources/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 94aae53c178..20856c9cdf5 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -483,9 +483,9 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, for path in src_paths: if exists(path): if exclude: - self.add_directory(path, exclude_paths=[toolchain.build_dir]) + self.add_directory(path, into_path="", exclude_paths=[toolchain.build_dir]) else: - self.add_directory(path) + self.add_directory(path, into_path="") # Scan dependency paths for include dirs if dependencies_paths is not None: From 60e08b196c46a685d570b51035787b1831808b77 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 13 Jun 2018 13:56:21 -0500 Subject: [PATCH 21/44] Don't flatten build directories Make a really good guess --- tools/resources/__init__.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 20856c9cdf5..43a321ab9a4 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -482,10 +482,15 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, self.add_toolchain_labels(toolchain) for path in src_paths: if exists(path): + into_path = relpath(path).strip(".\\/") if exclude: - self.add_directory(path, into_path="", exclude_paths=[toolchain.build_dir]) + self.add_directory( + path, + into_path=into_path, + exclude_paths=[toolchain.build_dir] + ) else: - self.add_directory(path, into_path="") + self.add_directory(path, into_path=into_path) # Scan dependency paths for include dirs if dependencies_paths is not None: From 06615780542c29d8aff1303b77f016a6a2befa80 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 13 Jun 2018 14:02:39 -0500 Subject: [PATCH 22/44] Use include paths when compiling We were using include names before --- tools/toolchains/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index 34ed460fff1..414abde7779 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -374,7 +374,8 @@ def compile_sources(self, resources, inc_dirs=None): self.notify.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()])) - inc_paths = resources.inc_dirs + inc_paths = [path for _, path + in resources.get_file_refs(FileType.INC_DIR)] if inc_dirs is not None: if isinstance(inc_dirs, list): inc_paths.extend(inc_dirs) From 8303539415a8640b04330a17808f32729fa86ff5 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 
13 Jun 2018 14:19:09 -0500 Subject: [PATCH 23/44] Remove unused APIs --- tools/resources/__init__.py | 38 ++++++++----------------------------- 1 file changed, 8 insertions(+), 30 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 43a321ab9a4..66db7e65904 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -124,6 +124,7 @@ def __init__(self, notify, collect_ignores=False): self.collect_ignores = collect_ignores self._file_refs = defaultdict(list) self._label_paths = [] + self._win_to_unix = False self.ignored_dirs = [] @@ -142,32 +143,10 @@ def __init__(self, notify, collect_ignores=False): self._ignore_regex = re.compile("$^") - def __add__(self, resources): - if resources is None: - return self - else: - return self.add(resources) - - def __radd__(self, resources): - if resources is None: - return self - else: - return self.add(resources) - def ignore_dir(self, directory): if self.collect_ignores: self.ignored_dirs.append(directory) - def add(self, resources): - for file_type in self.ALL_FILE_TYPES: - self._file_refs[file_type].extend(resources._file_refs[file_type]) - self.lib_dirs |= resources.lib_dirs - - self.ignored_dirs += resources.ignored_dirs - self._label_paths += resources._label_paths - - return self - def _collect_duplicates(self, dupe_dict, dupe_headers): for filename in self.s_sources + self.c_sources + self.cpp_sources: objname, _ = splitext(basename(filename)) @@ -199,13 +178,8 @@ def detect_duplicates(self): (headername, " ".join(locations))) return count - def relative_to(self, base, dot=False): - for file_type in self.ALL_FILE_TYPES: - v = [f._replace(name=rel_path(f, base, dot)) for - f in self.get_file_refs(file_type)] - self._file_refs[file_type] = v - def win_to_unix(self): + self._win_to_unix = True for file_type in self.ALL_FILE_TYPES: v = [f._replace(name=f.replace('\\', '/')) for f in self.get_file_refs(file_type)] @@ -285,7 +259,10 @@ def _not_current_label(self, dirname, label_type): dirname[len(label_type) + 1:] not in self.labels[label_type]) def add_file_ref(self, file_type, file_name, file_path): - ref = FileRef(file_name, file_path) + if self._win_to_unix: + ref = FileRef(file_name.replace("\\", "/"), file_path) + else: + ref = FileRef(file_name, file_path) self._file_refs[file_type].append(ref) def get_file_refs(self, file_type): @@ -296,7 +273,8 @@ def get_file_names(self, file_type): return [f.name for f in self.get_file_refs(file_type)] def add_files_to_type(self, file_type, files): - self._file_refs[file_type].extend(FileRef(f, f) for f in files) + for f in files: + self.add_file_ref(file_type, f, f) @property def inc_dirs(self): From 4109aa52e6a4e911738d8dc9498715f2aab2b5c3 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 13 Jun 2018 14:27:11 -0500 Subject: [PATCH 24/44] Privatize parts of the API that I don't want public --- tools/resources/__init__.py | 63 +++++++++++++++++++------------------ 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 66db7e65904..5fe32085ada 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -120,31 +120,33 @@ class Resources(object): ] def __init__(self, notify, collect_ignores=False): - self.notify = notify - self.collect_ignores = collect_ignores - self._file_refs = defaultdict(list) - self._label_paths = [] - self._win_to_unix = False + # publicly accessible things + self.ignored_dirs = [] + # Pre-mbed 2.0 ignore dirs + 
self._legacy_ignore_dirs = (LEGACY_IGNORE_DIRS) - self.ignored_dirs = [] + # Private parameters + self._notify = notify + self._collect_ignores = collect_ignores - self.labels = { - "TARGET": [], - "TOOLCHAIN": [], - "FEATURE": [] - } + # Storage for file references, indexed by file type + self._file_refs = defaultdict(list) - # Pre-mbed 2.0 ignore dirs - self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS) + # Incremental scan related + self._label_paths = [] + self._labels = {"TARGET": [], "TOOLCHAIN": [], "FEATURE": []} + + # Should we convert all paths to unix-style? + self._win_to_unix = False - # Ignore patterns from .mbedignore files - self.ignore_patterns = [] + # Ignore patterns from .mbedignore files and add_ignore_patterns + self._ignore_patterns = [] self._ignore_regex = re.compile("$^") def ignore_dir(self, directory): - if self.collect_ignores: + if self._collect_ignores: self.ignored_dirs.append(directory) def _collect_duplicates(self, dupe_dict, dupe_headers): @@ -167,13 +169,13 @@ def detect_duplicates(self): for objname, filenames in dupe_dict.items(): if len(filenames) > 1: count+=1 - self.notify.tool_error( + self._notify.tool_error( "Object file %s.o is not unique! It could be made from: %s"\ % (objname, " ".join(filenames))) for headername, locations in dupe_headers.items(): if len(locations) > 1: count+=1 - self.notify.tool_error( + self._notify.tool_error( "Header file %s is not unique! It could be: %s" %\ (headername, " ".join(locations))) return count @@ -213,8 +215,7 @@ def __str__(self): def _add_labels(self, prefix, labels): - self.labels.setdefault(prefix, []) - self.labels[prefix].extend(labels) + self._labels[prefix].extend(labels) prefixed_labels = set("%s_%s" % (prefix, label) for label in labels) for path, base_path, into_path in self._label_paths: if basename(path) in prefixed_labels: @@ -231,7 +232,7 @@ def add_features(self, features): def add_toolchain_labels(self, toolchain): for prefix, value in toolchain.get_labels().items(): self._add_labels(prefix, value) - self.legacy_ignore_dirs -= set( + self._legacy_ignore_dirs -= set( [toolchain.target.name, LEGACY_TOOLCHAIN_NAMES[toolchain.name]]) def is_ignored(self, file_path): @@ -248,15 +249,17 @@ def add_ignore_patterns(self, root, base_path, patterns): """ real_base = relpath(root, base_path) if real_base == ".": - self.ignore_patterns.extend(normcase(p) for p in patterns) + self._ignore_patterns.extend(normcase(p) for p in patterns) else: - self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns) - if self.ignore_patterns: - self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns)) + self._ignore_patterns.extend( + normcase(join(real_base, pat)) for pat in patterns) + if self._ignore_patterns: + self._ignore_regex = re.compile("|".join( + fnmatch.translate(p) for p in self._ignore_patterns)) def _not_current_label(self, dirname, label_type): return (dirname.startswith(label_type + "_") and - dirname[len(label_type) + 1:] not in self.labels[label_type]) + dirname[len(label_type) + 1:] not in self._labels[label_type]) def add_file_ref(self, file_type, file_name, file_path): if self._win_to_unix: @@ -351,13 +354,13 @@ def add_directory( directory within a project instead of using their actual path exclude_paths - A list of paths that are to be excluded from a build """ - self.notify.progress("scan", abspath(path)) + self._notify.progress("scan", abspath(path)) if base_path is None: base_path = path if into_path is None: into_path = path - if
self.collect_ignores and path in self.ignored_dirs: + if self._collect_ignores and path in self.ignored_dirs: self.ignored_dirs.remove(path) if exclude_paths: self.add_ignore_patterns( @@ -389,7 +392,7 @@ def add_directory( self._label_paths.append((dir_path, base_path, into_path)) self.ignore_dir(dir_path) dirs.remove(d) - elif (d.startswith('.') or d in self.legacy_ignore_dirs or + elif (d.startswith('.') or d in self._legacy_ignore_dirs or self.is_ignored(join(root_path, d, ""))): self.ignore_dir(dir_path) dirs.remove(d) @@ -474,7 +477,7 @@ def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, if dependencies_paths is not None: toolchain.progress("dep", dependencies_paths) for dep in dependencies_paths: - lib_self = self.__class__(self.notify, self.collect_ignores)\ + lib_self = self.__class__(self._notify, self._collect_ignores)\ .scan_with_toolchain([dep], toolchain) self.inc_dirs.extend(lib_self.inc_dirs) From 7a26cd8da8c5cf1eddb05fab0aeeb57ccc6c1147 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 13 Jun 2018 14:41:52 -0500 Subject: [PATCH 25/44] Use paths in linking --- tools/build_api.py | 2 +- tools/resources/__init__.py | 2 +- tools/toolchains/__init__.py | 7 +++++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 809e67f4d11..94fee9410ab 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -525,7 +525,7 @@ def build_project(src_paths, build_path, target, toolchain_name, # Change linker script if specified if linker_script is not None: - resources.linker_script = linker_script + resources.add_file_ref(linker_script, linker_script) # Compile Sources objects = toolchain.compile_sources(resources, resources.inc_dirs) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 5fe32085ada..9ecf98bf5bb 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -321,7 +321,7 @@ def lib_refs(self): @property def linker_script(self): - return self.get_file_names(FileType.LD_SCRIPT)[0] + return self.get_file_names(FileType.LD_SCRIPT)[-1] @property def hex_files(self): diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index 414abde7779..9e990253747 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -631,12 +631,15 @@ def link_program(self, r, tmp_path, name): objects = sorted(set(r.objects)) config_file = ([self.config.app_config_location] if self.config.app_config_location else []) - dependencies = objects + r.libraries + [r.linker_script] + config_file + linker_script = [path for _, path in r.get_file_refs(FileType.LD_SCRIPT) + if path.endswith(self.LINKER_EXT)][-1] + lib_dirs = [path for _, path in r.get_file_refs(FileType.LIB_DIR)] + dependencies = objects + r.libraries + [linker_script] + config_file dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) if self.need_update(elf, dependencies): needed_update = True self.progress("link", name) - self.link(elf, objects, r.libraries, r.lib_dirs, r.linker_script) + self.link(elf, objects, r.libraries, lib_dirs, linker_script) if bin and self.need_update(bin, [elf]): needed_update = True From f143019e9a811dec99cc113242cf94492e3ded02 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Fri, 15 Jun 2018 12:43:06 -0500 Subject: [PATCH 26/44] Correct resources API usage in dupe test --- tools/test/toolchains/api_test.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/test/toolchains/api_test.py 
b/tools/test/toolchains/api_test.py index 6a8da396efa..2ca11158492 100644 --- a/tools/test/toolchains/api_test.py +++ b/tools/test/toolchains/api_test.py @@ -16,7 +16,7 @@ TOOLCHAIN_PATHS, mbedToolchain, ) -from tools.resources import LEGACY_TOOLCHAIN_NAMES, Resources +from tools.resources import LEGACY_TOOLCHAIN_NAMES, Resources, FileType from tools.targets import TARGET_MAP, set_targets_json_location from tools.notifier.mock import MockNotifier @@ -247,9 +247,9 @@ def test_detect_duplicates(filenames): cpp_sources = [os.path.join(name, "dupe.cpp") for name in filenames] notify = MockNotifier() res = Resources(notify) - res.c_sources = c_sources - res.s_sources = s_sources - res.cpp_sources = cpp_sources + res.add_files_to_type(FileType.C_SRC, c_sources) + res.add_files_to_type(FileType.ASM_SRC, s_sources) + res.add_files_to_type(FileType.CPP_SRC, cpp_sources) assert res.detect_duplicates() == 1,\ "Not Enough duplicates found" From 6a875101921f6ee9612a3f8d8171dc1bc5cc7b86 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Fri, 15 Jun 2018 13:05:38 -0500 Subject: [PATCH 27/44] Correct several exporter uses of file_basepath with a dedupe --- tools/export/cces/__init__.py | 6 +-- tools/export/exporters.py | 43 ++++++++++++++++++---- tools/export/gnuarmeclipse/__init__.py | 51 -------------------------- tools/export/makefile/__init__.py | 8 ++-- tools/export/nb/__init__.py | 25 ------------- tools/export/sw4stm32/__init__.py | 6 +-- tools/toolchains/arm.py | 11 ++++-- 7 files changed, 51 insertions(+), 99 deletions(-) diff --git a/tools/export/cces/__init__.py b/tools/export/cces/__init__.py index b21ae220827..b2df58a5837 100644 --- a/tools/export/cces/__init__.py +++ b/tools/export/cces/__init__.py @@ -69,13 +69,11 @@ def flags(self): Skip macros because headless tools handles them separately """ - config_header = self.toolchain.get_config_header() flags = {key + "_flags": copy.deepcopy(value) for key, value \ in self.toolchain.flags.iteritems()} + config_header = self.config_header_ref if config_header: - config_header = os.path.relpath(config_header, \ - self.resources.file_basepath[config_header]) - config_header = "\\\"" + self.format_inc_path(config_header) \ + config_header = "\\\"" + self.format_inc_path(config_header.name) \ + "\\\"" header_options = self.toolchain.get_config_option(config_header) flags['c_flags'] += header_options diff --git a/tools/export/exporters.py b/tools/export/exporters.py index 4b849b63d15..7b7e40cbb4d 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -101,9 +101,7 @@ def flags(self): asm_flags - assembler flags common_flags - common options """ - config_header = self.toolchain.get_config_header() - flags = {key + "_flags": copy.deepcopy(value) for key, value - in self.toolchain.flags.items()} + flags = self.toolchain_flags(self.toolchain) asm_defines = self.toolchain.get_compile_options( self.toolchain.get_symbols(for_asm=True), filter(None, self.resources.inc_dirs), @@ -112,18 +110,47 @@ def flags(self): flags['asm_flags'] += asm_defines flags['c_flags'] += c_defines flags['cxx_flags'] += c_defines + config_header = self.config_header_ref if config_header: - def is_config_header(f): - return f.path == config_header - config_header= filter( - is_config_header, self.resources.get_file_refs(FileType.HEADER) - )[0] flags['c_flags'] += self.toolchain.get_config_option( config_header.name) flags['cxx_flags'] += self.toolchain.get_config_option( config_header.name) return flags + def toolchain_flags(self, toolchain): + """Returns a 
dictionary of toolchain flags. + Keys of the dictionary are: + cxx_flags - c++ flags + c_flags - c flags + ld_flags - linker flags + asm_flags - assembler flags + common_flags - common options + + The difference from the above is that it takes a parameter. + """ + flags = {key + "_flags": copy.deepcopy(value) for key, value + in toolchain.flags.items()} + config_header = self.config_header_ref + if config_header: + header_options = self.toolchain.get_config_option( + config_header.name) + flags['c_flags'] += header_options + flags['cxx_flags'] += header_options + return flags + + @property + def config_header_ref(self): + config_header = self.toolchain.get_config_header() + if config_header: + def is_config_header(f): + return f.path == config_header + return filter( + is_config_header, self.resources.get_file_refs(FileType.HEADER) + )[0] + else: + return None + def get_source_paths(self): """Returns a list of the directories where source files are contained""" source_keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', diff --git a/tools/export/gnuarmeclipse/__init__.py b/tools/export/gnuarmeclipse/__init__.py index f288eeba510..559386a9b5f 100644 --- a/tools/export/gnuarmeclipse/__init__.py +++ b/tools/export/gnuarmeclipse/__init__.py @@ -77,57 +77,6 @@ def is_target_supported(cls, target_name): return apply_supported_whitelist( cls.TOOLCHAIN, POST_BINARY_WHITELIST, target) - # override - @property - def flags(self): - """Returns a dictionary of toolchain flags. - Keys of the dictionary are: - cxx_flags - c++ flags - c_flags - c flags - ld_flags - linker flags - asm_flags - assembler flags - common_flags - common options - - The difference from the parent function is that it does not - add macro definitions, since they are passed separately. - """ - - config_header = self.toolchain.get_config_header() - flags = {key + "_flags": copy.deepcopy(value) for key, value - in self.toolchain.flags.items()} - if config_header: - config_header = relpath(config_header, - self.resources.file_basepath[config_header]) - flags['c_flags'] += self.toolchain.get_config_option(config_header) - flags['cxx_flags'] += self.toolchain.get_config_option( - config_header) - return flags - - def toolchain_flags(self, toolchain): - """Returns a dictionary of toolchain flags. - Keys of the dictionary are: - cxx_flags - c++ flags - c_flags - c flags - ld_flags - linker flags - asm_flags - assembler flags - common_flags - common options - - The difference from the above is that it takes a parameter. - """ - - # Note: use the config options from the currently selected toolchain. 
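The deep copy in the shared toolchain_flags helper above is what lets each exporter append config-header options without mutating the toolchain's own flag lists. A standalone sketch of that behaviour; the stub flag values and the appended option string below are invented for illustration:

    import copy

    stub_flags = {"c": ["-Os"], "cxx": ["-Os"], "asm": [], "ld": []}

    # Mirrors the comprehension in toolchain_flags(): suffix each key with
    # "_flags" and deep-copy the lists so callers may extend them freely.
    flags = {key + "_flags": copy.deepcopy(value)
             for key, value in stub_flags.items()}
    flags["c_flags"].append("--preinclude=mbed_config.h")  # invented option
    assert stub_flags["c"] == ["-Os"]  # the source flags stay untouched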
- config_header = self.toolchain.get_config_header() - - flags = {key + "_flags": copy.deepcopy(value) for key, value - in toolchain.flags.items()} - if config_header: - config_header = relpath(config_header, - self.resources.file_basepath[config_header]) - header_options = self.toolchain.get_config_option(config_header) - flags['c_flags'] += header_options - flags['cxx_flags'] += header_options - return flags - def validate_resources(self): if not self.resources.linker_script: raise NotSupportedException("No linker script found.") diff --git a/tools/export/makefile/__init__.py b/tools/export/makefile/__init__.py index 2c94022285f..4dc89feeb0c 100644 --- a/tools/export/makefile/__init__.py +++ b/tools/export/makefile/__init__.py @@ -24,6 +24,7 @@ from subprocess import check_output, CalledProcessError, Popen, PIPE import shutil from jinja2.exceptions import TemplateNotFound +from tools.resources import FileType from tools.export.exporters import Exporter, apply_supported_whitelist from tools.utils import NotSupportedException from tools.targets import TARGET_MAP @@ -237,11 +238,12 @@ def prepare_sys_lib(libname): def generate(self): if self.resources.linker_script: - sct_file = self.resources.linker_script + sct_file = self.resources.get_file_refs(FileType.LD_SCRIPT)[-1] new_script = self.toolchain.correct_scatter_shebang( - sct_file, join(self.resources.file_basepath[sct_file], "BUILD")) + sct_file.path, dirname(sct_file.name)) if new_script is not sct_file: - self.resources.linker_script = new_script + self.resources.add_files_to_type( + FileType.LD_SCRIPT, [new_script]) self.generated_files.append(new_script) return super(Arm, self).generate() diff --git a/tools/export/nb/__init__.py b/tools/export/nb/__init__.py index 3ae81d0c51c..744b47a4081 100644 --- a/tools/export/nb/__init__.py +++ b/tools/export/nb/__init__.py @@ -36,31 +36,6 @@ def is_target_supported(cls, target_name): def prepare_sys_lib(libname): return "-l" + libname - def toolchain_flags(self, toolchain): - """Returns a dictionary of toolchain flags. - Keys of the dictionary are: - cxx_flags - c++ flags - c_flags - c flags - ld_flags - linker flags - asm_flags - assembler flags - common_flags - common options - - The difference from the above is that it takes a parameter. - """ - - # Note: use the config options from the currently selected toolchain. 
- config_header = self.toolchain.get_config_header() - - flags = {key + "_flags": copy.deepcopy(value) for key, value - in toolchain.flags.items()} - if config_header: - config_header = relpath(config_header, - self.resources.file_basepath[config_header]) - header_options = self.toolchain.get_config_option(config_header) - flags['c_flags'] += header_options - flags['cxx_flags'] += header_options - return flags - @staticmethod def get_defines_and_remove_from_flags(flags_in, str_key): defines = [] diff --git a/tools/export/sw4stm32/__init__.py b/tools/export/sw4stm32/__init__.py index 2f0817d29c9..b23a91a115f 100644 --- a/tools/export/sw4stm32/__init__.py +++ b/tools/export/sw4stm32/__init__.py @@ -433,10 +433,6 @@ def generate(self): self.resources.win_to_unix() - config_header = self.toolchain.get_config_header() - if config_header: - config_header = relpath(config_header, self.resources.file_basepath[config_header]) - libraries = [] for lib in self.resources.libraries: library, _ = splitext(basename(lib)) @@ -531,7 +527,7 @@ def generate(self): 'name': self.project_name, 'platform': platform, 'include_paths': self.include_path, - 'config_header': config_header, + 'config_header': self.config_header_ref.name, 'exclude_paths': '|'.join(self.excluded_folders), 'ld_script': ld_script, 'library_paths': lib_dirs, diff --git a/tools/toolchains/arm.py b/tools/toolchains/arm.py index 68a7eda0bfc..ee3d9702d6c 100644 --- a/tools/toolchains/arm.py +++ b/tools/toolchains/arm.py @@ -230,12 +230,16 @@ def compile_c(self, source, object, includes): def compile_cpp(self, source, object, includes): return self.compile(self.cppc, source, object, includes) - def correct_scatter_shebang(self, scatter_file, base_path=curdir): + def correct_scatter_shebang(self, scatter_file, cur_dir_name=None): """Correct the shebang at the top of a scatter file. 
Positional arguments: scatter_file -- the scatter file to correct + Keyword arguments: + cur_dir_name -- the name (not path) of the directory containing the + scatter file + Return: The location of the correct scatter file @@ -249,8 +253,9 @@ def correct_scatter_shebang(self, scatter_file, base_path=curdir): return scatter_file else: new_scatter = join(self.build_dir, ".link_script.sct") - self.SHEBANG += " -I %s" % relpath(dirname(scatter_file), - base_path) + if cur_dir_name is None: + cur_dir_name = dirname(scatter_file) + self.SHEBANG += " -I %s" % cur_dir_name if self.need_update(new_scatter, [scatter_file]): with open(new_scatter, "w") as out: out.write(self.SHEBANG) From c50d8a26dd7899a00e55e3d24b57582750e36b6e Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Fri, 15 Jun 2018 13:42:50 -0500 Subject: [PATCH 28/44] Correct win2unix typo --- tools/resources/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 9ecf98bf5bb..21847cc466f 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -183,7 +183,7 @@ def detect_duplicates(self): def win_to_unix(self): self._win_to_unix = True for file_type in self.ALL_FILE_TYPES: - v = [f._replace(name=f.replace('\\', '/')) for + v = [f._replace(name=f.name.replace('\\', '/')) for f in self.get_file_refs(file_type)] self._file_refs[file_type] = v From 001c2d3b3b917c399e5fe4890a6be27848a1e64b Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Fri, 15 Jun 2018 14:40:17 -0500 Subject: [PATCH 29/44] Revisit Mbed 2 building It's much cleaner now --- tools/build_api.py | 159 ++++++++++++++++++++---------------- tools/export/__init__.py | 2 +- tools/resources/__init__.py | 6 +- 3 files changed, 93 insertions(+), 74 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 94fee9410ab..19b2516c27d 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -871,13 +871,31 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None, # Let Exception propagate raise -# We do have unique legacy conventions about how we build and package the mbed -# library + +# A number of compiled files need to be copied as objects as the linker +# will not search for weak symbol overrides in archives. 
These are: +# - mbed_retarget.o: to make sure that the C standard lib symbols get +# overridden +# - mbed_board.o: `mbed_die` is weak +# - mbed_overrides.o: this contains platform overrides of various +# weak SDK functions +# - mbed_main.o: this contains main redirection +# - mbed_sdk_boot.o: this contains the main boot code +# - PeripheralPins.o: PinMap can be weak +SEPARATE_NAMES = [ + 'PeripheralPins.o', + 'mbed_retarget.o', + 'mbed_board.o', + 'mbed_overrides.o', + 'mbed_main.o', + 'mbed_sdk_boot.o', +] + + def build_mbed_libs(target, toolchain_name, clean=False, macros=None, notify=None, jobs=1, report=None, properties=None, build_profile=None, ignore=None): - """ Function returns True is library was built and false if building was - skipped + """ Build legacy libraries for a target and toolchain pair Positional arguments: target - the MCU or board that the project will compile for @@ -892,32 +910,36 @@ properties - UUUUHHHHH beats me build_profile - a dict of flags that will be passed to the compiler ignore - list of paths to add to mbedignore + + Return - True if target + toolchain built correctly, False if not supported """ - if report != None: + if report is not None: start = time() id_name = "MBED" description = "mbed SDK" vendor_label = target.extra_labels[0] cur_result = None prep_report(report, target.name, toolchain_name, id_name) - cur_result = create_result(target.name, toolchain_name, id_name, - description) - - if properties != None: - prep_properties(properties, target.name, toolchain_name, - vendor_label) + cur_result = create_result( + target.name, toolchain_name, id_name, description) + if properties is not None: + prep_properties( + properties, target.name, toolchain_name, vendor_label) - # Check toolchain support if toolchain_name not in target.supported_toolchains: supported_toolchains_text = ", ".join(target.supported_toolchains) - print('%s target is not yet supported by toolchain %s' % - (target.name, toolchain_name)) - print('%s target supports %s toolchain%s' % - (target.name, supported_toolchains_text, 's' - if len(target.supported_toolchains) > 1 else '')) - - if report != None: + notify.info('The target {} does not support the toolchain {}'.format( + target.name, + toolchain_name + )) + notify.info('{} supports {} toolchain{}'.format( + target.name, + supported_toolchains_text, + 's' if len(target.supported_toolchains) > 1 else '' + )) + + if report is not None: cur_result["result"] = "SKIP" add_result_to_report(report, cur_result) @@ -925,28 +947,27 @@ try: # Source and Build Paths - build_target = join(MBED_LIBRARIES, "TARGET_" + target.name) - build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name)) + build_toolchain = join( + MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name)) mkdir(build_toolchain) - # Toolchain - tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name)) + tmp_path = join( + MBED_LIBRARIES, + '.temp', + mbed2_obj_path(target.name, toolchain_name) + ) mkdir(tmp_path) + # Toolchain and config toolchain = prepare_toolchain( [""], tmp_path, target, toolchain_name, macros=macros, notify=notify, build_profile=build_profile, jobs=jobs, clean=clean, ignore=ignore) - # Take into account the library configuration (MBED_CONFIG_FILE) config = toolchain.config config.add_config_files([MBED_CONFIG_FILE])
toolchain.set_config_data(toolchain.config.get_config_data()) - # mbed - notify.info("Building library %s (%s, %s)" % - ('MBED', target.name, toolchain_name)) - - # Common Headers + # distribute header files toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES) library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES] @@ -957,45 +978,24 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, toolchain.copy_files(resources.headers, dest) library_incdirs.append(dest) - cmsis_implementation = Resources(notify).scan_with_toolchain([MBED_CMSIS_PATH], toolchain) - toolchain.copy_files(cmsis_implementation.headers, build_target) - toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain) - toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain) - - hal_implementation = Resources(notify).scan_with_toolchain([MBED_TARGETS_PATH], toolchain) - toolchain.copy_files(hal_implementation.headers + - hal_implementation.hex_files + - hal_implementation.libraries + - [MBED_CONFIG_FILE], - build_target, resources=hal_implementation) - toolchain.copy_files(hal_implementation.linker_script, build_toolchain) - toolchain.copy_files(hal_implementation.bin_files, build_toolchain) - incdirs = Resources(notify).scan_with_toolchain([build_target], toolchain).inc_dirs - objects = toolchain.compile_sources(cmsis_implementation + hal_implementation, - library_incdirs + incdirs + [tmp_path]) - toolchain.copy_files(objects, build_toolchain) - - # Common Sources + # collect resources of the libs to compile + cmsis_res = Resources(notify).scan_with_toolchain( + [MBED_CMSIS_PATH], toolchain) + hal_res = Resources(notify).scan_with_toolchain( + [MBED_TARGETS_PATH], toolchain) mbed_resources = Resources(notify).scan_with_toolchain( [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain) - objects = toolchain.compile_sources(mbed_resources, - library_incdirs + incdirs) - - # A number of compiled files need to be copied as objects as opposed to - # way the linker search for symbols in archives. 
These are: - # - mbed_retarget.o: to make sure that the C standard lib symbols get - # overridden - # - mbed_board.o: mbed_die is weak - # - mbed_overrides.o: this contains platform overrides of various - # weak SDK functions - # - mbed_main.o: this contains main redirection - # - PeripheralPins.o: PinMap can be weak - separate_names, separate_objects = ['PeripheralPins.o', 'mbed_retarget.o', 'mbed_board.o', - 'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], [] + incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs + + # Build Things + notify.info("Building library %s (%s, %s)" % + ('MBED', target.name, toolchain_name)) + objects = toolchain.compile_sources(mbed_resources, incdirs) + separate_objects = [] for obj in objects: - for name in separate_names: + for name in SEPARATE_NAMES: if obj.endswith(name): separate_objects.append(obj) @@ -1003,21 +1003,38 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, objects.remove(obj) toolchain.build_library(objects, build_toolchain, "mbed") - - for obj in separate_objects: - toolchain.copy_files(obj, build_toolchain) - - if report != None: + notify.info("Building library %s (%s, %s)" % + ('CMSIS', target.name, toolchain_name)) + cmsis_objects = toolchain.compile_sources(cmsis_res, incdirs + [tmp_path]) + notify.info("Building library %s (%s, %s)" % + ('HAL', target.name, toolchain_name)) + hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path]) + + # Copy everything into the build directory + to_copy = sum([ + hal_res.headers, + hal_res.hex_files, + hal_res.bin_files, + hal_res.libraries, + cmsis_res.headers, + cmsis_res.bin_files, + [cmsis_res.linker_script, hal_res.linker_script, MBED_CONFIG_FILE], + cmsis_objects, + hal_objects, + separate_objects, + ], []) + toolchain.copy_files(to_copy, build_toolchain) + + if report is not None: end = time() cur_result["elapsed_time"] = end - start cur_result["result"] = "OK" - add_result_to_report(report, cur_result) return True except Exception as exc: - if report != None: + if report is not None: end = time() cur_result["result"] = "FAIL" cur_result["elapsed_time"] = end - start @@ -1025,8 +1042,6 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, cur_result["output"] += str(exc) add_result_to_report(report, cur_result) - - # Let Exception propagate raise diff --git a/tools/export/__init__.py b/tools/export/__init__.py index 99609634dd0..db6f0f398b8 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -265,8 +265,8 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None, name = basename(normpath(abspath(src_paths[0]))) resources = Resources(notify, collect_ignores=True) + resources.add_toolchain_labels(toolchain) for loc, path in src_paths.items(): - resources.add_toolchain_labels(toolchain) for p in path: resources.add_directory(p, into_path=loc) toolchain.build_dir = export_path diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 21847cc466f..ef63bfabed8 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -321,7 +321,11 @@ def lib_refs(self): @property def linker_script(self): - return self.get_file_names(FileType.LD_SCRIPT)[-1] + options = self.get_file_names(FileType.LD_SCRIPT) + if options: + return options[-1] + else: + return None @property def hex_files(self): From d8ddfdc5a5ee4d4346f5842ea6f1252d0e8603df Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 18 Jun 2018 11:32:19 -0500 Subject: [PATCH 30/44] Populate lib_dirs 
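Record the directory that contains each static library alongside the library itself, so linker search paths survive the retirement of the old lib_dirs set. A standalone sketch of the rule the hunk below introduces; the paths are invented, and the real code maps these extensions to FileType.LIB_DIR entries:

    from os.path import dirname, splitext

    LIB_EXTS = (".a", ".ar")  # extensions treated as static libraries

    def lib_dir_for(file_path):
        # Every .a/.ar file also contributes its parent directory, which
        # later becomes a -L search path at link time.
        _, ext = splitext(file_path)
        return dirname(file_path) if ext.lower() in LIB_EXTS else None

    print(lib_dir_for("BUILD/libs/libmbed.a"))  # -> BUILD/libs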
--- tools/resources/__init__.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index ef63bfabed8..2c08d40f655 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -431,6 +431,11 @@ def add_directory( ".bld": FileType.BLD_REF, } + _DIR_EXT = { + ".a": FileType.LIB_DIR, + ".ar": FileType.LIB_DIR, + } + def _add_file(self, file_path, base_path, into_path): """ Add a single file into the resources object that was found by scanning starting as base_path @@ -448,6 +453,11 @@ def _add_file(self, file_path, base_path, into_path): self.add_file_ref(file_type, fake_path, file_path) except KeyError: pass + try: + dir_type = self._DIR_EXT[ext.lower()] + self.add_file_ref(dir_type, dirname(fake_path), dirname(file_path)) + except KeyError: + pass def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None, From 28dbbd6b3b6e12626e4a7b33de1b4ccdd133a38f Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 18 Jun 2018 11:37:39 -0500 Subject: [PATCH 31/44] Correct path usage in linking --- tools/toolchains/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index 9e990253747..b15961cce34 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -628,18 +628,20 @@ def link_program(self, r, tmp_path, name): bin = None if ext == 'elf' else full_path map = join(tmp_path, name + '.map') - objects = sorted(set(r.objects)) + objects = sorted(set([path for _, path + in r.get_file_refs(FileType.OBJECT)])) config_file = ([self.config.app_config_location] if self.config.app_config_location else []) linker_script = [path for _, path in r.get_file_refs(FileType.LD_SCRIPT) if path.endswith(self.LINKER_EXT)][-1] lib_dirs = [path for _, path in r.get_file_refs(FileType.LIB_DIR)] - dependencies = objects + r.libraries + [linker_script] + config_file + libraries = [path for _, path in r.get_file_refs(FileType.LIB)] + dependencies = objects + libraries + [linker_script] + config_file dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) if self.need_update(elf, dependencies): needed_update = True self.progress("link", name) - self.link(elf, objects, r.libraries, lib_dirs, linker_script) + self.link(elf, objects, libraries, lib_dirs, linker_script) if bin and self.need_update(bin, [elf]): needed_update = True From a4cc32067d8d5979c569cd90a96c26998b97b13c Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 18 Jun 2018 11:39:44 -0500 Subject: [PATCH 32/44] Add get_file_paths res API and use it --- tools/resources/__init__.py | 3 +++ tools/toolchains/__init__.py | 10 ++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 2c08d40f655..52ea67a5f9e 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -275,6 +275,9 @@ def get_file_refs(self, file_type): def get_file_names(self, file_type): return [f.name for f in self.get_file_refs(file_type)] + def get_file_paths(self, file_type): + return [f.path for f in self.get_file_refs(file_type)] + def add_files_to_type(self, file_type, files): for f in files: self.add_file_ref(file_type, f, f) diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index b15961cce34..cf45ad09045 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -374,8 +374,7 @@ def compile_sources(self, resources, 
inc_dirs=None): self.notify.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()])) - inc_paths = [path for _, path - in resources.get_file_refs(FileType.INC_DIR)] + inc_paths = resources.get_file_paths(FileType.INC_DIR) if inc_dirs is not None: if isinstance(inc_dirs, list): inc_paths.extend(inc_dirs) @@ -628,14 +627,13 @@ def link_program(self, r, tmp_path, name): bin = None if ext == 'elf' else full_path map = join(tmp_path, name + '.map') - objects = sorted(set([path for _, path - in r.get_file_refs(FileType.OBJECT)])) + objects = sorted(set(r.get_file_paths(FileType.OBJECT))) config_file = ([self.config.app_config_location] if self.config.app_config_location else []) linker_script = [path for _, path in r.get_file_refs(FileType.LD_SCRIPT) if path.endswith(self.LINKER_EXT)][-1] - lib_dirs = [path for _, path in r.get_file_refs(FileType.LIB_DIR)] - libraries = [path for _, path in r.get_file_refs(FileType.LIB)] + lib_dirs = r.get_file_paths(FileType.LIB_DIR) + libraries = r.get_file_paths(FileType.LIB) dependencies = objects + libraries + [linker_script] + config_file dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) if self.need_update(elf, dependencies): From 8166889885f86bee6474972733471debdfe3d79c Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 19 Jun 2018 13:50:19 -0500 Subject: [PATCH 33/44] Compute inc_dirs from headers --- tools/build_api.py | 2 +- tools/resources/__init__.py | 32 ++++++++++++++++++++++++-------- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 19b2516c27d..dd62e89c638 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -528,7 +528,7 @@ def build_project(src_paths, build_path, target, toolchain_name, resources.add_file_ref(linker_script, linker_script) # Compile Sources - objects = toolchain.compile_sources(resources, resources.inc_dirs) + objects = toolchain.compile_sources(resources, sorted(resources.get_file_paths(FileType.INC_DIR))) resources.add_files_to_type(FileType.OBJECT, objects) # Link Program diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index 52ea67a5f9e..b24fd6eb2d0 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -37,7 +37,7 @@ from collections import namedtuple, defaultdict from copy import copy from itertools import chain -from os import walk +from os import walk, sep from os.path import (join, splitext, dirname, relpath, basename, split, normcase, abspath, exists) @@ -131,7 +131,7 @@ def __init__(self, notify, collect_ignores=False): self._collect_ignores = collect_ignores # Storage for file references, indexed by file type - self._file_refs = defaultdict(list) + self._file_refs = defaultdict(set) # Incremental scan related self._label_paths = [] @@ -266,17 +266,35 @@ def add_file_ref(self, file_type, file_name, file_path): ref = FileRef(file_name.replace("\\", "/"), file_path) else: ref = FileRef(file_name, file_path) - self._file_refs[file_type].append(ref) + self._file_refs[file_type].add(ref) def get_file_refs(self, file_type): """Return a list of FileRef for every file of the given type""" - return self._file_refs[file_type] + return list(self._file_refs[file_type]) + + @staticmethod + def _all_parents(files): + for name in files: + components = name.split(sep) + for n in range(1, len(components)): + parent = join(*components[:n]) + yield parent + + def _get_from_refs(self, file_type, key): + if file_type is FileType.INC_DIR: + parents = 
set(self._all_parents(self._get_from_refs( + FileType.HEADER, key))) + parents.add(".") + else: + parents = set() + return list(parents) + [key(f) for f in self.get_file_refs(file_type)] + def get_file_names(self, file_type): - return [f.name for f in self.get_file_refs(file_type)] + return self._get_from_refs(file_type, lambda f: f.name) def get_file_paths(self, file_type): - return [f.path for f in self.get_file_refs(file_type)] + return self._get_from_refs(file_type, lambda f: f.path) def add_files_to_type(self, file_type, files): for f in files: @@ -406,8 +424,6 @@ def add_directory( # Add root to include paths root = root.rstrip("/") - fake_root = join(into_path, relpath(root, base_path)) - self.add_file_ref(FileType.INC_DIR, fake_root, root) for file in files: file_path = join(root, file) From 545553b6bc24e9f25ad9d5afd75a033db78ce7f4 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 19 Jun 2018 15:23:50 -0500 Subject: [PATCH 34/44] Rewrite test detection to avoid relying on "inc_dirs" --- tools/build_api.py | 51 ++++++++++++++-------------- tools/test_api.py | 66 ++++++++++++++---------------------- tools/toolchains/__init__.py | 13 ++----- 3 files changed, 53 insertions(+), 77 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index dd62e89c638..bd693896807 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -480,7 +480,6 @@ def build_project(src_paths, build_path, target, toolchain_name, stats_depth - depth level for memap to display file/dirs ignore - list of paths to add to mbedignore """ - # Convert src_path to a list if needed if not isinstance(src_paths, list): src_paths = [src_paths] @@ -628,6 +627,7 @@ def build_library(src_paths, build_path, target, toolchain_name, # Convert src_path to a list if needed if not isinstance(src_paths, list): src_paths = [src_paths] + src_paths = [relpath(s) for s in src_paths] # Build path if archive: @@ -679,28 +679,25 @@ def build_library(src_paths, build_path, target, toolchain_name, raise Exception(error_msg) try: - resources = Resources(notify).scan_with_toolchain( + res = Resources(notify).scan_with_toolchain( src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs) # Copy headers, objects and static libraries - all files needed for # static lib - toolchain.copy_files(resources.headers, build_path, resources=resources) - toolchain.copy_files(resources.objects, build_path, resources=resources) - toolchain.copy_files(resources.libraries, build_path, - resources=resources) - toolchain.copy_files(resources.json_files, build_path, - resources=resources) - if resources.linker_script: - toolchain.copy_files(resources.linker_script, build_path, - resources=resources) - - if resources.hex_files: - toolchain.copy_files(resources.hex_files, build_path, - resources=resources) - + to_copy = ( + res.get_file_refs(FileType.HEADER) + + res.get_file_refs(FileType.OBJECT) + + res.get_file_refs(FileType.LIB) + + res.get_file_refs(FileType.JSON) + + res.get_file_refs(FileType.LD_SCRIPT) + + res.get_file_refs(FileType.HEX) + + res.get_file_refs(FileType.BIN) + ) + toolchain.copy_files(to_copy, build_path) # Compile Sources - objects = toolchain.compile_sources(resources, resources.inc_dirs) - resources.objects.extend(objects) + objects = toolchain.compile_sources( + res, res.get_file_paths(FileType.INC_DIR)) + res.add_files_to_type(FileType.OBJECT, objects) if archive: toolchain.build_library(objects, build_path, name) @@ -714,8 +711,6 @@ def build_library(src_paths, build_path, target, toolchain_name, end = time() 
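# Illustrative sketch, not part of the patch: the flow these hunks converge
# on. Names follow the Resources API introduced earlier in this series;
# `src_paths`, `toolchain`, and `build_path` are hypothetical stand-ins for
# caller-provided values.
from tools.notifier.mock import MockNotifier
from tools.resources import Resources, FileType

def build_sketch(src_paths, toolchain, build_path):
    res = Resources(MockNotifier()).scan_with_toolchain(src_paths, toolchain)
    # A FileRef pairs (name, path), so copies can be re-rooted on `name` alone.
    toolchain.copy_files(res.get_file_refs(FileType.HEADER), build_path)
    # Compilation and linking only need flat path lists; include dirs are now
    # derived from header parents rather than stored separately.
    objects = toolchain.compile_sources(
        res, res.get_file_paths(FileType.INC_DIR))
    res.add_files_to_type(FileType.OBJECT, objects)
    return res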
cur_result["elapsed_time"] = end - start cur_result["result"] = "OK" - - add_result_to_report(report, cur_result) return True @@ -840,8 +835,8 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None, inc_dirs=inc_dirs, dependencies_paths=dependencies_paths) # Copy Headers - toolchain.copy_files(resources.headers, build_path, - resources=resources) + toolchain.copy_files( + resources.get_file_refs(FileType.HEADER), build_path) dependencies_include_dir = Resources(notify).sacn_with_toolchain([build_path], toolchain).inc_dirs @@ -968,14 +963,18 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, toolchain.set_config_data(toolchain.config.get_config_data()) # distribute header files - toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES) + toolchain.copy_files( + [FileRef(basename(MBED_HEADER),MBED_HEADER)], MBED_LIBRARIES) library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES] for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS), (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM), (MBED_HAL, MBED_LIBRARIES_HAL)]: resources = Resources(notify).scan_with_toolchain([dir], toolchain) - toolchain.copy_files(resources.headers, dest) + toolchain.copy_files( + [FileRef(basename(p), p) for p + in resources.get_file_paths(FileType.HEADER)] , + dest) library_incdirs.append(dest) # collect resources of the libs to compile @@ -1011,7 +1010,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path]) # Copy everything into the build directory - to_copy = sum([ + to_copy = [FileRef(basename(p), p) for p in sum([ hal_res.headers, hal_res.hex_files, hal_res.bin_files, @@ -1022,7 +1021,7 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None, cmsis_objects, hal_objects, separate_objects, - ], []) + ], [])] toolchain.copy_files(to_copy, build_toolchain) if report is not None: diff --git a/tools/test_api.py b/tools/test_api.py index 5ae7d0fdc8a..c23fa26a232 100644 --- a/tools/test_api.py +++ b/tools/test_api.py @@ -40,7 +40,7 @@ from Queue import Queue, Empty except ImportError: from queue import Queue, Empty -from os.path import join, exists, basename, relpath +from os.path import join, exists, basename, relpath, isdir from threading import Thread, Lock from multiprocessing import Pool, cpu_count from subprocess import Popen, PIPE @@ -2083,49 +2083,33 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None): commons = [] # Scan the directory for paths to probe for 'TESTS' folders - base_resources = Resources(MockNotifier()) + base_resources = Resources(MockNotifier(), collect_ignores=True) base_resources.add_directory(base_dir) - dirs = base_resources.inc_dirs + dirs = [d for d in base_resources.ignored_dirs if basename(d) == 'TESTS'] for directory in dirs: - subdirs = os.listdir(directory) - - # If the directory contains a subdirectory called 'TESTS', scan it for test cases - if 'TESTS' in subdirs: - walk_base_dir = join(directory, 'TESTS') - test_resources = Resources(MockNotifier()) - test_resources.add_directory(walk_base_dir, base_dir) - - # Loop through all subdirectories - for d in test_resources.inc_dirs: - - # If the test case folder is not called 'host_tests' or 'COMMON' and it is - # located two folders down from the main 'TESTS' folder (ex. 
TESTS/testgroup/testcase) - # then add it to the tests - relative_path = relpath(d, walk_base_dir) - relative_path_parts = os.path.normpath(relative_path).split(os.sep) - if len(relative_path_parts) == 2: - test_group_directory_path, test_case_directory = os.path.split(d) - test_group_directory = os.path.basename(test_group_directory_path) - - # Check to make sure discoverd folder is not in a host test directory or common directory - special_dirs = ['host_tests', 'COMMON'] - if test_group_directory not in special_dirs and test_case_directory not in special_dirs: - test_name = test_path_to_name(d, base_dir) - tests[(test_name, walk_base_dir, test_group_directory, test_case_directory)] = [d] - - # Also find any COMMON paths, we'll add these later once we find all the base tests - if 'COMMON' in relative_path_parts: - if relative_path_parts[0] != 'COMMON': - def predicate(base_pred, group_pred, name_base_group_case): - (name, base, group, case) = name_base_group_case - return base == base_pred and group == group_pred - commons.append((functools.partial(predicate, walk_base_dir, relative_path_parts[0]), d)) - else: - def predicate(base_pred, name_base_group_case): - (name, base, group, case) = name_base_group_case - return base == base_pred - commons.append((functools.partial(predicate, walk_base_dir), d)) + for test_group_directory in os.listdir(directory): + grp_dir = join(directory, test_group_directory) + if not isdir(grp_dir): + continue + for test_case_directory in os.listdir(grp_dir): + d = join(directory, test_group_directory, test_case_directory) + if not isdir(d): + continue + special_dirs = ['host_tests', 'COMMON'] + if test_group_directory not in special_dirs and test_case_directory not in special_dirs: + test_name = test_path_to_name(d, base_dir) + tests[(test_name, directory, test_group_directory, test_case_directory)] = [d] + if test_case_directory == 'COMMON': + def predicate(base_pred, group_pred, name_base_group_case): + (name, base, group, case) = name_base_group_case + return base == base_pred and group == group_pred + commons.append((functools.partial(predicate, directory, test_group_directory), d)) + if test_group_directory == 'COMMON': + def predicate(base_pred, name_base_group_case): + (name, base, group, case) = name_base_group_case + return base == base_pred + commons.append((functools.partial(predicate, directory), grp_dir)) # Apply common directories for pred, path in commons: diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index cf45ad09045..1d290d3caad 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -290,17 +290,10 @@ def copy_files(self, files_paths, trg_path, resources=None): if not isinstance(files_paths, list): files_paths = [files_paths] - for source in files_paths: - if source is None: - files_paths.remove(source) - - for source in files_paths: - _, relative_path = split(source) - - target = join(trg_path, relative_path) - + for dest, source in files_paths: + target = join(trg_path, dest) if (target != source) and (self.need_update(target, [source])): - self.progress("copy", relative_path) + self.progress("copy", dest) mkdir(dirname(target)) copyfile(source, target) From b2395a7786cdb5ddda250f1fb90623818a188458 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 26 Jun 2018 10:12:43 -0500 Subject: [PATCH 35/44] Simplify get_config toolchain handling --- tools/get_config.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tools/get_config.py b/tools/get_config.py index 
1cc55c67efb..d425e5e4bb0 100644 --- a/tools/get_config.py +++ b/tools/get_config.py @@ -53,14 +53,15 @@ args_error(parser, "argument -m/--mcu is required") target = extract_mcus(parser, options)[0] - # Toolchain - toolchain = options.tool[0] if options.tool is not None else None - options.prefix = options.prefix or [""] try: params, macros, features = get_config( - options.source_dir, target, toolchain, app_config=options.app_config) + options.source_dir, + target, + options.tool[0] if options.tool else None, + app_config=options.app_config + ) if not params and not macros: print("No configuration data available.") sys.exit(0) From 62538e3bce5c9cbabe9d94ebf793de7da240bca8 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 26 Jun 2018 10:13:13 -0500 Subject: [PATCH 36/44] Filter archives for the arm linker the other compilers can do it for themselves --- tools/toolchains/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index 1d290d3caad..d28a8cda408 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -626,7 +626,8 @@ def link_program(self, r, tmp_path, name): linker_script = [path for _, path in r.get_file_refs(FileType.LD_SCRIPT) if path.endswith(self.LINKER_EXT)][-1] lib_dirs = r.get_file_paths(FileType.LIB_DIR) - libraries = r.get_file_paths(FileType.LIB) + libraries = [l for l in r.get_file_paths(FileType.LIB) + if l.endswith(self.LIBRARY_EXT)] dependencies = objects + libraries + [linker_script] + config_file dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld")) if self.need_update(elf, dependencies): From 3b7a5bb35eb07dbddf8d05e8ef0195a80fb0bd67 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 26 Jun 2018 10:22:10 -0500 Subject: [PATCH 37/44] Filter libraries by extension in exporters --- tools/export/atmelstudio/__init__.py | 2 +- tools/export/cces/__init__.py | 2 +- tools/export/cmake/__init__.py | 2 +- tools/export/cmsis/__init__.py | 2 +- tools/export/codeblocks/__init__.py | 3 +-- tools/export/coide/__init__.py | 2 +- tools/export/ds5_5/__init__.py | 2 +- tools/export/embitz/__init__.py | 2 +- tools/export/exporters.py | 5 +++++ tools/export/gnuarmeclipse/__init__.py | 2 +- tools/export/iar/__init__.py | 2 +- tools/export/kds/__init__.py | 2 +- tools/export/lpcxpresso/__init__.py | 2 +- tools/export/makefile/__init__.py | 2 +- tools/export/mcuxpresso/__init__.py | 7 ++++--- tools/export/simplicity/__init__.py | 2 +- tools/export/sw4stm32/__init__.py | 2 +- tools/export/uvision/__init__.py | 2 +- 18 files changed, 25 insertions(+), 20 deletions(-) diff --git a/tools/export/atmelstudio/__init__.py b/tools/export/atmelstudio/__init__.py index 531f7fdfbe0..c18f798e37d 100644 --- a/tools/export/atmelstudio/__init__.py +++ b/tools/export/atmelstudio/__init__.py @@ -59,7 +59,7 @@ def generate(self): source_folders.append(e) libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) diff --git a/tools/export/cces/__init__.py b/tools/export/cces/__init__.py index b2df58a5837..cb193aaa8fa 100644 --- a/tools/export/cces/__init__.py +++ b/tools/export/cces/__init__.py @@ -318,7 +318,7 @@ def generate(self): cxx_flags = self.flags['cxx_flags'] + self.flags['common_flags'] libs = [] - for libpath in self.resources.libraries: + for libpath in self.libraries: lib = os.path.splitext(os.path.basename(libpath))[0] libs.append(lib[3:]) # skip 'lib' prefix diff --git 
a/tools/export/cmake/__init__.py b/tools/export/cmake/__init__.py index e1f3e0b7495..5cf7caa95d4 100644 --- a/tools/export/cmake/__init__.py +++ b/tools/export/cmake/__init__.py @@ -68,7 +68,7 @@ def generate(self): srcs = [re.sub(r'^[.]/', '', f) for f in srcs] # additional libraries - libraries = [self.prepare_lib(basename(lib)) for lib in self.resources.libraries] + libraries = [self.prepare_lib(basename(lib)) for lib in self.libraries] sys_libs = [self.prepare_sys_lib(lib) for lib in self.toolchain.sys_libs] # sort includes reverse, so the deepest dir comes first (ensures short includes) diff --git a/tools/export/cmsis/__init__.py b/tools/export/cmsis/__init__.py index f6eceddc73f..999b0b61d8c 100644 --- a/tools/export/cmsis/__init__.py +++ b/tools/export/cmsis/__init__.py @@ -143,7 +143,7 @@ def group_project_files(self, sources, root_element): def generate(self): srcs = self.resources.headers + self.resources.s_sources + \ self.resources.c_sources + self.resources.cpp_sources + \ - self.resources.objects + self.resources.libraries + \ + self.resources.objects + self.libraries + \ [self.resources.linker_script] srcs = [fileCMSIS(src, src) for src in srcs if src] ctx = { diff --git a/tools/export/codeblocks/__init__.py b/tools/export/codeblocks/__init__.py index 3e1129f003b..31841355beb 100644 --- a/tools/export/codeblocks/__init__.py +++ b/tools/export/codeblocks/__init__.py @@ -90,8 +90,7 @@ def generate(self): not x.startswith('obj'))]; c_sources = sorted([self.filter_dot(s) for s in self.resources.c_sources]) - libraries = [self.prepare_lib(basename(lib)) for lib - in self.resources.libraries] + libraries = [self.prepare_lib(basename(lib)) for lib in self.libraries] sys_libs = [self.prepare_sys_lib(lib) for lib in self.toolchain.sys_libs] ncs36510fib = (hasattr(self.toolchain.target, 'post_binary_hook') and diff --git a/tools/export/coide/__init__.py b/tools/export/coide/__init__.py index 9e47247015d..41a55f5bc95 100644 --- a/tools/export/coide/__init__.py +++ b/tools/export/coide/__init__.py @@ -88,7 +88,7 @@ def generate(self): }) libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) diff --git a/tools/export/ds5_5/__init__.py b/tools/export/ds5_5/__init__.py index 6f7346b4ce3..9b2649269fb 100644 --- a/tools/export/ds5_5/__init__.py +++ b/tools/export/ds5_5/__init__.py @@ -60,7 +60,7 @@ def generate(self): 'name': self.project_name, 'include_paths': self.resources.inc_dirs, 'scatter_file': self.resources.linker_script, - 'object_files': self.resources.objects + self.resources.libraries, + 'object_files': self.resources.objects + self.libraries, 'source_files': source_files, 'symbols': self.toolchain.get_symbols() } diff --git a/tools/export/embitz/__init__.py b/tools/export/embitz/__init__.py index 9cdd1f03a44..9d4ebac9b1d 100644 --- a/tools/export/embitz/__init__.py +++ b/tools/export/embitz/__init__.py @@ -60,7 +60,7 @@ def generate(self): }) libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) diff --git a/tools/export/exporters.py b/tools/export/exporters.py index 7b7e40cbb4d..984022b0f52 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -118,6 +118,11 @@ def flags(self): config_header.name) return flags + @property + def libraries(self): + return [l for l in self.resources.get_file_names(FileType.LIB) + if l.endswith(self.toolchain.LIBRARY_EXT)] + def toolchain_flags(self, toolchain): 
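# Illustrative sketch, not part of the patch: what the `libraries` property
# added above buys each exporter, assuming LIBRARY_EXT values like ".ar" for
# the ARM toolchain and ".a" for GCC_ARM/IAR; the inputs are hypothetical.
def filter_libraries(lib_names, library_ext):
    """Keep only the archives the active toolchain can actually link."""
    return [l for l in lib_names if l.endswith(library_ext)]

# With both flavours of an archive on disk, each toolchain sees only its own:
assert filter_libraries(["libfoo.a", "libfoo.ar"], ".ar") == ["libfoo.ar"]
assert filter_libraries(["libfoo.a", "libfoo.ar"], ".a") == ["libfoo.a"]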
"""Returns a dictionary of toolchain flags. Keys of the dictionary are: diff --git a/tools/export/gnuarmeclipse/__init__.py b/tools/export/gnuarmeclipse/__init__.py index 559386a9b5f..14e74c09a2f 100644 --- a/tools/export/gnuarmeclipse/__init__.py +++ b/tools/export/gnuarmeclipse/__init__.py @@ -90,7 +90,7 @@ def create_jinja_ctx(self): # TODO: use some logger to display additional info if verbose libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) diff --git a/tools/export/iar/__init__.py b/tools/export/iar/__init__.py index e3fa360afd0..ad105d621eb 100644 --- a/tools/export/iar/__init__.py +++ b/tools/export/iar/__init__.py @@ -109,7 +109,7 @@ def generate(self): raise NotSupportedException("No linker script found.") srcs = self.resources.headers + self.resources.s_sources + \ self.resources.c_sources + self.resources.cpp_sources + \ - self.resources.objects + self.resources.libraries + self.resources.objects + self.libraries flags = self.flags c_flags = list(set(flags['common_flags'] + flags['c_flags'] diff --git a/tools/export/kds/__init__.py b/tools/export/kds/__init__.py index ae50c2b236a..80ee5c3d8b4 100644 --- a/tools/export/kds/__init__.py +++ b/tools/export/kds/__init__.py @@ -33,7 +33,7 @@ class KDS(Exporter): def generate(self): libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) diff --git a/tools/export/lpcxpresso/__init__.py b/tools/export/lpcxpresso/__init__.py index 263a5f7f593..dc8e0ffdd9b 100644 --- a/tools/export/lpcxpresso/__init__.py +++ b/tools/export/lpcxpresso/__init__.py @@ -44,7 +44,7 @@ class LPCXpresso(Exporter): def generate(self): libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) diff --git a/tools/export/makefile/__init__.py b/tools/export/makefile/__init__.py index 4dc89feeb0c..3a595bb3bae 100644 --- a/tools/export/makefile/__init__.py +++ b/tools/export/makefile/__init__.py @@ -70,7 +70,7 @@ def generate(self): self.resources.cpp_sources] libraries = [self.prepare_lib(basename(lib)) for lib - in self.resources.libraries] + in self.libraries] sys_libs = [self.prepare_sys_lib(lib) for lib in self.toolchain.sys_libs] diff --git a/tools/export/mcuxpresso/__init__.py b/tools/export/mcuxpresso/__init__.py index 6c2789f1ae1..5c847163537 100644 --- a/tools/export/mcuxpresso/__init__.py +++ b/tools/export/mcuxpresso/__init__.py @@ -76,12 +76,13 @@ def generate(self): # TODO: use some logger to display additional info if verbose - self.libraries = [] + libraries = [] # print 'libraries' # print self.resources.libraries - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) - self.libraries.append(l[3:]) + libraries.append(l[3:]) + self.libraries = libraries self.system_libraries = [ 'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys' diff --git a/tools/export/simplicity/__init__.py b/tools/export/simplicity/__init__.py index d0b5f7e220d..d130ace3bf0 100644 --- a/tools/export/simplicity/__init__.py +++ b/tools/export/simplicity/__init__.py @@ -144,7 +144,7 @@ def generate(self): main_files.append(source) libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: l, _ = splitext(basename(lib)) if l[3:] not in EXCLUDED_LIBS: libraries.append(l[3:]) diff --git a/tools/export/sw4stm32/__init__.py b/tools/export/sw4stm32/__init__.py index 
b23a91a115f..a3416603ff5 100644 --- a/tools/export/sw4stm32/__init__.py +++ b/tools/export/sw4stm32/__init__.py @@ -434,7 +434,7 @@ def generate(self): self.resources.win_to_unix() libraries = [] - for lib in self.resources.libraries: + for lib in self.libraries: library, _ = splitext(basename(lib)) libraries.append(library[3:]) diff --git a/tools/export/uvision/__init__.py b/tools/export/uvision/__init__.py index 50996218d55..d4c94093869 100644 --- a/tools/export/uvision/__init__.py +++ b/tools/export/uvision/__init__.py @@ -218,7 +218,7 @@ def generate(self): srcs = self.resources.headers + self.resources.s_sources + \ self.resources.c_sources + self.resources.cpp_sources + \ - self.resources.objects + self.resources.libraries + self.resources.objects + self.libraries ctx = { 'name': self.project_name, # project_files => dict of generators - file group to generator of From 5c27da70467d148759d31847d685689580c00b00 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Wed, 27 Jun 2018 09:20:58 -0500 Subject: [PATCH 38/44] Correct scatter shebang rewriting in Make --- tools/export/makefile/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/export/makefile/__init__.py b/tools/export/makefile/__init__.py index 3a595bb3bae..22dacdd5738 100644 --- a/tools/export/makefile/__init__.py +++ b/tools/export/makefile/__init__.py @@ -240,7 +240,7 @@ def generate(self): if self.resources.linker_script: sct_file = self.resources.get_file_refs(FileType.LD_SCRIPT)[-1] new_script = self.toolchain.correct_scatter_shebang( - sct_file.path, dirname(sct_file.name)) + sct_file.path, join("..", dirname(sct_file.name))) if new_script is not sct_file: self.resources.add_files_to_type( FileType.LD_SCRIPT, [new_script]) From c641fd7459ead99e2773c99a61e2d4955be2b035 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Thu, 28 Jun 2018 10:43:40 -0500 Subject: [PATCH 39/44] Sort every list we have --- tools/resources/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tools/resources/__init__.py b/tools/resources/__init__.py index b24fd6eb2d0..953fb55c279 100644 --- a/tools/resources/__init__.py +++ b/tools/resources/__init__.py @@ -287,7 +287,9 @@ def _get_from_refs(self, file_type, key): parents.add(".") else: parents = set() - return list(parents) + [key(f) for f in self.get_file_refs(file_type)] + return sorted( + list(parents) + [key(f) for f in self.get_file_refs(file_type)] + ) def get_file_names(self, file_type): From 28800fbdde025aa556f3b47dca468b6e5b2402a3 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Thu, 28 Jun 2018 15:13:20 -0500 Subject: [PATCH 40/44] Fix broken tests --- tools/test/toolchains/api_test.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools/test/toolchains/api_test.py b/tools/test/toolchains/api_test.py index 2ca11158492..3b8985fbd00 100644 --- a/tools/test/toolchains/api_test.py +++ b/tools/test/toolchains/api_test.py @@ -25,6 +25,7 @@ @patch('tools.toolchains.arm.run_cmd') def test_arm_version_check(_run_cmd): + set_targets_json_location() _run_cmd.return_value = (""" Product: ARM Compiler 5.06 Component: ARM Compiler 5.06 update 5 (build 528) @@ -52,6 +53,7 @@ def test_arm_version_check(_run_cmd): @patch('tools.toolchains.iar.run_cmd') def test_iar_version_check(_run_cmd): + set_targets_json_location() _run_cmd.return_value = (""" IAR ANSI C/C++ Compiler V7.80.1.28/LNX for ARM """, "", 0) @@ -73,6 +75,7 @@ def test_iar_version_check(_run_cmd): @patch('tools.toolchains.gcc.run_cmd') def test_gcc_version_check(_run_cmd): 
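# Hedged reading of the set_targets_json_location() calls added throughout
# this patch: toolchain construction reads module-global target definitions,
# so a prior test that loaded a custom targets.json could otherwise leak into
# these version checks; calling it with no argument restores the default.
# A minimal sketch of the pattern, with an assumed version string:
from mock import patch
from tools.targets import set_targets_json_location

@patch('tools.toolchains.gcc.run_cmd')
def test_gcc_version_sketch(_run_cmd):
    set_targets_json_location()  # reset global target definitions
    _run_cmd.return_value = ("arm-none-eabi-gcc 6.3.1", "", 0)
    # ...construct a GCC_ARM toolchain for a target and assert that no
    # version-mismatch warning is emitted...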
+ set_targets_json_location() _run_cmd.return_value = (""" arm-none-eabi-gcc (Arch Repository) 6.4.4 Copyright (C) 2018 Free Software Foundation, Inc. From 91e77f16cdc1da7290682bd87f85332b9b310a3a Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Thu, 5 Jul 2018 12:19:39 -0500 Subject: [PATCH 41/44] Import FileRef before using it --- tools/build_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/build_api.py b/tools/build_api.py index bd693896807..229063f8f86 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -42,7 +42,7 @@ MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS, MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL, BUILD_DIR) -from .resources import Resources, FileType +from .resources import Resources, FileType, FileRef from .notifier.mock import MockNotifier from .targets import TARGET_NAMES, TARGET_MAP from .libraries import Library From e9d56092c43f2bfd32553f6468873f4978578984 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 9 Jul 2018 09:59:52 -0500 Subject: [PATCH 42/44] Remove unused storage api test --- TESTS/storage_abstraction/.mbedignore | 1 - .../storage_abstraction/basicAPI/basicAPI.cpp | 993 ------------------ 2 files changed, 994 deletions(-) delete mode 100644 TESTS/storage_abstraction/.mbedignore delete mode 100644 TESTS/storage_abstraction/basicAPI/basicAPI.cpp diff --git a/TESTS/storage_abstraction/.mbedignore b/TESTS/storage_abstraction/.mbedignore deleted file mode 100644 index 72e8ffc0db8..00000000000 --- a/TESTS/storage_abstraction/.mbedignore +++ /dev/null @@ -1 +0,0 @@ -* diff --git a/TESTS/storage_abstraction/basicAPI/basicAPI.cpp b/TESTS/storage_abstraction/basicAPI/basicAPI.cpp deleted file mode 100644 index f6ab4351e08..00000000000 --- a/TESTS/storage_abstraction/basicAPI/basicAPI.cpp +++ /dev/null @@ -1,993 +0,0 @@ -/* - * Copyright (c) 2006-2016, ARM Limited, All Rights Reserved - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#if !DEVICE_STORAGE - #error [NOT_SUPPORTED] Storage not supported for this target -#endif - -#ifndef AVOID_GREENTEA -#include "greentea-client/test_env.h" -#endif -#include "utest/utest.h" -#include "unity/unity.h" - -#include "storage_abstraction/Driver_Storage.h" - -#include -#include - -using namespace utest::v1; - -extern ARM_DRIVER_STORAGE ARM_Driver_Storage_MTD_K64F; -ARM_DRIVER_STORAGE *drv = &ARM_Driver_Storage_MTD_K64F; - -/* temporary buffer to hold data for testing. */ -static const unsigned BUFFER_SIZE = 16384; -static uint8_t buffer[BUFFER_SIZE]; - -/* forward declaration */ -void initializationCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation); - -/* - * Most tests need some basic initialization of the driver before proceeding - * with their operations.
- */ -static control_t preambleForBasicInitialization(void) -{ - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - int32_t rc = drv->Initialize(initializationCompleteCallback); - TEST_ASSERT(rc >= ARM_DRIVER_OK); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return CaseTimeout(200) + CaseRepeatAll; - } else { - TEST_ASSERT(rc == 1); - return CaseRepeatAll; - } -} - -template -static void verifyBytePattern(uint64_t addr, size_t sizeofData, T bytePattern) -{ - /* we're limited by BUFFER_SIZE in how much we can verify in a single iteration; - * the variable 'amountBeingVerified' captures the size being verified in each - * iteration. */ - size_t amountBeingVerified = sizeofData; - if (amountBeingVerified > BUFFER_SIZE) { - amountBeingVerified = BUFFER_SIZE; - } - TEST_ASSERT((amountBeingVerified % sizeof(T)) == 0); - - while (sizeofData) { - int32_t rc = drv->ReadData(addr, buffer, amountBeingVerified); - TEST_ASSERT_EQUAL(amountBeingVerified, rc); - for (size_t index = 0; index < amountBeingVerified / sizeof(T); index++) { - // if (bytePattern != ((const T *)buffer)[index]) { - // printf("%u: expected %x, found %x\n", index, bytePattern, ((const T *)buffer)[index]); - // } - TEST_ASSERT_EQUAL(bytePattern, ((const T *)buffer)[index]); - } - - sizeofData -= amountBeingVerified; - addr += amountBeingVerified; - } -} - -void test_getVersion() -{ - ARM_DRIVER_VERSION version = drv->GetVersion(); - - TEST_ASSERT_EQUAL(version.api, ARM_STORAGE_API_VERSION); - TEST_ASSERT_EQUAL(version.drv, ARM_DRIVER_VERSION_MAJOR_MINOR(1,00)); -} - -void test_getCapabilities() -{ - TEST_ASSERT(sizeof(ARM_STORAGE_CAPABILITIES) == sizeof(uint32_t)); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - TEST_ASSERT_EQUAL(0, capabilities.reserved); -} - -void test_getInfo() -{ - ARM_STORAGE_INFO info = {}; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - TEST_ASSERT_EQUAL(0, info.security.reserved1); - TEST_ASSERT_EQUAL(0, info.security.reserved2); - TEST_ASSERT((info.program_cycles == ARM_STORAGE_PROGRAM_CYCLES_INFINITE) || (info.program_cycles > 0)); - TEST_ASSERT(info.total_storage > 0); -} - -void initializationCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - printf("init complete callback\n"); - TEST_ASSERT_EQUAL(1, status); - TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_INITIALIZE); - - Harness::validate_callback(); -} - -control_t test_initialize(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 3; - printf("in test_initialize with call_count %u\n", call_count); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - int32_t rc = drv->Initialize(initializationCompleteCallback); - TEST_ASSERT(rc >= ARM_DRIVER_OK); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? (CaseTimeout(200) + CaseRepeatAll) : (control_t) CaseNext; - } - - TEST_ASSERT(rc == 1); - return (call_count < REPEAT_INSTANCES) ? 
CaseRepeatAll : CaseNext; -} - -void uninitializationCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - printf("uninit complete callback\n"); - TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK); - TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_UNINITIALIZE); - - Harness::validate_callback(); -} - -control_t test_uninitialize(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 3; - printf("in test_uninitialize with call_count %u\n", call_count); - - /* update the completion callback. */ - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - int32_t rc = drv->Uninitialize(); - if (call_count > 2) { - /* the driver should return some error for repeated un-initialization. */ - TEST_ASSERT(rc < ARM_DRIVER_OK); - return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext; - } - TEST_ASSERT(rc >= ARM_DRIVER_OK); - if (rc == ARM_DRIVER_OK) { - /* asynchronous operation */ - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return CaseTimeout(200) + CaseRepeatAll; - } - - /* synchronous operation */ - TEST_ASSERT(rc == 1); - return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext; -} - -void powerControlCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - printf("power control complete callback\n"); - TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK); - TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_POWER_CONTROL); - - Harness::validate_callback(); -} - -control_t test_powerControl(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 2; - printf("in test_powerControl with call_count %u\n", call_count); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - /* Update the completion callback to 'powerControlCompleteCallback'. */ - if (call_count == 2) { - int32_t rc = drv->Initialize(powerControlCompleteCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - } - - int32_t rc = drv->PowerControl(ARM_POWER_FULL); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT(rc == 1); - return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext; - } -} - -void readDataCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - printf("ReadData complete callback\n"); - TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK); - TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_READ_DATA); - - Harness::validate_callback(); -} - -control_t test_readData(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 5; - printf("in test_readData with call_count %u\n", call_count); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - /* Update the completion callback to 'readDataCompleteCallback'. 
*/ - int32_t rc; - if (call_count == 2) { - rc = drv->Initialize(readDataCompleteCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - } - - /* Get the first block. */ - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - - ARM_STORAGE_INFO info; - rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - TEST_ASSERT(info.program_unit <= BUFFER_SIZE); - TEST_ASSERT(firstBlock.size >= (REPEAT_INSTANCES - 1) * info.program_unit); - - /* choose an increasing address for each iteration. */ - uint64_t addr = firstBlock.addr + (call_count - 1) * info.program_unit; - size_t sizeofData = info.program_unit; - - rc = drv->ReadData(addr, buffer, sizeofData); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT(rc > 0); - return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext; - } -} - -void programDataCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - TEST_ASSERT(status >= 0); - static unsigned programIteration = 0; - - static const uint32_t BYTE_PATTERN = 0xAA551122; - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - const uint64_t addr = firstBlock.addr + programIteration * firstBlock.attributes.erase_unit; - size_t sizeofData = info.program_unit; - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - TEST_ASSERT((operation == ARM_STORAGE_OPERATION_ERASE) || (operation == ARM_STORAGE_OPERATION_PROGRAM_DATA)); - if (operation == ARM_STORAGE_OPERATION_ERASE) { - // printf("programming %u bytes at address %lu with pattern 0x%" PRIx32 "\n", sizeofData, (uint32_t)addr, BYTE_PATTERN); - - size_t sizeofData = info.program_unit; - TEST_ASSERT(BUFFER_SIZE >= sizeofData); - TEST_ASSERT((sizeofData % sizeof(uint32_t)) == 0); - for (size_t index = 0; index < sizeofData / sizeof(uint32_t); index++) { - ((uint32_t *)buffer)[index] = BYTE_PATTERN; - } - - status = drv->ProgramData(addr, buffer, sizeofData); - if (status < ARM_DRIVER_OK) { - return; /* failure. this will trigger a timeout and cause test failure. */ - } - if (status == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return; /* We've successfully pended a programData operation; we'll have another - * invocation of this callback when programming completes. */ - } - } - - /* We come here either because of completion for program-data or as a very - * unlikely fall through from synchronous completion of program-data (above). */ - -#ifndef __CC_ARM - printf("verifying programmed sector at addr %lu\n", (uint32_t)addr); -#endif - verifyBytePattern(addr, sizeofData, BYTE_PATTERN); - ++programIteration; - - Harness::validate_callback(); -} - -control_t test_programDataUsingProgramUnit(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 5; - printf("in test_programDataUsingProgramUnit with call_count %u\n", call_count); - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. 
*/ - return preambleForBasicInitialization(); - } - - /* Get the first block. */ - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - TEST_ASSERT(info.program_unit <= firstBlock.attributes.erase_unit); - TEST_ASSERT(firstBlock.size >= (REPEAT_INSTANCES - 1) * firstBlock.attributes.erase_unit); - - /* initialize the buffer to hold the pattern. */ - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - /* Update the completion callback to 'programDataCompleteCallback'. */ - if (call_count == 2) { - int32_t rc = drv->Initialize(programDataCompleteCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - } - - /* choose an increasing address for each iteration. */ - uint64_t addr = firstBlock.addr + (call_count - 2) * firstBlock.attributes.erase_unit; - - /* erase the sector at 'addr' */ - printf("erasing sector at addr %lu\n", (uint32_t)addr); - rc = drv->Erase(addr, firstBlock.attributes.erase_unit); - TEST_ASSERT(rc >= 0); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT_EQUAL(firstBlock.attributes.erase_unit, rc); - verifyBytePattern(addr, firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0); - - static const uint32_t BYTE_PATTERN = 0xAA551122; - size_t sizeofData = info.program_unit; - TEST_ASSERT(BUFFER_SIZE >= sizeofData); - TEST_ASSERT((sizeofData % sizeof(uint32_t)) == 0); - for (size_t index = 0; index < sizeofData / sizeof(uint32_t); index++) { - ((uint32_t *)buffer)[index] = BYTE_PATTERN; - } - - /* program the sector at addr */ - // printf("programming %u bytes at address %lu with pattern 0x%" PRIx32 "\n", sizeofData, (uint32_t)addr, BYTE_PATTERN); - rc = drv->ProgramData((uint32_t)addr, buffer, sizeofData); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT(rc > 0); - - printf("verifying programmed sector at addr %lu\n", (uint32_t)addr); - verifyBytePattern(addr, sizeofData, BYTE_PATTERN); - - return (call_count < REPEAT_INSTANCES) ? 
CaseRepeatAll : CaseNext; - } - } -} - -void programDataOptimalCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - TEST_ASSERT(status >= 0); - static unsigned programIteration = 0; - - static const uint8_t BYTE_PATTERN = 0xAA; - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - const uint64_t addr = firstBlock.addr + programIteration * firstBlock.attributes.erase_unit; - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - size_t sizeofData = info.optimal_program_unit; - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - TEST_ASSERT((operation == ARM_STORAGE_OPERATION_ERASE) || (operation == ARM_STORAGE_OPERATION_PROGRAM_DATA)); - if (operation == ARM_STORAGE_OPERATION_ERASE) { -#ifndef __CC_ARM - printf("programming %u bytes at address %lu with pattern 0x%x\n", sizeofData, (uint32_t)addr, BYTE_PATTERN); -#endif - size_t sizeofData = info.optimal_program_unit; - TEST_ASSERT(BUFFER_SIZE >= sizeofData); - memset(buffer, BYTE_PATTERN, sizeofData); - - status = drv->ProgramData(addr, buffer, sizeofData); - if (status < ARM_DRIVER_OK) { - return; /* failure. this will trigger a timeout and cause test failure. */ - } - if (status == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return; /* We've successfully pended a programData operation; we'll have another - * invocation of this callback when programming completes. */ - } - } - - /* We come here either because of completion for program-data or as a very - * unlikely fall through from synchronous completion of program-data (above). */ - -#ifndef __CC_ARM - printf("verifying programmed sector at addr %lu\n", (uint32_t)addr); -#endif - verifyBytePattern(addr, sizeofData, BYTE_PATTERN); - ++programIteration; - - Harness::validate_callback(); -} - -control_t test_programDataUsingOptimalProgramUnit(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 5; - printf("in test_programDataUsingOptimalProgramUnit with call_count %u\n", call_count); - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - /* Get the first block. */ - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - TEST_ASSERT(info.optimal_program_unit <= firstBlock.attributes.erase_unit); - TEST_ASSERT(firstBlock.size >= (REPEAT_INSTANCES - 1) * firstBlock.attributes.erase_unit); - - /* initialize the buffer to hold the pattern. */ - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - /* Update the completion callback to 'programDataCompleteCallback'. */ - if (call_count == 2) { - int32_t rc = drv->Initialize(programDataOptimalCompleteCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - } - - /* choose an increasing address for each iteration. 
*/ - uint64_t addr = firstBlock.addr + (call_count - 2) * firstBlock.attributes.erase_unit; - - /* erase the sector at 'addr' */ - printf("erasing sector at addr %lu\n", (uint32_t)addr); - rc = drv->Erase(addr, firstBlock.attributes.erase_unit); - TEST_ASSERT(rc >= 0); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT_EQUAL(firstBlock.attributes.erase_unit, rc); - verifyBytePattern(addr, firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0); - - static const uint8_t BYTE_PATTERN = 0xAA; - size_t sizeofData = info.optimal_program_unit; - TEST_ASSERT(BUFFER_SIZE >= sizeofData); - memset(buffer, BYTE_PATTERN, sizeofData); - - /* program the sector at addr */ - printf("programming %u bytes at address %lu with pattern 0x%x\n", sizeofData, (uint32_t)addr, BYTE_PATTERN); - rc = drv->ProgramData((uint32_t)addr, buffer, sizeofData); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT_EQUAL(sizeofData, rc); - - printf("verifying programmed sector at addr %lu\n", (uint32_t)addr); - verifyBytePattern(addr, sizeofData, BYTE_PATTERN); - - return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext; - } - } -} - -void test_eraseWithInvalidParameters(void) -{ - int32_t rc; - - rc = drv->Erase(0, 0); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - - /* operate before the start of the first block. */ - ARM_STORAGE_BLOCK block; - rc = drv->GetNextBlock(NULL, &block); /* get the first block */ - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&block)); - TEST_ASSERT(block.size > 0); - rc = drv->Erase(block.addr - 1, BUFFER_SIZE); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - - /* operate at an address past the end of the last block */ - uint64_t endAddr = block.addr + block.size; - for (; ARM_STORAGE_VALID_BLOCK(&block); drv->GetNextBlock(&block, &block)) { - endAddr = block.addr + block.size; - } - rc = drv->Erase(endAddr + 1, BUFFER_SIZE); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - - ARM_STORAGE_INFO info; - rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - drv->GetNextBlock(NULL, &block); /* get the first block */ - TEST_ASSERT(block.size >= block.attributes.erase_unit); - TEST_ASSERT((block.size % block.attributes.erase_unit) == 0); - - rc = drv->Erase(block.addr + 1, block.attributes.erase_unit); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->Erase(block.addr, block.attributes.erase_unit - 1); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->Erase(block.addr, block.attributes.erase_unit + 1); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->Erase(block.addr, block.attributes.erase_unit / 2); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); -} - -template -void eraseCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - static unsigned eraseIteration = 0; -#ifndef __CC_ARM - printf("erase<%u> complete callback: iteration %u\n", ERASE_UNITS_PER_ITERATION, eraseIteration); -#endif - TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_ERASE); - - /* test that the actual sector has been erased */ - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - 
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT_EQUAL(ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, status); - - const uint64_t addr = firstBlock.addr + eraseIteration * ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit; - ++eraseIteration; - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - //printf("testing erased sector at addr %lu", (uint32_t)addr); - verifyBytePattern(addr, ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0); - - Harness::validate_callback(); -} - -template -control_t test_erase(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 5; - printf("in test_erase<%u> with call_count %u\n", ERASE_UNITS_PER_ITERATION, call_count); - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - /* Get the first block. */ - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - if (firstBlock.size < ((call_count - 1) * ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit)) { - printf("firstBlock isn't large enough to support instance %u of test_erase<%u>\n", call_count, ERASE_UNITS_PER_ITERATION); - return CaseNext; - } - - /* Update the completion callback to 'eraseCompleteCallback'. */ - if (call_count == 2) { - int32_t rc = drv->Initialize(eraseCompleteCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - } - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - /* choose an increasing address for each iteration. */ - uint64_t addr = firstBlock.addr + (call_count - 2) * ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit; - - printf("erasing %lu bytes at addr %lu\n", (ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit), (uint32_t)addr); - int32_t rc = drv->Erase(addr, ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT_EQUAL(ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, rc); - - ARM_STORAGE_INFO info; - rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - /* test that the actual sector has been erased */ - printf("testing erased sector at addr %lu\n", (uint32_t)addr); - verifyBytePattern(addr, ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, (uint8_t)0xFF); - - return (call_count < REPEAT_INSTANCES) ? 
CaseRepeatAll : CaseNext; - } -} - -void eraseChipCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ -#ifndef __CC_ARM - printf("eraseChip complete callback\n"); -#endif - TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK); - TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_ERASE_ALL); - - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - uint64_t addr = firstBlock.addr; - - /* test that the flash has been erased */ -#ifndef __CC_ARM - printf("testing erased chip\n"); -#endif - unsigned index = 0; - static const unsigned MAX_VERIFY_ITERATIONS = 5; - while ((index < MAX_VERIFY_ITERATIONS) && (addr < (firstBlock.addr + firstBlock.size))) { - // printf("testing erased chip at addr %lu\n", (uint32_t)addr); - verifyBytePattern(addr, firstBlock.attributes.erase_unit, (uint8_t)0xFF); - - index++; - addr += firstBlock.attributes.erase_unit; - } - - Harness::validate_callback(); -} - -control_t test_eraseAll(const size_t call_count) -{ - static const unsigned REPEAT_INSTANCES = 5; - printf("in test_eraseAll with call_count %u\n", call_count); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - if (!capabilities.erase_all) { - printf("chip erase not supported on this flash\n"); - return CaseNext; - } - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - /* Update the completion callback to 'eraseChipCompleteCallback'. */ - if (call_count == 2) { - int32_t rc = drv->Initialize(eraseChipCompleteCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - } - - /* Get the first block. */ - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - uint64_t addr = firstBlock.addr; - printf("erasing chip\n"); - - int32_t rc = drv->EraseAll(); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200); - } else { - TEST_ASSERT(rc == 1); - - /* test that the flash has been erased */ - unsigned index = 0; - static const unsigned MAX_VERIFY_ITERATIONS = 5; - while ((index < MAX_VERIFY_ITERATIONS) && (addr < (firstBlock.addr + firstBlock.size))) { - //printf("testing erased chip at addr %lu", (uint32_t)addr); - ARM_STORAGE_INFO info; - rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - verifyBytePattern(addr, firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0); - - index++; - addr += firstBlock.attributes.erase_unit; - } - - return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext; - } -} - -void test_programDataWithInvalidParameters(void) -{ - int32_t rc; - - rc = drv->ProgramData(0, NULL, 0); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->ProgramData(0, buffer, 0); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->ProgramData(0, NULL, BUFFER_SIZE); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - - /* operate before the start of the first block. 
*/ - ARM_STORAGE_BLOCK block; - rc = drv->GetNextBlock(NULL, &block); /* get the first block */ - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&block)); - TEST_ASSERT(block.size > 0); - rc = drv->ProgramData(block.addr - 1, buffer, BUFFER_SIZE); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - - /* operate at an address past the end of the last block */ - uint64_t endAddr = block.addr + block.size; - for (; ARM_STORAGE_VALID_BLOCK(&block); drv->GetNextBlock(&block, &block)) { - endAddr = block.addr + block.size; - } - rc = drv->ProgramData(endAddr + 1, buffer, BUFFER_SIZE); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - - ARM_STORAGE_INFO info; - rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - if (info.program_unit <= 1) { - return; /* if program_unit is 1 (or 0), we can't proceed with any alignment tests */ - } - - drv->GetNextBlock(NULL, &block); /* get the first block */ - - TEST_ASSERT(block.size >= info.program_unit); - - rc = drv->ProgramData(block.addr + 1, buffer, info.program_unit); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->ProgramData(block.addr, buffer, info.program_unit - 1); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->ProgramData(block.addr, buffer, info.program_unit + 1); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); - rc = drv->ProgramData(block.addr, buffer, info.program_unit / 2); - TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc); -} - -template -void programDataWithMultipleProgramUnitsCallback(int32_t status, ARM_STORAGE_OPERATION operation) -{ - TEST_ASSERT(status >= ARM_DRIVER_OK); - - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - size_t rangeNeededForTest = (N_UNITS * info.program_unit); - /* round-up range to the nearest erase_unit */ - rangeNeededForTest = ((rangeNeededForTest + firstBlock.attributes.erase_unit - 1) / firstBlock.attributes.erase_unit) * firstBlock.attributes.erase_unit; - - static const uint32_t BYTE_PATTERN = 0xABCDEF00; - - if (operation == ARM_STORAGE_OPERATION_ERASE) { - TEST_ASSERT_EQUAL(rangeNeededForTest, status); - TEST_ASSERT((N_UNITS * info.program_unit) <= BUFFER_SIZE); - - /* setup byte pattern in buffer */ - if (info.program_unit >= sizeof(BYTE_PATTERN)) { - for (size_t index = 0; index < ((N_UNITS * info.program_unit) / sizeof(BYTE_PATTERN)); index++) { - ((uint32_t *)buffer)[index] = BYTE_PATTERN; - } - } else { - for (size_t index = 0; index < ((N_UNITS * info.program_unit)); index++) { - buffer[index] = ((const uint8_t *)&BYTE_PATTERN)[0]; - } - } - -#ifndef __CC_ARM - printf("Callback: programming %lu bytes at address %lu with pattern 0x%lx\n", (N_UNITS * info.program_unit), (uint32_t)firstBlock.addr, BYTE_PATTERN); -#endif - rc = drv->ProgramData(firstBlock.addr, buffer, (N_UNITS * info.program_unit)); - TEST_ASSERT(rc >= ARM_DRIVER_OK); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return; /* We've successfully pended a programData operation; we'll have another - * invocation of this callback when programming completes. 
*/ - } - - status = rc; - } - - TEST_ASSERT_EQUAL((N_UNITS * info.program_unit), status); - -#ifndef __CC_ARM - printf("Callback: verifying programmed sector at addr %lu\n", (uint32_t)firstBlock.addr); -#endif - if (info.program_unit >= sizeof(BYTE_PATTERN)) { - verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), BYTE_PATTERN); - } else { - verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), ((const uint8_t *)&BYTE_PATTERN)[0]); - } - - Harness::validate_callback(); -} - -template -control_t test_programDataWithMultipleProgramUnits(const size_t call_count) -{ - int32_t rc; - printf("in test_programDataWithMultipleProgramUnits<%u> with call_count %u\n", N_UNITS, call_count); - - if (call_count == 1) { - /* Achieve basic initialization for the driver before anything else. */ - return preambleForBasicInitialization(); - } - - /* Update the completion callback to 'programDataWithMultipleProgramUnitsCallback'. */ - if (call_count == 2) { - rc = drv->Initialize(programDataWithMultipleProgramUnitsCallback); - TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been - * initialized by the previous iteration. */ - - ARM_STORAGE_BLOCK firstBlock; - drv->GetNextBlock(NULL, &firstBlock); /* get first block */ - TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock)); - TEST_ASSERT(firstBlock.size > 0); - - ARM_STORAGE_INFO info; - int32_t rc = drv->GetInfo(&info); - TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc); - - ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities(); - - size_t rangeNeededForTest = (N_UNITS * info.program_unit); - /* round-up range to the nearest erase_unit */ - rangeNeededForTest = ((rangeNeededForTest + firstBlock.attributes.erase_unit - 1) / firstBlock.attributes.erase_unit) * firstBlock.attributes.erase_unit; - if (firstBlock.size < rangeNeededForTest) { - printf("first block not large enough; rangeNeededForTest: %u\n", rangeNeededForTest); - return CaseNext; /* first block isn't large enough for the intended operation */ - } - - if (rangeNeededForTest > BUFFER_SIZE) { - printf("buffer (%u) not large enough; rangeNeededForTest: %u\n", BUFFER_SIZE, rangeNeededForTest); - return CaseNext; - } - - // printf("erasing %u bytes at addr %lu\n", rangeNeededForTest, (uint32_t)firstBlock.addr); - rc = drv->Erase(firstBlock.addr, rangeNeededForTest); - TEST_ASSERT(rc >= 0); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return CaseTimeout(500); - } else { - TEST_ASSERT_EQUAL(rangeNeededForTest, rc); - - /* setup byte pattern in buffer */ - static const uint32_t BYTE_PATTERN = 0xABCDEF00; - if (info.program_unit >= sizeof(BYTE_PATTERN)) { - for (size_t index = 0; index < ((N_UNITS * info.program_unit) / sizeof(BYTE_PATTERN)); index++) { - ((uint32_t *)buffer)[index] = BYTE_PATTERN; - } - } else { - for (size_t index = 0; index < ((N_UNITS * info.program_unit)); index++) { - buffer[index] = ((const uint8_t *)&BYTE_PATTERN)[0]; - } - } - - printf("programming %lu bytes at address %lu with pattern 0x%lx\n", (N_UNITS * info.program_unit), (uint32_t)firstBlock.addr, BYTE_PATTERN); - rc = drv->ProgramData(firstBlock.addr, buffer, (N_UNITS * info.program_unit)); - TEST_ASSERT(rc >= 0); - if (rc == ARM_DRIVER_OK) { - TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops); - return CaseTimeout(500); - } else { - TEST_ASSERT_EQUAL((N_UNITS * info.program_unit), rc); - - printf("verifying programmed sector at addr %lu\n", (uint32_t)firstBlock.addr); - if (info.program_unit >= sizeof(BYTE_PATTERN)) { - 
-                    verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), BYTE_PATTERN);
-                } else {
-                    verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), ((const uint8_t *)&BYTE_PATTERN)[0]);
-                }
-
-                return CaseNext;
-            }
-        }
-    }
-
-    return CaseNext;
-}
-
-#ifndef AVOID_GREENTEA
-// Custom setup handler required for proper Greentea support
-utest::v1::status_t greentea_setup(const size_t number_of_cases)
-{
-    GREENTEA_SETUP(60, "default_auto");
-    // Call the default reporting function
-    return greentea_test_setup_handler(number_of_cases);
-}
-#else
-status_t default_setup(const size_t)
-{
-    return STATUS_CONTINUE;
-}
-#endif
-
-// Specify all your test cases here
-Case cases[] = {
-    Case("get version", test_getVersion),
-    Case("get capabilities", test_getCapabilities),
-    Case("get info", test_getInfo),
-    Case("initialize", test_initialize),
-    Case("uninitialize", test_uninitialize),
-    Case("power control", test_powerControl),
-    Case("erase all", test_eraseAll),
-    Case("read data", test_readData),
-    Case("erase with invalid parameters", test_eraseWithInvalidParameters),
-    Case("erase single unit", test_erase<1>),
-    Case("erase two units", test_erase<2>),
-    Case("erase four units", test_erase<4>),
-    Case("erase eight units", test_erase<8>),
-    Case("program data with invalid parameters", test_programDataWithInvalidParameters),
-    Case("program data using program_unit", test_programDataUsingProgramUnit),
-    Case("program data using optimal_program_unit", test_programDataUsingOptimalProgramUnit),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<2>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<7>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<8>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<9>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<31>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<32>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<33>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<127>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<128>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<129>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1023>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1024>),
-    Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1025>),
-};
-
-// Declare your test specification with a custom setup handler
-#ifndef AVOID_GREENTEA
-Specification specification(greentea_setup, cases);
-#else
-Specification specification(default_setup, cases);
-#endif
-
-int main(int argc, char** argv)
-{
-    // Run the test specification
-    Harness::run(specification);
-}

From 447acd03608ffc1621722bf584373fba329adb22 Mon Sep 17 00:00:00 2001
From: Jimmy Brisson
Date: Tue, 10 Jul 2018 13:36:01 -0500
Subject: [PATCH 43/44] Make release errors very visible

---
 tools/build_release.py | 27 ++++++++++-----------------
 1 file changed, 10 insertions(+), 17 deletions(-)

diff --git a/tools/build_release.py b/tools/build_release.py
index c2aa85f7f58..92a4d459bb4 100644
--- a/tools/build_release.py
+++ b/tools/build_release.py
@@ -168,23 +168,16 @@
             toolchains = toolchainSet.intersection(set((options.toolchains).split(',')))
 
         for toolchain in toolchains:
-            id = "%s::%s" % (target_name, toolchain)
-
-            profile = extract_profile(parser, options, toolchain)
-            notify = TerminalNotifier(options.verbose)
-
-            try:
-                built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name],
-                                                 toolchain,
-                                                 notify=notify,
-                                                 jobs=options.jobs,
-                                                 report=build_report,
-                                                 properties=build_properties,
-                                                 build_profile=profile)
-
-            except Exception, e:
-                print str(e)
-                status = False
+            built_mbed_lib = build_mbed_libs(
+                TARGET_MAP[target_name],
+                toolchain,
+                notify=TerminalNotifier(options.verbose),
+                jobs=options.jobs,
+                report=build_report,
+                properties=build_properties,
+                build_profile=extract_profile(parser, options, toolchain),
+            )
+
     # copy targets.json file as part of the release
     copy(join(dirname(abspath(__file__)), '..', 'targets', 'targets.json'),
          MBED_LIBRARIES)

From 750a2ace64e4082299dee17a53a9afbb4a4e32a8 Mon Sep 17 00:00:00 2001
From: Jimmy Brisson
Date: Tue, 10 Jul 2018 13:42:10 -0500
Subject: [PATCH 44/44] Use paths explicitly in copy from mbed libs

---
 tools/build_api.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/tools/build_api.py b/tools/build_api.py
index 229063f8f86..6a97a3928ca 100644
--- a/tools/build_api.py
+++ b/tools/build_api.py
@@ -1010,18 +1010,21 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
         hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])
 
         # Copy everything into the build directory
-        to_copy = [FileRef(basename(p), p) for p in sum([
-            hal_res.headers,
-            hal_res.hex_files,
-            hal_res.bin_files,
-            hal_res.libraries,
-            cmsis_res.headers,
-            cmsis_res.bin_files,
-            [cmsis_res.linker_script, hal_res.linker_script, MBED_CONFIG_FILE],
+        to_copy_paths = [
+            hal_res.get_file_paths(FileType.HEADER),
+            hal_res.get_file_paths(FileType.HEX),
+            hal_res.get_file_paths(FileType.BIN),
+            hal_res.get_file_paths(FileType.LIB),
+            cmsis_res.get_file_paths(FileType.HEADER),
+            cmsis_res.get_file_paths(FileType.BIN),
+            cmsis_res.get_file_paths(FileType.LD_SCRIPT),
+            hal_res.get_file_paths(FileType.LD_SCRIPT),
+            [MBED_CONFIG_FILE],
             cmsis_objects,
             hal_objects,
             separate_objects,
-        ], [])]
+        ]
+        to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
         toolchain.copy_files(to_copy, build_toolchain)
 
         if report is not None:
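
Reviewer note on PATCH 43: deleting the try/except changes behavior, not just layout. Previously any exception from build_mbed_libs was reduced to a printed message and a status flag; now it propagates and aborts the release run with a full traceback, which is what "make release errors very visible" means. A tiny standalone sketch of the difference, with a hypothetical flaky_build() standing in for the real call:

    def flaky_build():
        raise RuntimeError("toolchain not found")

    # Old behaviour: the blanket except swallows the failure; only a
    # one-line message and a cleared status flag survive.
    status = True
    try:
        flaky_build()
    except Exception as e:  # mirrors the deleted "except Exception, e"
        print(str(e))
        status = False

    # New behaviour: the same failure propagates immediately.
    # flaky_build()  # uncommenting this raises RuntimeError with a traceback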
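
Reviewer note on PATCH 44: the final hunk flattens the per-FileType path lists with the sum(list_of_lists, []) idiom before wrapping each path in a FileRef. A minimal, self-contained sketch of just that step, where the path values are made-up examples and FileRef is stubbed as a namedtuple rather than imported from the mbed tools:

    from collections import namedtuple
    from os.path import basename

    # Stand-in for the FileRef used by the mbed tools (name, path).
    FileRef = namedtuple("FileRef", ["name", "path"])

    # Hypothetical per-category path lists, mirroring the shape of
    # to_copy_paths after the get_file_paths(FileType...) calls.
    to_copy_paths = [
        ["mbed/mbed.h", "hal/us_ticker_api.h"],  # header-like entries
        ["cmsis/startup.o"],                     # object files
        ["mbed_config.h"],                       # single-file entries
    ]

    # sum(..., []) concatenates the inner lists left to right, so the
    # comprehension sees one flat sequence of paths.
    to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]

    for ref in to_copy:
        print("%s <- %s" % (ref.name, ref.path))

For long lists, itertools.chain.from_iterable avoids the repeated list copying that sum(..., []) incurs, but at this scale the simpler idiom reads fine.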