From f6a15fd3c88baf0722d673635cd1034c5e9c29e6 Mon Sep 17 00:00:00 2001 From: Sarah Marsh Date: Wed, 20 Jul 2016 14:43:09 -0500 Subject: [PATCH 01/17] Refactor export subsystem Makes several broad changes: - removes dead code that dealt with the online build system - replaces export function with a much simpler one that: - does not copy any sources - the zip file hits the disk - the mbed_config.h hits the disk - the project files hit the disk - nothing else hits the disk - exporters use Resource object scanned with a toolchain - progen exporters don't optionally build a project instead they have a build function that may be called afterwards - much of the code passes pylint (have a score of 9 or above): - project.py - project_api.py - export/__init__.py - export/exporters.py - test/export/build_test.py --- tools/build_api.py | 6 +- tools/export/__init__.py | 222 ++++-------------- tools/export/atmelstudio.py | 8 +- tools/export/codered.py | 4 +- tools/export/coide.py | 4 +- tools/export/ds5_5.py | 2 +- tools/export/e2studio.py | 4 +- tools/export/emblocks.py | 4 +- tools/export/exporters.py | 401 +++++++++++++++----------------- tools/export/gccarm.py | 14 +- tools/export/iar.py | 36 +-- tools/export/kds.py | 4 +- tools/export/simplicityv3.py | 6 +- tools/export/sw4stm32.py | 4 +- tools/export/uvision4.py | 41 ++-- tools/export/uvision5.py | 42 ++-- tools/project.py | 230 +++++++++++------- tools/project_api.py | 329 +++++++++++++++++++------- tools/test/export/build_test.py | 256 ++++++++++---------- tools/tests.py | 2 +- tools/toolchains/__init__.py | 1 + 21 files changed, 836 insertions(+), 784 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index 41688a2c87d..cc85bad1362 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -326,7 +326,7 @@ def prepare_toolchain(src_paths, target, toolchain_name, return toolchain def scan_resources(src_paths, toolchain, dependencies_paths=None, - inc_dirs=None): + inc_dirs=None, base_path=None): """ Scan resources using initialized toolcain Positional arguments @@ -338,9 +338,9 @@ def scan_resources(src_paths, toolchain, dependencies_paths=None, """ # Scan src_path - resources = toolchain.scan_resources(src_paths[0]) + resources = toolchain.scan_resources(src_paths[0], base_path=base_path) for path in src_paths[1:]: - resources.add(toolchain.scan_resources(path)) + resources.add(toolchain.scan_resources(path, base_path=base_path)) # Scan dependency paths for include dirs if dependencies_paths is not None: diff --git a/tools/export/__init__.py b/tools/export/__init__.py index 8ae7c69070e..8241c1e338c 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -1,27 +1,28 @@ +"""The generic interface for all exporters. """ -mbed SDK -Copyright (c) 2011-2013 ARM Limited - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" +# mbed SDK +# Copyright (c) 2011-2013 ARM Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import os, tempfile from os.path import join, exists, basename from shutil import copytree, rmtree, copy import yaml -from tools.utils import mkdir -from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio -from tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException, FailedBuildException +from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar +from tools.export import emblocks, coide, kds, simplicityv3, atmelstudio +from tools.export import sw4stm32, e2studio +from tools.export.exporters import OldLibrariesException, FailedBuildException from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP from project_generator_definitions.definitions import ProGenDef @@ -52,162 +53,25 @@ To export this project please import the export version of the mbed library. """ -def online_build_url_resolver(url): - # TODO: Retrieve the path and name of an online library build URL - return {'path':'', 'name':''} - - -def export(project_path, project_name, ide, target, destination='/tmp/', - tempdir=None, pgen_build = False, clean=True, extra_symbols=None, make_zip=True, sources_relative=False, - build_url_resolver=online_build_url_resolver, progen_build=False): - # Convention: we are using capitals for toolchain and target names - if target is not None: - target = target.upper() - - if tempdir is None: - tempdir = tempfile.mkdtemp() - - use_progen = False - supported = True - report = {'success': False, 'errormsg':'', 'skip': False} - - if ide is None or ide == "zip": - # Simple ZIP exporter - try: - ide = "zip" - exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols) - exporter.scan_and_copy_resources(project_path, tempdir, sources_relative) - exporter.generate() - report['success'] = True - except OldLibrariesException, e: - report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS - else: - if ide not in EXPORTERS: - report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide) - report['skip'] = True - else: - Exporter = EXPORTERS[ide] - target = EXPORT_MAP.get(target, target) - try: - if Exporter.PROGEN_ACTIVE: - use_progen = True - except AttributeError: - pass - - if target not in Exporter.TARGETS or Exporter.TOOLCHAIN not in TARGET_MAP[target].supported_toolchains: - supported = False - - if use_progen: - if not ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']): - supported = False - - if supported: - # target checked, export - try: - exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols, sources_relative=sources_relative) - exporter.scan_and_copy_resources(project_path, tempdir, sources_relative) - if progen_build: - #try to build with pgen ide builders - try: - exporter.generate(progen_build=True) - report['success'] = True - except FailedBuildException, f: - report['errormsg'] = "Build Failed" - else: - exporter.generate() - report['success'] = True - except OldLibrariesException, e: - report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS - - 
else: - report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide) - report['skip'] = True +def mcu_ide_matrix(verbose_html=False): + """Shows target map using prettytable - zip_path = None - if report['success']: - # readme.txt to contain more exported data - exporter_yaml = { - 'project_generator': { - 'active' : False, - } - } - if use_progen: - try: - import pkg_resources - version = pkg_resources.get_distribution('project_generator').version - exporter_yaml['project_generator']['version'] = version - exporter_yaml['project_generator']['active'] = True; - exporter_yaml['project_generator_definitions'] = {} - version = pkg_resources.get_distribution('project_generator_definitions').version - exporter_yaml['project_generator_definitions']['version'] = version - except ImportError: - pass - with open(os.path.join(tempdir, 'exporter.yaml'), 'w') as outfile: - yaml.dump(exporter_yaml, outfile, default_flow_style=False) - # add readme file to every offline export. - open(os.path.join(tempdir, 'GettingStarted.htm'),'w').write(''% (ide)) - # copy .hgignore file to exported direcotry as well. - if exists(os.path.join(exporter.TEMPLATE_DIR,'.hgignore')): - copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'), tempdir) - if make_zip: - zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean) - else: - zip_path = destination - - return zip_path, report - - -############################################################################### -# Generate project folders following the online conventions -############################################################################### -def copy_tree(src, dst, clean=True): - if exists(dst): - if clean: - rmtree(dst) - else: - return - - copytree(src, dst) - - -def setup_user_prj(user_dir, prj_path, lib_paths=None): - """ - Setup a project with the same directory structure of the mbed online IDE + Keyword argumets: + verbose_html - print the matrix in html format """ - mkdir(user_dir) - - # Project Path - copy_tree(prj_path, join(user_dir, "src")) - - # Project Libraries - user_lib = join(user_dir, "lib") - mkdir(user_lib) - - if lib_paths is not None: - for lib_path in lib_paths: - copy_tree(lib_path, join(user_lib, basename(lib_path))) - -def mcu_ide_matrix(verbose_html=False, platform_filter=None): - """ Shows target map using prettytable """ - supported_ides = [] - for key in EXPORTERS.iterkeys(): - supported_ides.append(key) - supported_ides.sort() - from prettytable import PrettyTable, ALL # Only use it in this function so building works without extra modules + supported_ides = sorted(EXPORTERS.keys()) + # Only use it in this function so building works without extra modules + from prettytable import PrettyTable, ALL # All tests status table print - columns = ["Platform"] + supported_ides - pt = PrettyTable(columns) + table_printer = PrettyTable(["Platform"] + supported_ides) # Align table - for col in columns: - pt.align[col] = "c" - pt.align["Platform"] = "l" + for col in supported_ides: + table_printer.align[col] = "c" + table_printer.align["Platform"] = "l" perm_counter = 0 - target_counter = 0 for target in sorted(TARGET_NAMES): - target_counter += 1 - row = [target] # First column is platform name for ide in supported_ides: text = "-" @@ -218,20 +82,24 @@ def mcu_ide_matrix(verbose_html=False, platform_filter=None): text = "x" perm_counter += 1 row.append(text) - pt.add_row(row) + table_printer.add_row(row) - pt.border = True - pt.vrules = ALL - pt.hrules = ALL - # creates a html page suitable for a 
browser - # result = pt.get_html_string(format=True) if verbose_html else pt.get_string() + table_printer.border = True + table_printer.vrules = ALL + table_printer.hrules = ALL # creates a html page in a shorter format suitable for readme.md - result = pt.get_html_string() if verbose_html else pt.get_string() + if verbose_html: + result = table_printer.get_html_string() + else: + result = table_printer.get_string() result += "\n" result += "Total IDEs: %d\n"% (len(supported_ides)) - if verbose_html: result += "
" - result += "Total platforms: %d\n"% (target_counter) - if verbose_html: result += "
" + if verbose_html: + result += "
" + result += "Total platforms: %d\n"% (len(TARGET_NAMES)) + if verbose_html: + result += "
" result += "Total permutations: %d"% (perm_counter) - if verbose_html: result = result.replace("&", "&") + if verbose_html: + result = result.replace("&", "&") return result diff --git a/tools/export/atmelstudio.py b/tools/export/atmelstudio.py index f85a047b641..531196f56c2 100644 --- a/tools/export/atmelstudio.py +++ b/tools/export/atmelstudio.py @@ -61,7 +61,7 @@ def generate(self): ctx = { 'target': self.target, - 'name': self.program_name, + 'name': self.project_name, 'source_files': source_files, 'source_folders': source_folders, 'object_files': self.resources.objects, @@ -73,7 +73,7 @@ def generate(self): 'solution_uuid': solution_uuid.upper(), 'project_uuid': project_uuid.upper() } - ctx.update(self.progen_flags) + ctx.update(self.flags) target = self.target.lower() - self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.program_name) - self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.program_name) + self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.project_name) + self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.project_name) diff --git a/tools/export/codered.py b/tools/export/codered.py index d7f815a8278..4dfb6047e04 100644 --- a/tools/export/codered.py +++ b/tools/export/codered.py @@ -48,13 +48,13 @@ def generate(self): libraries.append(l[3:]) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'include_paths': self.resources.inc_dirs, 'linker_script': self.resources.linker_script, 'object_files': self.resources.objects, 'libraries': libraries, 'symbols': self.get_symbols() } - ctx.update(self.progen_flags) + ctx.update(self.flags) self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') diff --git a/tools/export/coide.py b/tools/export/coide.py index 77390afdd8a..a9219505418 100644 --- a/tools/export/coide.py +++ b/tools/export/coide.py @@ -98,7 +98,7 @@ def generate(self): self.resources.linker_script = '' ctx = { - 'name': self.program_name, + 'name': self.project_name, 'source_files': source_files, 'header_files': header_files, 'include_paths': self.resources.inc_dirs, @@ -111,4 +111,4 @@ def generate(self): target = self.target.lower() # Project file - self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.program_name) + self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.project_name) diff --git a/tools/export/ds5_5.py b/tools/export/ds5_5.py index 71242efdd73..e5d333757fe 100644 --- a/tools/export/ds5_5.py +++ b/tools/export/ds5_5.py @@ -54,7 +54,7 @@ def generate(self): }) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'include_paths': self.resources.inc_dirs, 'scatter_file': self.resources.linker_script, 'object_files': self.resources.objects + self.resources.libraries, diff --git a/tools/export/e2studio.py b/tools/export/e2studio.py index 66cd9dec9b3..8e68e5862ae 100644 --- a/tools/export/e2studio.py +++ b/tools/export/e2studio.py @@ -33,7 +33,7 @@ def generate(self): libraries.append(l[3:]) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'include_paths': self.resources.inc_dirs, 'linker_script': self.resources.linker_script, @@ -44,4 +44,4 @@ def generate(self): self.gen_file('e2studio_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('e2studio_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') self.gen_file('e2studio_%s_gdbinit.tmpl' % self.target.lower(), ctx, '.gdbinit') - 
self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.program_name) + self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.project_name) diff --git a/tools/export/emblocks.py b/tools/export/emblocks.py index a5f20d2c9d9..affbbee09a0 100644 --- a/tools/export/emblocks.py +++ b/tools/export/emblocks.py @@ -60,7 +60,7 @@ def generate(self): self.resources.linker_script = '' ctx = { - 'name': self.program_name, + 'name': self.project_name, 'target': self.target, 'toolchain': self.toolchain.name, 'source_files': source_files, @@ -77,4 +77,4 @@ def generate(self): } # EmBlocks intermediate file template - self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.program_name) + self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.project_name) diff --git a/tools/export/exporters.py b/tools/export/exporters.py index d1846372e56..5f20b6c5e2f 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -1,256 +1,227 @@ """Just a template for subclassing""" -import uuid, shutil, os, logging, fnmatch -from os import walk, remove -from os.path import join, dirname, isdir, split -from copy import copy -from jinja2 import Template, FileSystemLoader +import os +import sys +import logging +from os.path import join, dirname, relpath +from itertools import groupby +from jinja2 import FileSystemLoader from jinja2.environment import Environment -from contextlib import closing -from zipfile import ZipFile, ZIP_DEFLATED -from operator import add -from tools.utils import mkdir -from tools.toolchains import TOOLCHAIN_CLASSES from tools.targets import TARGET_MAP - -from project_generator.generate import Generator -from project_generator.project import Project +from project_generator.project import Project, ProjectTemplateInternal from project_generator.settings import ProjectSettings +from project_generator_definitions.definitions import ProGenDef + -from tools.config import Config +class OldLibrariesException(Exception): + """Exception that indicates an export can not complete due to an out of date + library version. + """ + pass -class OldLibrariesException(Exception): pass +class FailedBuildException(Exception): + """Exception that indicates that a build failed""" + pass -class FailedBuildException(Exception) : pass +class TargetNotSupportedException(Exception): + """Indicates that an IDE does not support a particular MCU""" + pass -# Exporter descriptor for TARGETS -# TARGETS as class attribute for backward compatibility (allows: if in Exporter.TARGETS) class ExporterTargetsProperty(object): + """ Exporter descriptor for TARGETS + TARGETS as class attribute for backward compatibility + (allows: if in Exporter.TARGETS) + """ def __init__(self, func): self.func = func def __get__(self, inst, cls): return self.func(cls) class Exporter(object): + """Exporter base class + + This class is meant to be extended by individual exporters, and provides a + few helper methods for implementing an exporter with either jinja2 or + progen. 
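
As an illustration of the jinja2 path mentioned above, a minimal exporter subclass under this new interface might look roughly like the sketch below. The MyIDE name, target list, toolchain and template file are invented for the example; the real exporters touched by this patch (codered, kds, ds5_5 and others) follow the same pattern.

    from tools.export.exporters import Exporter

    class MyIDE(Exporter):
        NAME = 'my_ide'          # hypothetical exporter name
        TOOLCHAIN = 'GCC_ARM'    # toolchain whose flags and symbols are reused
        TARGETS = ['LPC1768']    # boards this exporter claims to support

        def generate(self):
            # Build the template context from the Resources object the
            # exporter now receives, instead of copying sources around.
            ctx = {
                'name': self.project_name,
                'include_paths': self.resources.inc_dirs,
                'linker_script': self.resources.linker_script,
                'object_files': self.resources.objects,
                'symbols': self.toolchain.get_symbols(),
            }
            ctx.update(self.flags)
            # Render a jinja2 template from tools/export into export_dir.
            self.gen_file('my_ide.tmpl', ctx, '%s.myproj' % self.project_name)
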
+ """ TEMPLATE_DIR = dirname(__file__) DOT_IN_RELATIVE_PATH = False - - def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None, sources_relative=True): - self.inputDir = inputDir + NAME = None + TARGETS = None + TOOLCHAIN = None + + def __init__(self, target, export_dir, project_name, toolchain, + extra_symbols=None, resources=None): + """Initialize an instance of class exporter + Positional arguments: + target - the target mcu/board for this project + export_dir - the directory of the exported project files + project_name - the name of the project + toolchain - an instance of class toolchain + extra_symbols - a list of extra macros for the toolchain + + Keyword arguments: + resources - an instance of class Resources + """ + self.export_dir = export_dir self.target = target - self.program_name = program_name - self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target]) - self.build_url_resolver = build_url_resolver + self.project_name = project_name + self.toolchain = toolchain jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__))) self.jinja_environment = Environment(loader=jinja_loader) - self.extra_symbols = extra_symbols if extra_symbols else [] - self.config_macros = [] - self.sources_relative = sources_relative - self.config_header = None + self.resources = resources + self.symbols = self.toolchain.get_symbols() + self.generated_files = [] + self.project = None + + # Add extra symbols and config file symbols to the Exporter's list of + # symbols. + config_macros = self.toolchain.config.get_config_data_macros() + if config_macros: + self.symbols.extend(config_macros) + if extra_symbols: + self.symbols.extend(extra_symbols) def get_toolchain(self): + """A helper getter function that we should probably eliminate""" return self.TOOLCHAIN @property def flags(self): - return self.toolchain.flags - - @property - def progen_flags(self): - if not hasattr(self, "_progen_flag_cache") : - self._progen_flag_cache = dict([(key + "_flags", value) for key,value in self.flags.iteritems()]) - asm_defines = ["-D"+symbol for symbol in self.toolchain.get_symbols(True)] - c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()] - self._progen_flag_cache['asm_flags'] += asm_defines - self._progen_flag_cache['c_flags'] += c_defines - self._progen_flag_cache['cxx_flags'] += c_defines - if self.config_header: - self._progen_flag_cache['c_flags'] += self.toolchain.get_config_option(self.config_header) - self._progen_flag_cache['cxx_flags'] += self.toolchain.get_config_option(self.config_header) - return self._progen_flag_cache - - def __scan_and_copy(self, src_path, trg_path): - resources = self.toolchain.scan_resources(src_path) - - for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources', - 'objects', 'libraries', 'linker_script', - 'lib_builds', 'lib_refs', 'hex_files', 'bin_files']: - r = getattr(resources, r_type) - if r: - self.toolchain.copy_files(r, trg_path, resources=resources) - return resources - - @staticmethod - def _get_dir_grouped_files(files): - """ Get grouped files based on the dirname """ - files_grouped = {} - for file in files: - rel_path = os.path.relpath(file, os.getcwd()) - dir_path = os.path.dirname(rel_path) - if dir_path == '': - # all files within the current dir go into Source_Files - dir_path = 'Source_Files' - if not dir_path in files_grouped.keys(): - files_grouped[dir_path] = [] - files_grouped[dir_path].append(file) - return files_grouped + """Returns a dictionary of toolchain 
flags. + Keys of the dictionary are: + cxx_flags - c++ flags + c_flags - c flags + ld_flags - linker flags + asm_flags - assembler flags + common_flags - common options + """ + config_header = self.toolchain.get_config_header() + config_header = relpath(config_header, + self.resources.file_basepath[config_header]) + flags = {key + "_flags": value for key, value + in self.toolchain.flags.iteritems()} + asm_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols(True)] + c_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols()] + flags['asm_flags'] += asm_defines + flags['c_flags'] += c_defines + flags['cxx_flags'] += c_defines + if config_header: + flags['c_flags'] += self.toolchain.get_config_option(config_header) + flags['cxx_flags'] += self.toolchain.get_config_option( + config_header) + return flags + + def get_source_paths(self): + """Returns a list of the directories where source files are contained""" + source_keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', + 'objects', 'libraries'] + source_files = [] + for key in source_keys: + source_files.extend(getattr(self.resources, key)) + return list(set([os.path.dirname(src) for src in source_files])) def progen_get_project_data(self): """ Get ProGen project data """ # provide default data, some tools don't require any additional # tool specific settings - code_files = [] - for r_type in ['c_sources', 'cpp_sources', 's_sources']: - for file in getattr(self.resources, r_type): - code_files.append(file) - - sources_files = code_files + self.resources.hex_files + self.resources.objects + \ - self.resources.libraries - sources_grouped = Exporter._get_dir_grouped_files(sources_files) - headers_grouped = Exporter._get_dir_grouped_files(self.resources.headers) - project_data = { - 'common': { - 'sources': sources_grouped, - 'includes': headers_grouped, - 'build_dir':'.build', - 'target': [TARGET_MAP[self.target].progen['target']], - 'macros': self.get_symbols(), - 'export_dir': [self.inputDir], - 'linker_file': [self.resources.linker_script], - } - } + def make_key(src): + """turn a source file into it's group name""" + key = os.path.basename(os.path.dirname(src)) + if not key: + key = os.path.basename(os.path.normpath(self.export_dir)) + return key + + def grouped(sources): + """Group the source files by their encompassing directory""" + data = sorted(sources, key=make_key) + return {k: list(g) for k, g in groupby(data, make_key)} + + if self.toolchain.get_config_header(): + config_header = self.toolchain.get_config_header() + config_header = relpath(config_header, + self.resources.file_basepath[config_header]) + else: + config_header = None + + # we want to add this to our include dirs + config_dir = os.path.dirname(config_header) if config_header else [] + + project_data = ProjectTemplateInternal._get_project_template() + + project_data['target'] = TARGET_MAP[self.target].progen['target'] + project_data['source_paths'] = self.get_source_paths() + project_data['include_paths'] = self.resources.inc_dirs + [config_dir] + project_data['include_files'] = grouped(self.resources.headers) + project_data['source_files_s'] = grouped(self.resources.s_sources) + project_data['source_files_c'] = grouped(self.resources.c_sources) + project_data['source_files_cpp'] = grouped(self.resources.cpp_sources) + project_data['source_files_obj'] = grouped(self.resources.objects) + project_data['source_files_lib'] = grouped(self.resources.libraries) + project_data['output_dir']['path'] = self.export_dir + project_data['linker_file'] 
= self.resources.linker_script + project_data['macros'] = self.symbols + project_data['build_dir'] = 'build' + project_data['template'] = None + project_data['name'] = self.project_name + project_data['output_type'] = 'exe' + project_data['debugger'] = None return project_data - def progen_gen_file(self, tool_name, project_data, progen_build=False): - """ Generate project using ProGen Project API """ + def progen_gen_file(self, project_data): + """ Generate project using ProGen Project API + Positional arguments: + tool_name - the tool for which to generate project files + project_data - a dict whose base key, values are specified in + progen_get_project_data, the items will have been + modified by Exporter subclasses + + Keyword arguments: + progen_build - A boolean that determines if the tool will build the + project + """ + if not self.check_supported(self.NAME): + raise TargetNotSupportedException("Target not supported") settings = ProjectSettings() - project = Project(self.program_name, [project_data], settings) - # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen - # thinks it is not dict but a file, and adds them to workspace. - project.project['common']['include_paths'] = self.resources.inc_dirs - project.generate(tool_name, copied=not self.sources_relative) - if progen_build: - print("Project exported, building...") - result = project.build(tool_name) - if result == -1: - raise FailedBuildException("Build Failed") - - def __scan_all(self, path): - resources = [] - - for root, dirs, files in walk(path): - for d in copy(dirs): - if d == '.' or d == '..': - dirs.remove(d) - - for file in files: - file_path = join(root, file) - resources.append(file_path) - - return resources - - def scan_and_copy_resources(self, prj_paths, trg_path, relative=False): - # Copy only the file for the required target and toolchain - lib_builds = [] - # Create the configuration object - if isinstance(prj_paths, basestring): - prj_paths = [prj_paths] - config = Config(self.target, prj_paths) - for src in ['lib', 'src']: - resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path) - for path in prj_paths[1:]: - resources.add(self.__scan_and_copy(join(path, src), trg_path)) - - lib_builds.extend(resources.lib_builds) - - # The repository files - #for repo_dir in resources.repo_dirs: - # repo_files = self.__scan_all(repo_dir) - # for path in prj_paths: - # self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src)) - - # The libraries builds - for bld in lib_builds: - build_url = open(bld).read().strip() - lib_data = self.build_url_resolver(build_url) - lib_path = lib_data['path'].rstrip('\\/') - self.__scan_and_copy(lib_path, join(trg_path, lib_data['name'])) - - # Create .hg dir in mbed build dir so it's ignored when versioning - hgdir = join(trg_path, lib_data['name'], '.hg') - mkdir(hgdir) - fhandle = file(join(hgdir, 'keep.me'), 'a') - fhandle.close() - - if not relative: - # Final scan of the actual exported resources - resources = self.toolchain.scan_resources(trg_path) - resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH) - else: - # use the prj_dir (source, not destination) - resources = self.toolchain.scan_resources(prj_paths[0]) - for path in prj_paths[1:]: - resources.add(toolchain.scan_resources(path)) - - # Loads the resources into the config system which might expand/modify resources based on config data - self.resources = config.load_resources(resources) - - if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and 
self.MBED_CONFIG_HEADER_SUPPORTED : - # Add the configuration file to the target directory - self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME - config.get_config_data_header(join(trg_path, self.config_header)) - self.config_macros = [] - self.resources.inc_dirs.append(".") - else: - # And add the configuration macros to the toolchain - self.config_macros = config.get_config_data_macros() + self.project = Project(self.project_name, [project_data], settings) + self.project.project['export'] = project_data.copy() + self.project.generate(self.NAME, copied=False, fill=False) + for middle in self.project.generated_files.values(): + for field, thing in middle.iteritems(): + if field == "files": + for filename in thing.values(): + self.generated_files.append(filename) + + def progen_build(self): + """Build a project that was already generated by progen""" + print("Project {} exported, building for {}...".format( + self.project_name, self.NAME)) + sys.stdout.flush() + result = self.project.build(self.NAME) + if result == -1: + raise FailedBuildException("Build Failed") + + def check_supported(self, ide): + """Indicated if this combination of IDE and MCU is supported""" + if self.target not in self.TARGETS or \ + self.TOOLCHAIN not in TARGET_MAP[self.target].supported_toolchains: + return False + if not ProGenDef(ide).is_supported( + TARGET_MAP[self.target].progen['target']): + return False + return True def gen_file(self, template_file, data, target_file): - template_path = join(Exporter.TEMPLATE_DIR, template_file) - template = self.jinja_environment.get_template(template_file) + """Generates a project file from a template using jinja""" + jinja_loader = FileSystemLoader( + os.path.dirname(os.path.abspath(__file__))) + jinja_environment = Environment(loader=jinja_loader) + + template = jinja_environment.get_template(template_file) target_text = template.render(data) - target_path = join(self.inputDir, target_file) - logging.debug("Generating: %s" % target_path) + target_path = join(self.export_dir, target_file) + logging.debug("Generating: %s", target_path) open(target_path, "w").write(target_text) - - def get_symbols(self, add_extra_symbols=True): - """ This function returns symbols which must be exported. - Please add / overwrite symbols in each exporter separately - """ - - # We have extra symbols from e.g. 
libraries, we want to have them also added to export - extra = self.extra_symbols if add_extra_symbols else [] - if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED: - # If the config header is supported, we will preinclude it and do not not - # need the macros as preprocessor flags - return extra - - symbols = self.toolchain.get_symbols(True) + self.toolchain.get_symbols() \ - + self.config_macros + extra - return symbols - -def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True): - uid = str(uuid.uuid4()) - zipfilename = '%s.zip'%uid - - logging.debug("Zipping up %s to %s" % (tempdirectory, join(destination, zipfilename))) - # make zip - def zipdir(basedir, archivename): - assert isdir(basedir) - fakeroot = program_name + '/' - with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z: - for root, _, files in os.walk(basedir): - # NOTE: ignore empty directories - for fn in files: - absfn = join(root, fn) - zfn = fakeroot + '/' + absfn[len(basedir)+len(os.sep):] - z.write(absfn, zfn) - - zipdir(tempdirectory, join(destination, zipfilename)) - - if clean: - shutil.rmtree(tempdirectory) - - return join(destination, zipfilename) + self.generated_files += [target_path] diff --git a/tools/export/gccarm.py b/tools/export/gccarm.py index 3cdb0477ff1..a1b12ebc019 100644 --- a/tools/export/gccarm.py +++ b/tools/export/gccarm.py @@ -135,8 +135,6 @@ class GccArm(Exporter): def generate(self): # "make" wants Unix paths - if self.sources_relative: - self.resources.relative_to(self.prj_paths[0]) self.resources.win_to_unix() to_be_compiled = [] @@ -152,19 +150,19 @@ def generate(self): l, _ = splitext(basename(lib)) libraries.append(l[3:]) - build_dir = abspath(join(self.inputDir, ".build")) + build_dir = abspath(join(self.export_dir, ".build")) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'to_be_compiled': to_be_compiled, 'object_files': self.resources.objects, 'include_paths': self.resources.inc_dirs, 'library_paths': self.resources.lib_dirs, 'linker_script': self.resources.linker_script, 'libraries': libraries, - 'symbols': self.get_symbols(), + 'symbols': self.toolchain.get_symbols(), 'cpu_flags': self.toolchain.cpu, - 'vpath': [relpath(s, build_dir) for s in self.prj_paths] if self.sources_relative else [".."], - 'hex_files': self.resources.hex_files + 'hex_files': self.resources.hex_files, + 'vpath': [".."] } for key in ['include_paths', 'library_paths', 'linker_script', 'hex_files']: @@ -174,7 +172,7 @@ def generate(self): ctx[key] = ctx['vpath'][0] + "/" + ctx[key] if "../." not in ctx["include_paths"]: ctx["include_paths"] += ['../.'] - ctx.update(self.progen_flags) + ctx.update(self.flags) self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile') def scan_and_copy_resources(self, prj_paths, trg_path, relative=False): diff --git a/tools/export/iar.py b/tools/export/iar.py index 5c52a3d1ec4..053861721f3 100644 --- a/tools/export/iar.py +++ b/tools/export/iar.py @@ -28,7 +28,7 @@ class IAREmbeddedWorkbench(Exporter): Exporter class for IAR Systems. This class uses project generator. 
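
To contrast with the jinja2-based exporters, the progen-backed exporters in this patch now only generate project files in generate(); building is a separate, optional step. A rough sketch of the pattern follows (the misc assignment is simplified; the real IAR and uVision exporters also pick a template and prune individual flags):

    def generate(self):
        # Start from the generic progen project dictionary...
        project_data = self.progen_get_project_data()
        # ...apply tool specific tweaks (template, misc flags, build_dir)...
        project_data['misc'] = self.flags
        # ...and emit the project files. No build happens here.
        self.progen_gen_file(project_data)

    # A caller that wants an IDE build invokes it explicitly afterwards:
    #     exporter.generate()
    #     exporter.progen_build()   # raises FailedBuildException on failure
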
""" # These 2 are currently for exporters backward compatiblity - NAME = 'IAR' + NAME = 'iar_arm' TOOLCHAIN = 'IAR' # PROGEN_ACTIVE contains information for exporter scripts that this is using progen PROGEN_ACTIVE = True @@ -50,39 +50,23 @@ def TARGETS(cls): continue return cls._targets_supported - def generate(self, progen_build=False): + def generate(self): """ Generates the project files """ project_data = self.progen_get_project_data() - tool_specific = {} - # Expand tool specific settings by IAR specific settings which are required try: if TARGET_MAP[self.target].progen['iar']['template']: - tool_specific['iar'] = TARGET_MAP[self.target].progen['iar'] + project_data['template']=TARGET_MAP[self.target].progen['iar']['template'] except KeyError: # use default template # by the mbed projects - tool_specific['iar'] = { - # We currently don't use misc, template sets those for us - # 'misc': { - # 'cxx_flags': ['--no_rtti', '--no_exceptions'], - # 'c_flags': ['--diag_suppress=Pa050,Pa084,Pa093,Pa082'], - # 'ld_flags': ['--skip_dynamic_initialization'], - # }, - 'template': [os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')], - } - - project_data['tool_specific'] = {} - project_data['tool_specific'].setdefault("iar", {}) - project_data['tool_specific']['iar'].setdefault("misc", {}) - project_data['tool_specific']['iar'].update(tool_specific['iar']) - project_data['tool_specific']['iar']['misc'].update(self.progen_flags) + project_data['template']=[os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')] + + project_data['misc'] = self.flags # VLA is enabled via template IccAllowVLA - project_data['tool_specific']['iar']['misc']['c_flags'].remove("--vla") - project_data['common']['build_dir'] = os.path.join(project_data['common']['build_dir'], 'iar_arm') - if progen_build: - self.progen_gen_file('iar_arm', project_data, True) - else: - self.progen_gen_file('iar_arm', project_data) + project_data['misc']['c_flags'].remove("--vla") + project_data['misc']['asm_flags'] = list(set(project_data['misc']['asm_flags'])) + project_data['build_dir'] = os.path.join(project_data['build_dir'], 'iar_arm') + self.progen_gen_file(project_data) # Currently not used, we should reuse folder_name to create virtual folders class IarFolder(): diff --git a/tools/export/kds.py b/tools/export/kds.py index 13c038debaf..59324dd3e2f 100644 --- a/tools/export/kds.py +++ b/tools/export/kds.py @@ -35,7 +35,7 @@ def generate(self): libraries.append(l[3:]) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'include_paths': self.resources.inc_dirs, 'linker_script': self.resources.linker_script, 'object_files': self.resources.objects, @@ -44,4 +44,4 @@ def generate(self): } self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') - self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.program_name) + self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.project_name) diff --git a/tools/export/simplicityv3.py b/tools/export/simplicityv3.py index 3254152127b..89b2edf3ef0 100644 --- a/tools/export/simplicityv3.py +++ b/tools/export/simplicityv3.py @@ -157,7 +157,7 @@ def generate(self): self.check_and_add_path(split(self.resources.linker_script)[0]) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'main_files': main_files, 'recursiveFolders': self.orderedPaths, 'object_files': self.resources.objects, @@ -171,7 +171,7 @@ def generate(self): 'kit': self.KITS[self.target], 
'loopcount': 0 } - ctx.update(self.progen_flags) + ctx.update(self.flags) ## Strip main folder from include paths because ssproj is not capable of handling it if '.' in ctx['include_paths']: @@ -191,4 +191,4 @@ def generate(self): print("\t" + bpath.name + "\n") ''' - self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.program_name) + self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.project_name) diff --git a/tools/export/sw4stm32.py b/tools/export/sw4stm32.py index bacc02260cc..7d0ea356fb0 100644 --- a/tools/export/sw4stm32.py +++ b/tools/export/sw4stm32.py @@ -65,7 +65,7 @@ class Sw4STM32(Exporter): TARGETS = BOARDS.keys() def __gen_dir(self, dirname): - settings = join(self.inputDir, dirname) + settings = join(self.export_dir, dirname) mkdir(settings) def __generate_uid(self): @@ -78,7 +78,7 @@ def generate(self): libraries.append(l[3:]) ctx = { - 'name': self.program_name, + 'name': self.project_name, 'include_paths': self.resources.inc_dirs, 'linker_script': self.resources.linker_script, 'library_paths': self.resources.lib_dirs, diff --git a/tools/export/uvision4.py b/tools/export/uvision4.py index 0a76c89f824..a1636e4b0d2 100644 --- a/tools/export/uvision4.py +++ b/tools/export/uvision4.py @@ -28,7 +28,7 @@ class Uvision4(Exporter): Exporter class for uvision. This class uses project generator. """ # These 2 are currently for exporters backward compatiblity - NAME = 'uVision4' + NAME = 'uvision' TOOLCHAIN = 'ARM' # PROGEN_ACTIVE contains information for exporter scripts that this is using progen PROGEN_ACTIVE = True @@ -53,7 +53,7 @@ def TARGETS(cls): def get_toolchain(self): return TARGET_MAP[self.target].default_toolchain - def generate(self, progen_build=False): + def generate(self): """ Generates the project files """ project_data = self.progen_get_project_data() tool_specific = {} @@ -72,25 +72,32 @@ def generate(self, progen_build=False): project_data['tool_specific'].update(tool_specific) # get flags from toolchain and apply - project_data['tool_specific']['uvision']['misc'] = {} + project_data['misc'] = {} # need to make this a string for progen. 
Only adds preprocessor when "macros" set asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join( - list(set(self.progen_flags['asm_flags']))) - project_data['tool_specific']['uvision']['misc']['asm_flags'] = [asm_flag_string] + list(set(self.flags['asm_flags']))) + # asm flags only, common are not valid within uvision project, they are armcc specific + project_data['misc']['asm_flags'] = [asm_flag_string] # cxx flags included, as uvision have them all in one tab - project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set( - ['-D__ASSERT_MSG'] + self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags[ - 'cxx_flags'])) + project_data['misc']['c_flags'] = list(set(self.flags['common_flags'] + self.flags['c_flags'] + self.flags['cxx_flags'])) # not compatible with c99 flag set in the template - project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99") + project_data['misc']['c_flags'].remove("--c99") # cpp is not required as it's implicit for cpp files - project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--cpp") + project_data['misc']['c_flags'].remove("--cpp") # we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it - project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--no_vla") - project_data['tool_specific']['uvision']['misc']['ld_flags'] = self.progen_flags['ld_flags'] + project_data['misc']['c_flags'].remove("--no_vla") + project_data['misc']['ld_flags'] = self.flags['ld_flags'] - project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision4' - if progen_build: - self.progen_gen_file('uvision', project_data, True) - else: - self.progen_gen_file('uvision', project_data) + i = 0 + for macro in self.symbols: + # armasm does not like floating numbers in macros, timestamp to int + if macro.startswith('MBED_BUILD_TIMESTAMP'): + timestamp = macro[len('MBED_BUILD_TIMESTAMP='):] + project_data['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp))) + # armasm does not even accept MACRO=string + if macro.startswith('MBED_USERNAME'): + project_data['macros'].pop(i) + i += 1 + project_data['macros'].append('__ASSERT_MSG') + project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision4' + self.progen_gen_file(project_data) diff --git a/tools/export/uvision5.py b/tools/export/uvision5.py index f985b3ff886..28d12e74df5 100644 --- a/tools/export/uvision5.py +++ b/tools/export/uvision5.py @@ -28,7 +28,7 @@ class Uvision5(Exporter): Exporter class for uvision5. This class uses project generator. """ # These 2 are currently for exporters backward compatiblity - NAME = 'uVision5' + NAME = 'uvision5' TOOLCHAIN = 'ARM' # PROGEN_ACTIVE contains information for exporter scripts that this is using progen PROGEN_ACTIVE = True @@ -53,7 +53,7 @@ def TARGETS(cls): def get_toolchain(self): return TARGET_MAP[self.target].default_toolchain - def generate(self, progen_build=False): + def generate(self): """ Generates the project files """ project_data = self.progen_get_project_data() tool_specific = {} @@ -68,27 +68,35 @@ def generate(self, progen_build=False): 'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')], } + #project_data['template'] = [tool_specific['uvision5']['template']] project_data['tool_specific'] = {} project_data['tool_specific'].update(tool_specific) # get flags from toolchain and apply - project_data['tool_specific']['uvision5']['misc'] = {} - - # need to make this a string got progen. 
Only adds preprocessor when "macros" set - asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.progen_flags['asm_flags']))) - project_data['tool_specific']['uvision5']['misc']['asm_flags'] = [asm_flag_string] + project_data['misc'] = {} + asm_flag_string = '--cpreproc --cpreproc_opts=-D__ASSERT_MSG,' + ",".join(list(set(self.flags['asm_flags']))) + # asm flags only, common are not valid within uvision project, they are armcc specific + project_data['misc']['asm_flags'] = [asm_flag_string] # cxx flags included, as uvision have them all in one tab - project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']+self.progen_flags['common_flags'] + self.progen_flags['c_flags'] + self.progen_flags['cxx_flags'])) + project_data['misc']['c_flags'] = list(set(self.flags['common_flags'] + self.flags['c_flags'] + self.flags['cxx_flags'])) # not compatible with c99 flag set in the template - project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99") + project_data['misc']['c_flags'].remove("--c99") # cpp is not required as it's implicit for cpp files - project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--cpp") + project_data['misc']['c_flags'].remove("--cpp") # we want no-vla for only cxx, but it's also applied for C in IDE, thus we remove it - project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--no_vla") - project_data['tool_specific']['uvision5']['misc']['ld_flags'] = self.progen_flags['ld_flags'] + project_data['misc']['c_flags'].remove("--no_vla") + project_data['misc']['ld_flags'] = self.flags['ld_flags'] - project_data['common']['build_dir'] = project_data['common']['build_dir'] + '\\' + 'uvision5' - if progen_build: - self.progen_gen_file('uvision5', project_data, True) - else: - self.progen_gen_file('uvision5', project_data) + i = 0 + for macro in self.symbols: + # armasm does not like floating numbers in macros, timestamp to int + if macro.startswith('MBED_BUILD_TIMESTAMP'): + timestamp = macro[len('MBED_BUILD_TIMESTAMP='):] + project_data['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp))) + # armasm does not even accept MACRO=string + if macro.startswith('MBED_USERNAME'): + project_data['macros'].pop(i) + i += 1 + project_data['macros'].append('__ASSERT_MSG') + project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision5' + self.progen_gen_file(project_data) diff --git a/tools/project.py b/tools/project.py index 873e7fbe50c..004164667b6 100644 --- a/tools/project.py +++ b/tools/project.py @@ -1,3 +1,6 @@ +""" The CLI entry point for exporting projects from the mbed tools to any of the +supported IDEs or project structures. 
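
As a usage illustration of this entry point, invocations like the following (run from the repository root; the MCU, IDE and source path are only examples) are what the options defined below are parsed for:

    # export test program 0 for the default LPC1768 target to uVision
    python tools/project.py -m LPC1768 -i uvision -p 0

    # export an out-of-tree project in place; with --source no zip is produced
    python tools/project.py -m LPC1768 -i uvision --source path/to/project
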
+""" import sys from os.path import join, abspath, dirname, exists, basename ROOT = abspath(join(dirname(__file__), "..")) @@ -5,21 +8,87 @@ from shutil import move, rmtree from argparse import ArgumentParser -from os import path +from os.path import normpath -from tools.paths import EXPORT_DIR -from tools.export import export, EXPORTERS, mcu_ide_matrix +from tools.paths import EXPORT_DIR, MBED_BASE, MBED_LIBRARIES +from tools.export import EXPORTERS, mcu_ide_matrix from tools.tests import TESTS, TEST_MAP -from tools.tests import test_known, test_name_known +from tools.tests import test_known, test_name_known, Test from tools.targets import TARGET_NAMES -from tools.libraries import LIBRARIES -from utils import argparse_filestring_type, argparse_many, args_error -from utils import argparse_force_lowercase_type, argparse_force_uppercase_type, argparse_dir_not_parent -from project_api import setup_project, perform_export, print_results, get_lib_symbols - - - -if __name__ == '__main__': +from tools.utils import argparse_filestring_type, argparse_many, args_error +from tools.utils import argparse_force_lowercase_type +from tools.utils import argparse_force_uppercase_type +from tools.project_api import export_project + + +def setup_project(ide, target, program=None, source_dir=None, build=None): + """Generate a name, if not provided, and find dependencies + + Positional arguments: + ide - IDE or project structure that will soon be exported to + target - MCU that the project will build for + + Keyword arguments: + program - the index of a test program + source_dir - the directory, or directories that contain all of the sources + build - a directory that will contain the result of the export + """ + # Some libraries have extra macros (called by exporter symbols) to we need + # to pass them to maintain compilation macros integrity between compiled + # library and header files we might use with it + if source_dir: + # --source is used to generate IDE files to toolchain directly + # in the source tree and doesn't generate zip file + project_dir = source_dir[0] + if program: + project_name = TESTS[program] + else: + project_name = basename(normpath(source_dir[0])) + src_paths = source_dir + lib_paths = None + else: + test = Test(program) + if not build: + # Substitute the mbed library builds with their sources + if MBED_LIBRARIES in test.dependencies: + test.dependencies.remove(MBED_LIBRARIES) + test.dependencies.append(MBED_BASE) + + src_paths = [test.source_dir] + lib_paths = test.dependencies + project_name = "_".join([test.id, ide, target]) + project_dir = join(EXPORT_DIR, project_name) + + return project_dir, project_name, src_paths, lib_paths + + +def export(target, ide, build=None, src=None, macros=None, project_id=None, + clean=False, zip_proj=False): + """Do an export of a project. 
+ + Positional arguments: + target - MCU that the project will compile for + ide - the IDE or project structure to export to + + Keyword arguments: + build - to use the compiled mbed libraries or not + src - directory or directories that contain the source to export + macros - extra macros to add to the project + project_id - the name of the project + clean - start from a clean state before exporting + zip_proj - create a zip file or not + """ + project_dir, name, src, lib = setup_project(ide, target, program=project_id, + source_dir=src, build=build) + + zip_name = name+".zip" if zip_proj else None + + export_project(src, project_dir, target, ide, clean=clean, name=name, + macros=macros, libraries_paths=lib, zip_proj=zip_name) + + +def main(): + """Entry point""" # Parse Options parser = ArgumentParser() @@ -29,32 +98,36 @@ toolchainlist.sort() parser.add_argument("-m", "--mcu", - metavar="MCU", - default='LPC1768', - type=argparse_many(argparse_force_uppercase_type(targetnames, "MCU")), - help="generate project for the given MCU (%s)"% ', '.join(targetnames)) + metavar="MCU", + default='LPC1768', + type=argparse_many( + argparse_force_uppercase_type(targetnames, "MCU")), + help="generate project for the given MCU ({})".format( + ', '.join(targetnames))) parser.add_argument("-i", - dest="ide", - default='uvision', - type=argparse_force_lowercase_type(toolchainlist, "toolchain"), - help="The target IDE: %s"% str(toolchainlist)) + dest="ide", + default='uvision', + type=argparse_force_lowercase_type( + toolchainlist, "toolchain"), + help="The target IDE: %s"% str(toolchainlist)) parser.add_argument("-c", "--clean", - action="store_true", - default=False, - help="clean the export directory") + action="store_true", + default=False, + help="clean the export directory") group = parser.add_mutually_exclusive_group(required=False) - group.add_argument("-p", - type=test_known, - dest="program", - help="The index of the desired test program: [0-%d]"% (len(TESTS)-1)) + group.add_argument( + "-p", + type=test_known, + dest="program", + help="The index of the desired test program: [0-%s]"% (len(TESTS)-1)) group.add_argument("-n", - type=test_name_known, - dest="program", - help="The name of the desired test program") + type=test_name_known, + dest="program", + help="The name of the desired test program") parser.add_argument("-b", dest="build", @@ -63,40 +136,40 @@ help="use the mbed library build, instead of the sources") group.add_argument("-L", "--list-tests", - action="store_true", - dest="list_tests", - default=False, - help="list available programs in order and exit") + action="store_true", + dest="list_tests", + default=False, + help="list available programs in order and exit") group.add_argument("-S", "--list-matrix", - action="store_true", - dest="supported_ides", - default=False, - help="displays supported matrix of MCUs and IDEs") + action="store_true", + dest="supported_ides", + default=False, + help="displays supported matrix of MCUs and IDEs") parser.add_argument("-E", - action="store_true", - dest="supported_ides_html", - default=False, - help="writes tools/export/README.md") + action="store_true", + dest="supported_ides_html", + default=False, + help="writes tools/export/README.md") parser.add_argument("--source", - action="append", - type=argparse_filestring_type, - dest="source_dir", - default=[], - help="The source (input) directory") + action="append", + type=argparse_filestring_type, + dest="source_dir", + default=[], + help="The source (input) directory") parser.add_argument("-D", 
- action="append", - dest="macros", - help="Add a macro definition") + action="append", + dest="macros", + help="Add a macro definition") options = parser.parse_args() # Print available tests in order and exit if options.list_tests is True: - print '\n'.join(map(str, sorted(TEST_MAP.values()))) + print '\n'.join([str(test) for test in sorted(TEST_MAP.values())]) sys.exit() # Only prints matrix of supported IDEs @@ -108,13 +181,13 @@ if options.supported_ides_html: html = mcu_ide_matrix(verbose_html=True) try: - with open("./export/README.md","w") as f: - f.write("Exporter IDE/Platform Support\n") - f.write("-----------------------------------\n") - f.write("\n") - f.write(html) - except IOError as e: - print "I/O error({0}): {1}".format(e.errno, e.strerror) + with open("./export/README.md", "w") as readme: + readme.write("Exporter IDE/Platform Support\n") + readme.write("-----------------------------------\n") + readme.write("\n") + readme.write(html) + except IOError as exc: + print "I/O error({0}): {1}".format(exc.errno, exc.strerror) except: print "Unexpected error:", sys.exc_info()[0] raise @@ -125,12 +198,9 @@ if exists(EXPORT_DIR): rmtree(EXPORT_DIR) - # Export results - successes = [] - failures = [] + for mcu in options.mcu: + zip_proj = not bool(options.source_dir) - # source_dir = use relative paths, otherwise sources are copied - sources_relative = True if options.source_dir else False # Target if not options.mcu: args_error(parser, "argument -m/--mcu is required") @@ -141,32 +211,12 @@ if (options.program is None) and (not options.source_dir): args_error(parser, "one of -p, -n, or --source is required") - + # Export to selected toolchain for mcu in options.mcu: - # Program Number or name - p, src, ide = options.program, options.source_dir, options.ide - try: - project_dir, project_name, project_temp = setup_project(mcu, ide, p, src, options.build) - zip = not bool(src) # create zip when no src_dir provided - clean = not bool(src) # don't clean when source is provided, use acrual source tree for IDE files - - # Export to selected toolchain - lib_symbols = get_lib_symbols(options.macros, src, p) - tmp_path, report = export(project_dir, project_name, ide, mcu, project_dir[0], project_temp, clean=clean, make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative) - except OSError as e: - if e.errno == 2: - report = dict(success=False, errormsg="Library path '%s' does not exist. Ensure that the library is built." 
% (e.filename)) - else: - report = dict(success=False, errormsg="An OS error occured: errno #{}".format(e.errno)) - if report['success']: - if not zip: - zip_path = join(project_temp, project_name) - else: - zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (project_name, ide, mcu)) - move(tmp_path, zip_path) - successes.append("%s::%s\t%s"% (mcu, ide, zip_path)) - else: - failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg'])) + export(mcu, options.ide, build=options.build, src=options.source_dir, + macros=options.macros, project_id=options.program, + clean=options.clean, zip_proj=zip_proj) + - # Prints export results - print_results(successes, failures) +if __name__ == "__main__": + main() diff --git a/tools/project_api.py b/tools/project_api.py index f0bfb04795a..2bffd5ebe04 100644 --- a/tools/project_api.py +++ b/tools/project_api.py @@ -1,110 +1,263 @@ +""" The new way of doing exports """ import sys -from os.path import join, abspath, dirname, exists, basename +from os.path import join, abspath, dirname, exists +from os.path import basename, relpath, normpath +from os import makedirs ROOT = abspath(join(dirname(__file__), "..")) sys.path.insert(0, ROOT) +import copy +from shutil import rmtree +import zipfile -from tools.paths import EXPORT_WORKSPACE, EXPORT_TMP -from tools.paths import MBED_BASE, MBED_LIBRARIES -from tools.export import export, setup_user_prj -from tools.utils import mkdir -from tools.tests import Test, TEST_MAP, TESTS -from tools.libraries import LIBRARIES +from tools.build_api import prepare_toolchain +from tools.build_api import scan_resources +from tools.export import EXPORTERS -try: - import tools.private_settings as ps -except: - ps = object() +def get_exporter_toolchain(ide): + """ Return the exporter class and the toolchain string as a tuple -def get_program(n): - p = TEST_MAP[n].n - return p + Positional arguments: + ide - the ide name of an exporter + """ + return EXPORTERS[ide], EXPORTERS[ide].TOOLCHAIN -def get_test(p): - return Test(p) +def rewrite_basepath(file_name, resources, export_path): + """ Replace the basepath of filename with export_path + Positional arguments: + file_name - the absolute path to a file + resources - the resources object that the file came from + export_path - the final destination of the file after export + """ + new_f = relpath(file_name, resources.file_basepath[file_name]) + resources.file_basepath[join(export_path, new_f)] = export_path + return new_f -def get_test_from_name(n): - if not n in TEST_MAP.keys(): - # Check if there is an alias for this in private_settings.py - if getattr(ps, "test_alias", None) is not None: - alias = ps.test_alias.get(n, "") - if not alias in TEST_MAP.keys(): - return None - else: - n = alias + +def subtract_basepath(resources, export_path): + """ Rewrite all of the basepaths with the export_path + + Positional arguments: + resources - the resource object to rewrite the basepaths of + export_path - the final destination of the resources with respect to the + generated project files + """ + keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', + 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script'] + for key in keys: + vals = getattr(resources, key) + if type(vals) is list: + new_vals = [] + for val in vals: + new_vals.append(rewrite_basepath(val, resources, export_path)) + setattr(resources, key, new_vals) else: - return None - return get_program(n) - - -def get_lib_symbols(macros, src, program): - # Some libraries have extra macros (called by exporter symbols) to we need to 
pass - # them to maintain compilation macros integrity between compiled library and - # header files we might use with it - lib_symbols = [] - if macros: - lib_symbols += macros - if src: - return lib_symbols - test = get_test(program) - for lib in LIBRARIES: - if lib['build_dir'] in test.dependencies: - lib_macros = lib.get('macros', None) - if lib_macros is not None: - lib_symbols.extend(lib_macros) - - -def setup_project(mcu, ide, program=None, source_dir=None, build=None): - - # Some libraries have extra macros (called by exporter symbols) to we need to pass - # them to maintain compilation macros integrity between compiled library and - # header files we might use with it - if source_dir: - # --source is used to generate IDE files to toolchain directly in the source tree and doesn't generate zip file - project_dir = source_dir - project_name = TESTS[program] if program else "Unnamed_Project" - project_temp = join(source_dir[0], 'projectfiles', '%s_%s' % (ide, mcu)) - mkdir(project_temp) - else: - test = get_test(program) - if not build: - # Substitute the library builds with the sources - # TODO: Substitute also the other library build paths - if MBED_LIBRARIES in test.dependencies: - test.dependencies.remove(MBED_LIBRARIES) - test.dependencies.append(MBED_BASE) - - # Build the project with the same directory structure of the mbed online IDE - project_name = test.id - project_dir = [join(EXPORT_WORKSPACE, project_name)] - project_temp = EXPORT_TMP - setup_user_prj(project_dir[0], test.source_dir, test.dependencies) - - return project_dir, project_name, project_temp - - -def perform_export(dir, name, ide, mcu, temp, clean=False, zip=False, lib_symbols='', - sources_relative=False, progen_build=False): - - tmp_path, report = export(dir, name, ide, mcu, dir[0], temp, clean=clean, - make_zip=zip, extra_symbols=lib_symbols, sources_relative=sources_relative, - progen_build=progen_build) - return tmp_path, report - - -def print_results(successes, failures, skips = []): + setattr(resources, key, rewrite_basepath(vals, resources, + export_path)) + + +def prepare_project(src_paths, export_path, target, ide, + libraries_paths=None, options=None, linker_script=None, + clean=False, notify=None, verbose=False, name=None, + inc_dirs=None, jobs=1, silent=False, extra_verbose=False, + config=None, macros=None): + """ This function normalizes the + """ + + # Convert src_path to a list if needed + if type(src_paths) != type([]): + src_paths = [src_paths] + # Extend src_paths wiht libraries_paths + if libraries_paths is not None: + src_paths.extend(libraries_paths) + + # Export Directory + if exists(export_path) and clean: + rmtree(export_path) + if not exists(export_path): + makedirs(export_path) + + _, toolchain_name = get_exporter_toolchain(ide) + + # Pass all params to the unified prepare_resources() + toolchain = prepare_toolchain(src_paths, export_path, target, + toolchain_name, macros=macros, + options=options, clean=clean, jobs=jobs, + notify=notify, silent=silent, verbose=verbose, + extra_verbose=extra_verbose, config=config) + + + # The first path will give the name to the library + if name is None: + name = basename(normpath(abspath(src_paths[0]))) + + # Call unified scan_resources + resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) + toolchain.build_dir = export_path + config_header = toolchain.get_config_header() + resources.headers.append(config_header) + resources.file_basepath[config_header] = dirname(config_header) + + # Change linker script if specified + if 
linker_script is not None: + resources.linker_script = linker_script + + return resources, toolchain + + +def generate_project_files(resources, export_path, target, name, toolchain, ide, + macros=None): + """Generate the project files for a project + + Positional arguments: + resources - a Resources object containing all of the files needed to build + this project + export_path - location to place project files + name - name of the project + toolchain - a toolchain class that corresponds to the toolchain used by the + IDE or makefile + ide - IDE name to export to + + Optional arguments: + macros - additional macros that should be defined within the exported + project + """ + exporter_cls, _ = get_exporter_toolchain(ide) + exporter = exporter_cls(target, export_path, name, toolchain, + extra_symbols=macros, resources=resources) + exporter.generate() + files = exporter.generated_files + return files, exporter + + +def zip_export(file_name, prefix, resources, project_files): + """Create a zip file from an exported project. + + Positional Parameters: + file_name - the file name of the resulting zip file + prefix - a directory name that will prefix the entire zip file's contents + resources - a resources object with files that must be included in the zip + project_files - a list of extra files to be added to the root of the prefix + directory + """ + with zipfile.ZipFile(file_name, "w") as zip_file: + for prj_file in project_files: + zip_file.write(prj_file, join(prefix, basename(prj_file))) + for source in resources.headers + resources.s_sources + \ + resources.c_sources + resources.cpp_sources + \ + resources.libraries + resources.hex_files + \ + [resources.linker_script] + resources.bin_files \ + + resources.objects + resources.json_files: + zip_file.write(source, + join(prefix, relpath(source, + resources.file_basepath[source]))) + + +def export_project(src_paths, export_path, target, ide, + libraries_paths=None, options=None, linker_script=None, + clean=False, notify=None, verbose=False, name=None, + inc_dirs=None, jobs=1, silent=False, extra_verbose=False, + config=None, macros=None, zip_proj=None): + """Generates a project file and creates a zip archive if specified + + Positional Arguments: + src_paths - a list of paths from which to find source files + export_path - a path specifying the location of generated project files + target - the mbed board/mcu for which to generate the executable + ide - the ide for which to generate the project fields + + Keyword Arguments: + libraries_paths - paths to additional libraries + options - build options passed by -o flag + linker_script - path to the linker script for the specified target + clean - removes the export_path if it exists + notify - function is passed all events, and expected to handle notification + of the user, emit the events to a log, etc. 
+ verbose - assigns the notify function to toolchains print_notify_verbose + name - project name + inc_dirs - additional include directories + jobs - number of threads + silent - silent build - no output + extra_verbose - assigns the notify function to toolchains + print_notify_verbose + config - toolchain's config object + macros - User-defined macros + zip_proj - string name of the zip archive you wish to creat (exclude arg + if you do not wish to create an archive + """ + + # Convert src_path to a list if needed + if type(src_paths) != type([]): + src_paths = [src_paths] + # Extend src_paths wiht libraries_paths + if libraries_paths is not None: + src_paths.extend(libraries_paths) + + # Export Directory + if exists(export_path) and clean: + rmtree(export_path) + if not exists(export_path): + makedirs(export_path) + + _, toolchain_name = get_exporter_toolchain(ide) + + # Pass all params to the unified prepare_resources() + toolchain = prepare_toolchain(src_paths, target, toolchain_name, + macros=macros, options=options, clean=clean, + jobs=jobs, notify=notify, silent=silent, + verbose=verbose, extra_verbose=extra_verbose, + config=config) + + # The first path will give the name to the library + if name is None: + name = basename(normpath(abspath(src_paths[0]))) + + # Call unified scan_resources + resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) + toolchain.build_dir = export_path + config_header = toolchain.get_config_header() + resources.headers.append(config_header) + resources.file_basepath[config_header] = dirname(config_header) + temp = copy.deepcopy(resources) + + if zip_proj: + subtract_basepath(resources, export_path) + + # Change linker script if specified + if linker_script is not None: + resources.linker_script = linker_script + + files, exporter = generate_project_files(resources, export_path, + target, name, toolchain, ide, + macros=macros) + if zip_proj: + zip_export(join(export_path, zip_proj), name, temp, files) + + return exporter + + +def print_results(successes, failures, skips=None): + """ Print out the results of an export process + + Positional arguments: + successes - The list of exports that succeeded + failures - The list of exports that failed + + Keyword arguments: + skips - The list of exports that were skipped + """ print - if len(successes) > 0: + if successes: print "Successful: " for success in successes: print " * %s" % success - if len(failures) > 0: + if failures: print "Failed: " for failure in failures: print " * %s" % failure - if len(skips) > 0: + if skips: print "Skipped: " for skip in skips: print " * %s" % skip diff --git a/tools/test/export/build_test.py b/tools/test/export/build_test.py index ec80fcfd004..530213f3564 100644 --- a/tools/test/export/build_test.py +++ b/tools/test/export/build_test.py @@ -16,154 +16,166 @@ limitations under the License. 
""" - import sys -import argparse -import os +from os import path, remove, rename import shutil -from os.path import join, abspath, dirname, exists, basename -r=dirname(__file__) -ROOT = abspath(join(r, "..","..","..")) +ROOT = path.abspath(path.join(path.dirname(__file__), "..", "..", "..")) sys.path.insert(0, ROOT) +import argparse from tools.export import EXPORTERS -from tools.targets import TARGET_NAMES, TARGET_MAP -from tools.project_api import setup_project, perform_export, print_results, get_test_from_name, get_lib_symbols -from project_generator_definitions.definitions import ProGenDef -from tools.utils import args_error - - -class ProgenBuildTest(): - def __init__(self, desired_ides, targets): - #map of targets and the ides that can build programs for them - self.target_ides = {} - for target in targets: - self.target_ides[target] =[] - for ide in desired_ides: - if target in EXPORTERS[ide].TARGETS: - #target is supported by ide - self.target_ides[target].append(ide) - if len(self.target_ides[target]) == 0: - del self.target_ides[target] - - - @staticmethod - def get_pgen_targets(ides): - #targets supported by pgen and desired ides for tests - targs = [] - for ide in ides: - for target in TARGET_NAMES: - if target not in targs and hasattr(TARGET_MAP[target],'progen') \ - and ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']): - targs.append(target) - return targs +from tools.targets import TARGET_NAMES +from tools.tests import TESTS +from tools.project import setup_project +from tools.project_api import print_results, export_project +from tools.tests import test_name_known, Test +from tools.export.exporters import FailedBuildException, \ + TargetNotSupportedException +from tools.utils import argparse_force_lowercase_type, \ + argparse_force_uppercase_type, argparse_many + + +class ProgenBuildTest(object): + """Object to encapsulate logic for progen build testing""" + def __init__(self, desired_ides, mcus, tests): + """ + Initialize an instance of class ProgenBuildTest + Args: + desired_ides: the IDEs you wish to make/build project files for + mcus: the mcus to specify in project files + tests: the test projects to make/build project files from + """ + self.ides = desired_ides + self.mcus = mcus + self.tests = tests + + @property + def mcu_ide_pairs(self): + """Yields tuples of valid mcu, ide combinations""" + for mcu in self.mcus: + for ide in self.ides: + if mcu in EXPORTERS[ide].TARGETS: + yield mcu, ide @staticmethod - def handle_project_files(project_dir, mcu, test, tool, clean=False): + def handle_log_files(project_dir, tool, name): + """ + Renames/moves log files + Args: + project_dir: the directory that contains project files + tool: the ide that created the project files + name: the name of the project + clean: a boolean value determining whether to remove the + created project files + """ log = '' if tool == 'uvision' or tool == 'uvision5': - log = os.path.join(project_dir,"build","build_log.txt") + log = path.join(project_dir, "build", "build_log.txt") elif tool == 'iar': - log = os.path.join(project_dir, 'build_log.txt') + log = path.join(project_dir, 'build_log.txt') try: - with open(log, 'r') as f: - print f.read() - except: - return - - prefix = "_".join([test, mcu, tool]) - log_name = os.path.join(os.path.dirname(project_dir), prefix+"_log.txt") - - #check if a log already exists for this platform+test+ide - if os.path.exists(log_name): - #delete it if so - os.remove(log_name) - os.rename(log, log_name) - - if clean: - shutil.rmtree(project_dir, 
ignore_errors=True) - return - - def generate_and_build(self, tests, clean=False): - - #build results + with open(log, 'r') as in_log: + print in_log.read() + log_name = path.join(path.dirname(project_dir), name + "_log.txt") + + # check if a log already exists for this platform+test+ide + if path.exists(log_name): + # delete it if so + remove(log_name) + rename(log, log_name) + except IOError: + pass + + def generate_and_build(self, clean=False): + """ + Generate the project file and build the project + Args: + clean: a boolean value determining whether to remove the + created project files + + Returns: + successes: a list of strings that contain the mcu, ide, test + properties of a successful build test + skips: a list of strings that contain the mcu, ide, test properties + of a skipped test (if the ide does not support mcu) + failures: a list of strings that contain the mcu, ide, test + properties of a failed build test + + """ successes = [] failures = [] skips = [] - for mcu, ides in self.target_ides.items(): - for test in tests: - #resolve name alias - test = get_test_from_name(test) - for ide in ides: - lib_symbols = get_lib_symbols(None, None, test) - project_dir, project_name, project_temp = setup_project(mcu, ide, test) - - dest_dir = os.path.dirname(project_temp) - destination = os.path.join(dest_dir,"_".join([project_name, mcu, ide])) - - tmp_path, report = perform_export(project_dir, project_name, ide, mcu, destination, - lib_symbols=lib_symbols, progen_build = True) - - if report['success']: - successes.append("build for %s::%s\t%s" % (mcu, ide, project_name)) - elif report['skip']: - skips.append("%s::%s\t%s" % (mcu, ide, project_name)) - else: - failures.append("%s::%s\t%s for %s" % (mcu, ide, report['errormsg'], project_name)) - - ProgenBuildTest.handle_project_files(destination, mcu, project_name, ide, clean) + for mcu, ide in self.mcu_ide_pairs: + for test in self.tests: + export_location, name, src, lib = setup_project(ide, mcu, + program=test) + test_name = Test(test).id + try: + exporter = export_project(src, export_location, mcu, ide, + clean=clean, name=name, + libraries_paths=lib) + exporter.progen_build() + successes.append("%s::%s\t%s" % (mcu, ide, test_name)) + except FailedBuildException: + failures.append("%s::%s\t%s" % (mcu, ide, test_name)) + except TargetNotSupportedException: + skips.append("%s::%s\t%s" % (mcu, ide, test_name)) + + ProgenBuildTest.handle_log_files(export_location, ide, name) + if clean: + shutil.rmtree(export_location, ignore_errors=True) return successes, failures, skips -if __name__ == '__main__': - accepted_ides = ["iar", "uvision", "uvision5"] - accepted_targets = sorted(ProgenBuildTest.get_pgen_targets(accepted_ides)) - default_tests = ["MBED_BLINKY"] - - parser = argparse.ArgumentParser(description = "Test progen builders. Leave any flag off to run with all possible options.") - parser.add_argument("-i", "--IDEs", - nargs = '+', - dest="ides", - help="tools you wish to perfrom build tests. (%s)" % ', '.join(accepted_ides), - default = accepted_ides) +def main(): + """Entry point""" + toolchainlist = ["iar", "uvision", "uvision5"] + default_tests = [test_name_known("MBED_BLINKY")] + targetnames = TARGET_NAMES + targetnames.sort() + + parser = argparse.ArgumentParser(description= + "Test progen builders. 
Leave any flag off" + " to run with all possible options.") + parser.add_argument("-i", + dest="ides", + default=toolchainlist, + type=argparse_many(argparse_force_lowercase_type( + toolchainlist, "toolchain")), + help="The target IDE: %s"% str(toolchainlist)) + + parser.add_argument( + "-p", + type=argparse_many(test_name_known), + dest="programs", + help="The index of the desired test program: [0-%d]" % (len(TESTS) - 1), + default=default_tests) parser.add_argument("-n", - nargs='+', - dest="tests", - help="names of desired test programs", - default = default_tests) - - parser.add_argument("-m", "--mcus", - nargs='+', - dest ="targets", - help="generate project for the given MCUs (%s)" % '\n '.join(accepted_targets), - default = accepted_targets) + type=argparse_many(test_name_known), + dest="programs", + help="The name of the desired test program", + default=default_tests) + + parser.add_argument( + "-m", "--mcu", + metavar="MCU", + default='LPC1768', + nargs="+", + type=argparse_force_uppercase_type(targetnames, "MCU"), + help="generate project for the given MCU (%s)" % ', '.join(targetnames)) parser.add_argument("-c", "--clean", dest="clean", - action = "store_true", + action="store_true", help="clean up the exported project files", default=False) options = parser.parse_args() - - tests = options.tests - ides = [ide.lower() for ide in options.ides] - targets = [target.upper() for target in options.targets] - - if any(get_test_from_name(test) is None for test in tests): - args_error(parser, "[ERROR] test name not recognized") - - if any(target not in accepted_targets for target in targets): - args_error(parser, "[ERROR] mcu must be one of the following:\n %s" % '\n '.join(accepted_targets)) - - if any(ide not in accepted_ides for ide in ides): - args_error(parser, "[ERROR] ide must be in %s" % ', '.join(accepted_ides)) - - build_test = ProgenBuildTest(ides, targets) - successes, failures, skips = build_test.generate_and_build(tests, options.clean) + test = ProgenBuildTest(options.ides, options.mcu, options.programs) + successes, failures, skips = test.generate_and_build(clean=options.clean) print_results(successes, failures, skips) sys.exit(len(failures)) - - +if __name__ == "__main__": + main() diff --git a/tools/tests.py b/tools/tests.py index 66da1249ef5..29c7101b8f4 100644 --- a/tools/tests.py +++ b/tools/tests.py @@ -1253,7 +1253,7 @@ def test_known(string): def test_name_known(string): if string not in TEST_MAP.keys() and \ (getattr(ps, "test_alias", None) is None or \ - ps.test_alias.get(test_id, "") not in TEST_MAP.keys()): + ps.test_alias.get(string, "") not in TEST_MAP.keys()): raise ArgumentTypeError("Program with name '{0}' not found. 
Supported tests are: \n{1}".format(string, columnate([t['id'] for t in TESTS]))) return TEST_MAP[string].n diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py index 1bb892cf8c7..740a510d8e0 100644 --- a/tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -564,6 +564,7 @@ def _add_dir(self, path, resources, base_path, exclude_paths=None): # Add root to include paths resources.inc_dirs.append(root) + resources.file_basepath[root] = base_path for file in files: file_path = join(root, file) From 7e2ca0115b23b6c3a65549be51336935c21c39b5 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 26 Jul 2016 15:30:59 -0500 Subject: [PATCH 02/17] Update copyright --- tools/build_api.py | 2 +- tools/export/__init__.py | 2 +- tools/export/atmelstudio.py | 2 +- tools/export/codered.py | 2 +- tools/export/coide.py | 2 +- tools/export/ds5_5.py | 2 +- tools/export/e2studio.py | 2 +- tools/export/emblocks.py | 2 +- tools/export/exporters.py | 2 +- tools/export/gccarm.py | 2 +- tools/export/iar.py | 2 +- tools/export/kds.py | 2 +- tools/export/simplicityv3.py | 2 +- tools/export/uvision4.py | 2 +- tools/test/export/build_test.py | 2 +- 15 files changed, 15 insertions(+), 15 deletions(-) diff --git a/tools/build_api.py b/tools/build_api.py index cc85bad1362..f1980b72d83 100644 --- a/tools/build_api.py +++ b/tools/build_api.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/__init__.py b/tools/export/__init__.py index 8241c1e338c..09f3e620d41 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -1,7 +1,7 @@ """The generic interface for all exporters. """ # mbed SDK -# Copyright (c) 2011-2013 ARM Limited +# Copyright (c) 2011-2016 ARM Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tools/export/atmelstudio.py b/tools/export/atmelstudio.py index 531196f56c2..251ea3376d3 100644 --- a/tools/export/atmelstudio.py +++ b/tools/export/atmelstudio.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2015 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/codered.py b/tools/export/codered.py index 4dfb6047e04..cf5ef251dca 100644 --- a/tools/export/codered.py +++ b/tools/export/codered.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/coide.py b/tools/export/coide.py index a9219505418..2503cd0ce51 100644 --- a/tools/export/coide.py +++ b/tools/export/coide.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2014 ARM Limited +Copyright (c) 2014-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
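
For reference, a minimal sketch of driving the reworked tools/project_api.py entry point introduced earlier in this series; the source path, output directory, target, macro and zip name below are placeholders, not values taken from the patches:

    from tools.project_api import export_project

    # Placeholder inputs: any source tree, plus an MCU and IDE key from EXPORTERS
    exporter = export_project(
        "source",                      # src_paths: a single path or a list of paths
        "generated/uvision_lpc1768",   # export_path: where the project files land
        "LPC1768",                     # target MCU
        "uvision",                     # IDE key
        macros=["MY_FEATURE=1"],       # optional extra macros
        zip_proj="project.zip")        # omit to skip writing a zip archive
    # For IDEs with a progen builder, exporter.progen_build() can then be called.
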
diff --git a/tools/export/ds5_5.py b/tools/export/ds5_5.py index e5d333757fe..d83599879dc 100644 --- a/tools/export/ds5_5.py +++ b/tools/export/ds5_5.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/e2studio.py b/tools/export/e2studio.py index 8e68e5862ae..4fda319c660 100644 --- a/tools/export/e2studio.py +++ b/tools/export/e2studio.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/emblocks.py b/tools/export/emblocks.py index affbbee09a0..4f4aea1b1f5 100644 --- a/tools/export/emblocks.py +++ b/tools/export/emblocks.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2014 ARM Limited +Copyright (c) 2014-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/exporters.py b/tools/export/exporters.py index 5f20b6c5e2f..83b944af9c4 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -58,9 +58,9 @@ def __init__(self, target, export_dir, project_name, toolchain, export_dir - the directory of the exported project files project_name - the name of the project toolchain - an instance of class toolchain - extra_symbols - a list of extra macros for the toolchain Keyword arguments: + extra_symbols - a list of extra macros for the toolchain resources - an instance of class Resources """ self.export_dir = export_dir diff --git a/tools/export/gccarm.py b/tools/export/gccarm.py index a1b12ebc019..afe5c81b055 100644 --- a/tools/export/gccarm.py +++ b/tools/export/gccarm.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/iar.py b/tools/export/iar.py index 053861721f3..3ca488d6669 100644 --- a/tools/export/iar.py +++ b/tools/export/iar.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2015 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/kds.py b/tools/export/kds.py index 59324dd3e2f..6579369d30c 100644 --- a/tools/export/kds.py +++ b/tools/export/kds.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/export/simplicityv3.py b/tools/export/simplicityv3.py index 89b2edf3ef0..3ddce6842dc 100644 --- a/tools/export/simplicityv3.py +++ b/tools/export/simplicityv3.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2014 ARM Limited +Copyright (c) 2014-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/tools/export/uvision4.py b/tools/export/uvision4.py index a1636e4b0d2..42d52997170 100644 --- a/tools/export/uvision4.py +++ b/tools/export/uvision4.py @@ -1,6 +1,6 @@ """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tools/test/export/build_test.py b/tools/test/export/build_test.py index 530213f3564..e87b4875b15 100644 --- a/tools/test/export/build_test.py +++ b/tools/test/export/build_test.py @@ -1,7 +1,7 @@ #!/usr/bin/env python """ mbed SDK -Copyright (c) 2011-2013 ARM Limited +Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. From cccc5f3fca24708e74a0e8eb36bf87d94054c025 Mon Sep 17 00:00:00 2001 From: Sarah Marsh Date: Tue, 26 Jul 2016 17:21:32 -0500 Subject: [PATCH 03/17] Accessing progen exporters directly --- tools/export/exporters.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tools/export/exporters.py b/tools/export/exporters.py index 83b944af9c4..a2a4e3b05d3 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -9,6 +9,7 @@ from tools.targets import TARGET_MAP from project_generator.project import Project, ProjectTemplateInternal +from project_generator.tools_supported import ToolsSupported from project_generator.settings import ProjectSettings from project_generator_definitions.definitions import ProGenDef @@ -72,7 +73,7 @@ def __init__(self, target, export_dir, project_name, toolchain, self.resources = resources self.symbols = self.toolchain.get_symbols() self.generated_files = [] - self.project = None + self.builder_files_dict = {} # Add extra symbols and config file symbols to the Exporter's list of # symbols. 
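
The hunks below drop the pgen Project wrapper and call the project_generator tool classes directly. Roughly, and only as an illustration of the call shape (the template dict is filled in with target, sources and flags by the exporter before use):

    from project_generator.tools import tool
    from project_generator.tools_supported import ToolsSupported
    from project_generator.settings import ProjectSettings
    from tools.export.exporters import FailedBuildException

    project_data = tool.get_tool_template()           # progen template dict
    tool_cls = ToolsSupported().get_tool("uvision")   # self.NAME in the exporter
    # export_project() writes the IDE files and returns a description of them
    generated = tool_cls(project_data, ProjectSettings()).export_project()
    # build_project() consumes that description; -1 signals a failed build
    if tool_cls(generated, ProjectSettings()).build_project() == -1:
        raise FailedBuildException("Build Failed")
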
@@ -184,10 +185,9 @@ def progen_gen_file(self, project_data): if not self.check_supported(self.NAME): raise TargetNotSupportedException("Target not supported") settings = ProjectSettings() - self.project = Project(self.project_name, [project_data], settings) - self.project.project['export'] = project_data.copy() - self.project.generate(self.NAME, copied=False, fill=False) - for middle in self.project.generated_files.values(): + exporter = ToolsSupported().get_tool(self.NAME) + self.builder_files_dict = {self.NAME:exporter(project_data, settings).export_project()} + for middle in self.builder_files_dict.values(): for field, thing in middle.iteritems(): if field == "files": for filename in thing.values(): @@ -198,7 +198,8 @@ def progen_build(self): print("Project {} exported, building for {}...".format( self.project_name, self.NAME)) sys.stdout.flush() - result = self.project.build(self.NAME) + builder = ToolsSupported().get_tool(self.NAME) + result = builder(self.builder_files_dict[self.NAME], ProjectSettings()).build_project() if result == -1: raise FailedBuildException("Build Failed") From 0016ddf60c1387756cc2ed44938a5b7c3410a889 Mon Sep 17 00:00:00 2001 From: Sarah Marsh Date: Wed, 27 Jul 2016 16:35:32 -0500 Subject: [PATCH 04/17] progen exporter template --- tools/export/exporters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/export/exporters.py b/tools/export/exporters.py index a2a4e3b05d3..f48a0aebb44 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -8,7 +8,7 @@ from jinja2.environment import Environment from tools.targets import TARGET_MAP -from project_generator.project import Project, ProjectTemplateInternal +from project_generator.tools import tool from project_generator.tools_supported import ToolsSupported from project_generator.settings import ProjectSettings from project_generator_definitions.definitions import ProGenDef @@ -149,7 +149,7 @@ def grouped(sources): # we want to add this to our include dirs config_dir = os.path.dirname(config_header) if config_header else [] - project_data = ProjectTemplateInternal._get_project_template() + project_data = tool.get_tool_template() project_data['target'] = TARGET_MAP[self.target].progen['target'] project_data['source_paths'] = self.get_source_paths() From f4a686fbd914b1f589a071ea7a34faa395332735 Mon Sep 17 00:00:00 2001 From: Sarah Marsh Date: Thu, 28 Jul 2016 10:16:43 -0500 Subject: [PATCH 05/17] Requirements update --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 963bea20363..a15841c9d6b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,8 +3,8 @@ PySerial>=2.7 PrettyTable>=0.7.2 Jinja2>=2.7.3 IntelHex>=1.3 -project-generator>=0.9.7,<0.10.0 -project-generator-definitions>=0.2.26,<0.3.0 +project-generator==0.9.9 +project_generator_definitions>=0.2.26,<0.3.0 junit-xml pyYAML requests From 67fae3a7059eb5dd0a84b1f6162834b20eb995b4 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Thu, 28 Jul 2016 16:55:38 -0500 Subject: [PATCH 06/17] Allow exporting to in-memory zip file --- tools/project_api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tools/project_api.py b/tools/project_api.py index 2bffd5ebe04..f69d3aff99a 100644 --- a/tools/project_api.py +++ b/tools/project_api.py @@ -233,7 +233,10 @@ def export_project(src_paths, export_path, target, ide, target, name, toolchain, ide, macros=macros) if zip_proj: - zip_export(join(export_path, zip_proj), name, temp, 
files) + if isinstance(zip_proj, basestring): + zip_export(join(export_path, zip_proj), name, temp, files) + else: + zip_export(zip_proj, name, temp, files) return exporter From 3e2526a4249b4c4a95863116bee23641273d7b4b Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Thu, 28 Jul 2016 19:05:40 -0500 Subject: [PATCH 07/17] Made exporting safer it will no longer barf when: - a linker scirpt is None - an attribute that is a set it will also export the correct library include paths --- tools/project_api.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/tools/project_api.py b/tools/project_api.py index f69d3aff99a..2dfec2ed107 100644 --- a/tools/project_api.py +++ b/tools/project_api.py @@ -45,15 +45,18 @@ def subtract_basepath(resources, export_path): generated project files """ keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files', - 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script'] + 'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script', + 'lib_dirs'] for key in keys: vals = getattr(resources, key) + if type(vals) is set: + vals = list(vals) if type(vals) is list: new_vals = [] for val in vals: new_vals.append(rewrite_basepath(val, resources, export_path)) setattr(resources, key, new_vals) - else: + elif vals: setattr(resources, key, rewrite_basepath(vals, resources, export_path)) @@ -150,9 +153,11 @@ def zip_export(file_name, prefix, resources, project_files): resources.libraries + resources.hex_files + \ [resources.linker_script] + resources.bin_files \ + resources.objects + resources.json_files: - zip_file.write(source, - join(prefix, relpath(source, - resources.file_basepath[source]))) + if source: + zip_file.write(source, + join(prefix, + relpath(source, + resources.file_basepath[source]))) def export_project(src_paths, export_path, target, ide, From 36c750b39fa31d186d0a9bfe27c275fc9412d379 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Thu, 28 Jul 2016 19:17:40 -0500 Subject: [PATCH 08/17] Reinstated the zip exporter --- tools/export/__init__.py | 2 +- tools/export/zip.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tools/export/__init__.py b/tools/export/__init__.py index 09f3e620d41..c5ffb6b4b57 100644 --- a/tools/export/__init__.py +++ b/tools/export/__init__.py @@ -21,7 +21,7 @@ from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar from tools.export import emblocks, coide, kds, simplicityv3, atmelstudio -from tools.export import sw4stm32, e2studio +from tools.export import sw4stm32, e2studio, zip from tools.export.exporters import OldLibrariesException, FailedBuildException from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP diff --git a/tools/export/zip.py b/tools/export/zip.py index b9828a61a1c..3961eb0622c 100644 --- a/tools/export/zip.py +++ b/tools/export/zip.py @@ -33,9 +33,11 @@ class ZIP(Exporter): 's_sources':'2' } + TOOLCHAIN = 'ARM' + def get_toolchain(self): return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM' def generate(self): return True - \ No newline at end of file + From e5de39efff868a6ff122696af62e4539a4b56fca Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 8 Aug 2016 15:01:24 -0500 Subject: [PATCH 09/17] Fix renaming issues in non-CI ides Affects these ides: - Atmel Studio - Code Red (I don't think we support this) - Coide - DS-5 - E2Studio - EMblocks - KDS - Simplicity v3 - SW 4 STM32 also corrects flags usage in EMBlocks --- tools/export/atmelstudio.py | 2 +- tools/export/codered.py | 2 +- tools/export/coide.py | 2 
+- tools/export/ds5_5.py | 2 +- tools/export/e2studio.py | 2 +- tools/export/emblocks.py | 8 ++++---- tools/export/kds.py | 2 +- tools/export/simplicityv3.py | 3 +-- tools/export/sw4stm32.py | 2 +- 9 files changed, 12 insertions(+), 13 deletions(-) diff --git a/tools/export/atmelstudio.py b/tools/export/atmelstudio.py index 251ea3376d3..66c3c43020f 100644 --- a/tools/export/atmelstudio.py +++ b/tools/export/atmelstudio.py @@ -69,7 +69,7 @@ def generate(self): 'library_paths': self.resources.lib_dirs, 'linker_script': self.resources.linker_script, 'libraries': libraries, - 'symbols': self.get_symbols(), + 'symbols': self.toolchain.get_symbols(), 'solution_uuid': solution_uuid.upper(), 'project_uuid': project_uuid.upper() } diff --git a/tools/export/codered.py b/tools/export/codered.py index cf5ef251dca..185e69a60dc 100644 --- a/tools/export/codered.py +++ b/tools/export/codered.py @@ -53,7 +53,7 @@ def generate(self): 'linker_script': self.resources.linker_script, 'object_files': self.resources.objects, 'libraries': libraries, - 'symbols': self.get_symbols() + 'symbols': self.toolchain.get_symbols() } ctx.update(self.flags) self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project') diff --git a/tools/export/coide.py b/tools/export/coide.py index 2503cd0ce51..4af69986efb 100644 --- a/tools/export/coide.py +++ b/tools/export/coide.py @@ -106,7 +106,7 @@ def generate(self): 'library_paths': self.resources.lib_dirs, 'object_files': self.resources.objects, 'libraries': libraries, - 'symbols': self.get_symbols() + 'symbols': self.toolchain.get_symbols() } target = self.target.lower() diff --git a/tools/export/ds5_5.py b/tools/export/ds5_5.py index d83599879dc..9be2535867a 100644 --- a/tools/export/ds5_5.py +++ b/tools/export/ds5_5.py @@ -59,7 +59,7 @@ def generate(self): 'scatter_file': self.resources.linker_script, 'object_files': self.resources.objects + self.resources.libraries, 'source_files': source_files, - 'symbols': self.get_symbols() + 'symbols': self.toolchain.get_symbols() } target = self.target.lower() diff --git a/tools/export/e2studio.py b/tools/export/e2studio.py index 4fda319c660..205287089ad 100644 --- a/tools/export/e2studio.py +++ b/tools/export/e2studio.py @@ -39,7 +39,7 @@ def generate(self): 'object_files': self.resources.objects, 'libraries': libraries, - 'symbols': self.get_symbols() + 'symbols': self.toolchain.get_symbols() } self.gen_file('e2studio_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('e2studio_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') diff --git a/tools/export/emblocks.py b/tools/export/emblocks.py index 4f4aea1b1f5..9e241994529 100644 --- a/tools/export/emblocks.py +++ b/tools/export/emblocks.py @@ -68,12 +68,12 @@ def generate(self): 'script_file': self.resources.linker_script, 'library_paths': self.resources.lib_dirs, 'libraries': libraries, - 'symbols': self.get_symbols(), + 'symbols': self.toolchain.get_symbols(), 'object_files': self.resources.objects, 'sys_libs': self.toolchain.sys_libs, - 'cc_org': self.flags['common'] + self.flags['c'], - 'ld_org': self.flags['common'] + self.flags['ld'], - 'cppc_org': self.flags['common'] + self.flags['cxx'] + 'cc_org': self.flags['common_flags'] + self.flags['c_flags'], + 'ld_org': self.flags['common_flags'] + self.flags['ld_flags'], + 'cppc_org': self.flags['common_flags'] + self.flags['cxx_flags'] } # EmBlocks intermediate file template diff --git a/tools/export/kds.py b/tools/export/kds.py index 6579369d30c..b77a507f176 100644 --- a/tools/export/kds.py 
+++ b/tools/export/kds.py @@ -40,7 +40,7 @@ def generate(self): 'linker_script': self.resources.linker_script, 'object_files': self.resources.objects, 'libraries': libraries, - 'symbols': self.get_symbols() + 'symbols': self.toolchain.get_symbols() } self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject') diff --git a/tools/export/simplicityv3.py b/tools/export/simplicityv3.py index 3ddce6842dc..ba6f6f185bf 100644 --- a/tools/export/simplicityv3.py +++ b/tools/export/simplicityv3.py @@ -147,7 +147,7 @@ def generate(self): libraries.append(l[3:]) defines = [] - for define in self.get_symbols(): + for define in self.toolchain.get_symbols(): if '=' in define: keyval = define.split('=') defines.append( (keyval[0], keyval[1]) ) @@ -165,7 +165,6 @@ def generate(self): 'library_paths': self.resources.lib_dirs, 'linker_script': self.resources.linker_script, 'libraries': libraries, - 'symbols': self.get_symbols(), 'defines': defines, 'part': self.PARTS[self.target], 'kit': self.KITS[self.target], diff --git a/tools/export/sw4stm32.py b/tools/export/sw4stm32.py index 7d0ea356fb0..160879f92ef 100644 --- a/tools/export/sw4stm32.py +++ b/tools/export/sw4stm32.py @@ -84,7 +84,7 @@ def generate(self): 'library_paths': self.resources.lib_dirs, 'object_files': self.resources.objects, 'libraries': libraries, - 'symbols': self.get_symbols(), + 'symbols': self.toolchain.get_symbols(), 'board_name': self.BOARDS[self.target.upper()]['name'], 'mcu_name': self.BOARDS[self.target.upper()]['mcuId'], 'debug_config_uid': self.__generate_uid(), From 2196d50e72058b539bd5529b321ec039184549ae Mon Sep 17 00:00:00 2001 From: Sarah Marsh Date: Thu, 18 Aug 2016 14:30:46 -0500 Subject: [PATCH 10/17] Create projectfiles directory when exporting Compatible with new c/asm/cpp flag separation. --- tools/export/exporters.py | 15 +++------------ tools/export/uvision4.py | 16 ++++------------ tools/export/uvision5.py | 15 ++++----------- tools/project.py | 6 +++--- tools/project_api.py | 3 ++- 5 files changed, 16 insertions(+), 39 deletions(-) diff --git a/tools/export/exporters.py b/tools/export/exporters.py index f48a0aebb44..c3b381547c6 100644 --- a/tools/export/exporters.py +++ b/tools/export/exporters.py @@ -71,18 +71,9 @@ def __init__(self, target, export_dir, project_name, toolchain, jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__))) self.jinja_environment = Environment(loader=jinja_loader) self.resources = resources - self.symbols = self.toolchain.get_symbols() self.generated_files = [] self.builder_files_dict = {} - # Add extra symbols and config file symbols to the Exporter's list of - # symbols. 
- config_macros = self.toolchain.config.get_config_data_macros() - if config_macros: - self.symbols.extend(config_macros) - if extra_symbols: - self.symbols.extend(extra_symbols) - def get_toolchain(self): """A helper getter function that we should probably eliminate""" return self.TOOLCHAIN @@ -98,8 +89,6 @@ def flags(self): common_flags - common options """ config_header = self.toolchain.get_config_header() - config_header = relpath(config_header, - self.resources.file_basepath[config_header]) flags = {key + "_flags": value for key, value in self.toolchain.flags.iteritems()} asm_defines = ["-D" + symbol for symbol in self.toolchain.get_symbols(True)] @@ -108,6 +97,8 @@ def flags(self): flags['c_flags'] += c_defines flags['cxx_flags'] += c_defines if config_header: + config_header = relpath(config_header, + self.resources.file_basepath[config_header]) flags['c_flags'] += self.toolchain.get_config_option(config_header) flags['cxx_flags'] += self.toolchain.get_config_option( config_header) @@ -162,7 +153,7 @@ def grouped(sources): project_data['source_files_lib'] = grouped(self.resources.libraries) project_data['output_dir']['path'] = self.export_dir project_data['linker_file'] = self.resources.linker_script - project_data['macros'] = self.symbols + project_data['macros'] = [] project_data['build_dir'] = 'build' project_data['template'] = None project_data['name'] = self.project_name diff --git a/tools/export/uvision4.py b/tools/export/uvision4.py index 42d52997170..9274948a6e6 100644 --- a/tools/export/uvision4.py +++ b/tools/export/uvision4.py @@ -79,7 +79,10 @@ def generate(self): # asm flags only, common are not valid within uvision project, they are armcc specific project_data['misc']['asm_flags'] = [asm_flag_string] # cxx flags included, as uvision have them all in one tab - project_data['misc']['c_flags'] = list(set(self.flags['common_flags'] + self.flags['c_flags'] + self.flags['cxx_flags'])) + project_data['misc']['c_flags'] = list(set(['-D__ASSERT_MSG'] + + self.progen_flags['common_flags'] + + self.progen_flags['c_flags'] + + self.progen_flags['cxx_flags'])) # not compatible with c99 flag set in the template project_data['misc']['c_flags'].remove("--c99") # cpp is not required as it's implicit for cpp files @@ -88,16 +91,5 @@ def generate(self): project_data['misc']['c_flags'].remove("--no_vla") project_data['misc']['ld_flags'] = self.flags['ld_flags'] - i = 0 - for macro in self.symbols: - # armasm does not like floating numbers in macros, timestamp to int - if macro.startswith('MBED_BUILD_TIMESTAMP'): - timestamp = macro[len('MBED_BUILD_TIMESTAMP='):] - project_data['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp))) - # armasm does not even accept MACRO=string - if macro.startswith('MBED_USERNAME'): - project_data['macros'].pop(i) - i += 1 - project_data['macros'].append('__ASSERT_MSG') project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision4' self.progen_gen_file(project_data) diff --git a/tools/export/uvision5.py b/tools/export/uvision5.py index 28d12e74df5..da2f730f678 100644 --- a/tools/export/uvision5.py +++ b/tools/export/uvision5.py @@ -78,7 +78,10 @@ def generate(self): # asm flags only, common are not valid within uvision project, they are armcc specific project_data['misc']['asm_flags'] = [asm_flag_string] # cxx flags included, as uvision have them all in one tab - project_data['misc']['c_flags'] = list(set(self.flags['common_flags'] + self.flags['c_flags'] + self.flags['cxx_flags'])) + project_data['misc']['c_flags'] = 
list(set(['-D__ASSERT_MSG'] + + self.progen_flags['common_flags'] + + self.progen_flags['c_flags'] + + self.progen_flags['cxx_flags'])) # not compatible with c99 flag set in the template project_data['misc']['c_flags'].remove("--c99") # cpp is not required as it's implicit for cpp files @@ -88,15 +91,5 @@ def generate(self): project_data['misc']['ld_flags'] = self.flags['ld_flags'] i = 0 - for macro in self.symbols: - # armasm does not like floating numbers in macros, timestamp to int - if macro.startswith('MBED_BUILD_TIMESTAMP'): - timestamp = macro[len('MBED_BUILD_TIMESTAMP='):] - project_data['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp))) - # armasm does not even accept MACRO=string - if macro.startswith('MBED_USERNAME'): - project_data['macros'].pop(i) - i += 1 - project_data['macros'].append('__ASSERT_MSG') project_data['build_dir'] = project_data['build_dir'] + '\\' + 'uvision5' self.progen_gen_file(project_data) diff --git a/tools/project.py b/tools/project.py index 004164667b6..faf28b9fcc2 100644 --- a/tools/project.py +++ b/tools/project.py @@ -8,7 +8,7 @@ from shutil import move, rmtree from argparse import ArgumentParser -from os.path import normpath +from os.path import normpath, realpath from tools.paths import EXPORT_DIR, MBED_BASE, MBED_LIBRARIES from tools.export import EXPORTERS, mcu_ide_matrix @@ -39,11 +39,11 @@ def setup_project(ide, target, program=None, source_dir=None, build=None): if source_dir: # --source is used to generate IDE files to toolchain directly # in the source tree and doesn't generate zip file - project_dir = source_dir[0] + project_dir = join(source_dir[0],'projectfiles',ide+"_"+target) if program: project_name = TESTS[program] else: - project_name = basename(normpath(source_dir[0])) + project_name = basename(normpath(realpath(source_dir[0]))) src_paths = source_dir lib_paths = None else: diff --git a/tools/project_api.py b/tools/project_api.py index 2dfec2ed107..ece25162063 100644 --- a/tools/project_api.py +++ b/tools/project_api.py @@ -214,7 +214,6 @@ def export_project(src_paths, export_path, target, ide, jobs=jobs, notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose, config=config) - # The first path will give the name to the library if name is None: name = basename(normpath(abspath(src_paths[0]))) @@ -229,6 +228,8 @@ def export_project(src_paths, export_path, target, ide, if zip_proj: subtract_basepath(resources, export_path) + else: + resources.relative_to(export_path) # Change linker script if specified if linker_script is not None: From 6686411220c6ba8d1d2f83554c26e140da9bc57d Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Mon, 22 Aug 2016 09:27:40 -0500 Subject: [PATCH 11/17] Set vpath correctly when exporting to projectfiles directory --- tools/export/gccarm.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/tools/export/gccarm.py b/tools/export/gccarm.py index afe5c81b055..feb46aa011a 100644 --- a/tools/export/gccarm.py +++ b/tools/export/gccarm.py @@ -14,9 +14,9 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from exporters import Exporter -from os.path import splitext, basename, relpath, join, abspath +from os.path import splitext, basename, relpath, join, abspath, dirname from os import curdir, getcwd +from tools.export.exporters import Exporter class GccArm(Exporter): @@ -150,7 +150,6 @@ def generate(self): l, _ = splitext(basename(lib)) libraries.append(l[3:]) - build_dir = abspath(join(self.export_dir, ".build")) ctx = { 'name': self.project_name, 'to_be_compiled': to_be_compiled, @@ -162,7 +161,9 @@ def generate(self): 'symbols': self.toolchain.get_symbols(), 'cpu_flags': self.toolchain.cpu, 'hex_files': self.resources.hex_files, - 'vpath': [".."] + 'vpath': (["../../.."] + if basename(dirname(dirname(self.export_dir))) == "projectfiles" + else [".."]) } for key in ['include_paths', 'library_paths', 'linker_script', 'hex_files']: @@ -174,7 +175,3 @@ def generate(self): ctx["include_paths"] += ['../.'] ctx.update(self.flags) self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile') - - def scan_and_copy_resources(self, prj_paths, trg_path, relative=False): - self.prj_paths = prj_paths - Exporter.scan_and_copy_resources(self, prj_paths, trg_path, relative) From cadd233b8e39f3a305317ea51447f9da807b6527 Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 23 Aug 2016 16:56:22 -0500 Subject: [PATCH 12/17] Allow dict in addition to the other types of src_paths The dict allows the user of the exporter api to specify the result directory of particular groups of scanned dirs. This will be used by the online exporters to spoof everything being in the same directory when they are not. It may also be used by tests, if they would like to export something that looks exactly like a normal project. --- tools/project_api.py | 68 ++++++++++++++++++++++++++++---------------- 1 file changed, 43 insertions(+), 25 deletions(-) diff --git a/tools/project_api.py b/tools/project_api.py index ece25162063..d4f7484d745 100644 --- a/tools/project_api.py +++ b/tools/project_api.py @@ -12,6 +12,7 @@ from tools.build_api import prepare_toolchain from tools.build_api import scan_resources from tools.export import EXPORTERS +from tools.toolchains import Resources def get_exporter_toolchain(ide): @@ -49,13 +50,16 @@ def subtract_basepath(resources, export_path): 'lib_dirs'] for key in keys: vals = getattr(resources, key) - if type(vals) is set: + if isinstance(vals, set): vals = list(vals) - if type(vals) is list: + if isinstance(vals, list): new_vals = [] for val in vals: new_vals.append(rewrite_basepath(val, resources, export_path)) - setattr(resources, key, new_vals) + if isinstance(getattr(resources, key), set): + setattr(resources, key, set(new_vals)) + else: + setattr(resources, key, new_vals) elif vals: setattr(resources, key, rewrite_basepath(vals, resources, export_path)) @@ -85,7 +89,7 @@ def prepare_project(src_paths, export_path, target, ide, _, toolchain_name = get_exporter_toolchain(ide) # Pass all params to the unified prepare_resources() - toolchain = prepare_toolchain(src_paths, export_path, target, + toolchain = prepare_toolchain(src_paths, target, toolchain_name, macros=macros, options=options, clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose, @@ -111,7 +115,7 @@ def prepare_project(src_paths, export_path, target, ide, def generate_project_files(resources, export_path, target, name, toolchain, ide, - macros=None): + macros=None): """Generate the project files for a project Positional arguments: @@ -148,16 +152,16 @@ def zip_export(file_name, prefix, resources, 
project_files): with zipfile.ZipFile(file_name, "w") as zip_file: for prj_file in project_files: zip_file.write(prj_file, join(prefix, basename(prj_file))) - for source in resources.headers + resources.s_sources + \ - resources.c_sources + resources.cpp_sources + \ - resources.libraries + resources.hex_files + \ - [resources.linker_script] + resources.bin_files \ - + resources.objects + resources.json_files: - if source: - zip_file.write(source, - join(prefix, - relpath(source, - resources.file_basepath[source]))) + for loc, res in resources.iteritems(): + for source in \ + res.headers + res.s_sources + res.c_sources + res.cpp_sources +\ + res.libraries + res.hex_files + [res.linker_script] +\ + res.bin_files + res.objects + res.json_files: + if source: + zip_file.write(source, + join(prefix, loc, + relpath(source, + res.file_basepath[source]))) def export_project(src_paths, export_path, target, ide, @@ -194,11 +198,19 @@ def export_project(src_paths, export_path, target, ide, """ # Convert src_path to a list if needed - if type(src_paths) != type([]): - src_paths = [src_paths] - # Extend src_paths wiht libraries_paths + if isinstance(src_paths, dict): + paths = sum(src_paths.values(), []) + elif isinstance(src_paths, list): + paths = src_paths[:] + else: + paths = [src_paths] + + # Extend src_paths wit libraries_paths if libraries_paths is not None: - src_paths.extend(libraries_paths) + paths.extend(libraries_paths) + + if not isinstance(src_paths, dict): + src_paths = {"": paths} # Export Directory if exists(export_path) and clean: @@ -209,7 +221,7 @@ def export_project(src_paths, export_path, target, ide, _, toolchain_name = get_exporter_toolchain(ide) # Pass all params to the unified prepare_resources() - toolchain = prepare_toolchain(src_paths, target, toolchain_name, + toolchain = prepare_toolchain(paths, target, toolchain_name, macros=macros, options=options, clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose, @@ -219,17 +231,23 @@ def export_project(src_paths, export_path, target, ide, name = basename(normpath(abspath(src_paths[0]))) # Call unified scan_resources - resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) + resource_dict = {loc: scan_resources(path, toolchain, inc_dirs=inc_dirs) + for loc, path in src_paths.iteritems()} + resources = Resources() toolchain.build_dir = export_path config_header = toolchain.get_config_header() resources.headers.append(config_header) resources.file_basepath[config_header] = dirname(config_header) - temp = copy.deepcopy(resources) if zip_proj: subtract_basepath(resources, export_path) + for loc, res in resource_dict.iteritems(): + temp = copy.deepcopy(res) + subtract_basepath(temp, join(export_path, loc)) + resources.add(temp) else: - resources.relative_to(export_path) + for _, res in resource_dict.iteritems(): + resources.add(res) # Change linker script if specified if linker_script is not None: @@ -240,9 +258,9 @@ def export_project(src_paths, export_path, target, ide, macros=macros) if zip_proj: if isinstance(zip_proj, basestring): - zip_export(join(export_path, zip_proj), name, temp, files) + zip_export(join(export_path, zip_proj), name, resource_dict, files) else: - zip_export(zip_proj, name, temp, files) + zip_export(zip_proj, name, resource_dict, files) return exporter From c550f9da751a9727702581ae098f2a479eb044af Mon Sep 17 00:00:00 2001 From: Jimmy Brisson Date: Tue, 23 Aug 2016 17:28:07 -0500 Subject: [PATCH 13/17] Fix some tracebacks, add zip exporter to the CLI --- 
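
The dict form of src_paths that export_project() accepts as of the previous patch maps an output subdirectory of the generated project (and zip) to a list of scanned source roots. A hypothetical call, with placeholder paths and an explicit name (the default name is only inferred from the first entry of a list):

    from tools.project_api import export_project

    # Placeholder layout: project sources at the project root, mbed-os kept
    # under its own subdirectory inside the exported project and zip
    exporter = export_project(
        {"": ["source"], "mbed-os": ["mbed-os"]},
        "generated/iar_lpc1768", "LPC1768", "iar",
        name="my_project",
        zip_proj="project.zip")
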
 tools/export/__init__.py | 1 +
 tools/export/uvision4.py | 6 +++---
 tools/export/uvision5.py | 6 +++---
 tools/project_api.py     | 1 +
 4 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/tools/export/__init__.py b/tools/export/__init__.py
index c5ffb6b4b57..301dc7117af 100644
--- a/tools/export/__init__.py
+++ b/tools/export/__init__.py
@@ -42,6 +42,7 @@
     'atmelstudio' : atmelstudio.AtmelStudio,
     'sw4stm32' : sw4stm32.Sw4STM32,
     'e2studio' : e2studio.E2Studio,
+    'zip' : zip.ZIP,
 }
 
 ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN = """
diff --git a/tools/export/uvision4.py b/tools/export/uvision4.py
index 9274948a6e6..5d3b548d742 100644
--- a/tools/export/uvision4.py
+++ b/tools/export/uvision4.py
@@ -80,9 +80,9 @@ def generate(self):
         project_data['misc']['asm_flags'] = [asm_flag_string]
         # cxx flags included, as uvision have them all in one tab
         project_data['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']
-                                                   + self.progen_flags['common_flags']
-                                                   + self.progen_flags['c_flags']
-                                                   + self.progen_flags['cxx_flags']))
+                                                   + self.flags['common_flags']
+                                                   + self.flags['c_flags']
+                                                   + self.flags['cxx_flags']))
         # not compatible with c99 flag set in the template
         project_data['misc']['c_flags'].remove("--c99")
         # cpp is not required as it's implicit for cpp files
diff --git a/tools/export/uvision5.py b/tools/export/uvision5.py
index da2f730f678..5eb08a58039 100644
--- a/tools/export/uvision5.py
+++ b/tools/export/uvision5.py
@@ -79,9 +79,9 @@ def generate(self):
         project_data['misc']['asm_flags'] = [asm_flag_string]
         # cxx flags included, as uvision have them all in one tab
         project_data['misc']['c_flags'] = list(set(['-D__ASSERT_MSG']
-                                                   + self.progen_flags['common_flags']
-                                                   + self.progen_flags['c_flags']
-                                                   + self.progen_flags['cxx_flags']))
+                                                   + self.flags['common_flags']
+                                                   + self.flags['c_flags']
+                                                   + self.flags['cxx_flags']))
         # not compatible with c99 flag set in the template
         project_data['misc']['c_flags'].remove("--c99")
         # cpp is not required as it's implicit for cpp files
diff --git a/tools/project_api.py b/tools/project_api.py
index d4f7484d745..190b9205601 100644
--- a/tools/project_api.py
+++ b/tools/project_api.py
@@ -256,6 +256,7 @@ def export_project(src_paths, export_path, target, ide,
     files, exporter = generate_project_files(resources, export_path, target,
                                              name, toolchain, ide,
                                              macros=macros)
+    files.append(config_header)
     if zip_proj:
         if isinstance(zip_proj, basestring):
             zip_export(join(export_path, zip_proj), name, resource_dict, files)

From b5c189931e41da8d7445e95cc1adf22284cfb866 Mon Sep 17 00:00:00 2001
From: Jimmy Brisson
Date: Tue, 23 Aug 2016 19:55:22 -0500
Subject: [PATCH 14/17] Fix include paths for fragmented projects; remove deadcode

---
 tools/project_api.py | 62 +++++---------------------------------------
 1 file changed, 7 insertions(+), 55 deletions(-)

diff --git a/tools/project_api.py b/tools/project_api.py
index 190b9205601..3a3b1c3aa32 100644
--- a/tools/project_api.py
+++ b/tools/project_api.py
@@ -24,7 +24,7 @@ def get_exporter_toolchain(ide):
     return EXPORTERS[ide], EXPORTERS[ide].TOOLCHAIN
 
 
-def rewrite_basepath(file_name, resources, export_path):
+def rewrite_basepath(file_name, resources, export_path, loc):
     """ Replace the basepath of filename with export_path
 
     Positional arguments:
@@ -32,12 +32,12 @@ def rewrite_basepath(file_name, resources, export_path):
     resources - the resources object that the file came from
     export_path - the final destination of the file after export
     """
-    new_f = relpath(file_name, resources.file_basepath[file_name])
+    new_f = join(loc, relpath(file_name, resources.file_basepath[file_name]))
     resources.file_basepath[join(export_path, new_f)] = export_path
     return new_f
 
 
-def subtract_basepath(resources, export_path):
+def subtract_basepath(resources, export_path, loc=""):
     """ Rewrite all of the basepaths with the export_path
 
     Positional arguments:
@@ -55,63 +55,15 @@ def subtract_basepath(resources, export_path):
         if isinstance(vals, list):
             new_vals = []
             for val in vals:
-                new_vals.append(rewrite_basepath(val, resources, export_path))
+                new_vals.append(rewrite_basepath(val, resources, export_path,
+                                                 loc))
             if isinstance(getattr(resources, key), set):
                 setattr(resources, key, set(new_vals))
             else:
                 setattr(resources, key, new_vals)
         elif vals:
             setattr(resources, key, rewrite_basepath(vals, resources,
-                                                      export_path))
-
-
-def prepare_project(src_paths, export_path, target, ide,
-                    libraries_paths=None, options=None, linker_script=None,
-                    clean=False, notify=None, verbose=False, name=None,
-                    inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
-                    config=None, macros=None):
-    """ This function normalizes the
-    """
-
-    # Convert src_path to a list if needed
-    if type(src_paths) != type([]):
-        src_paths = [src_paths]
-    # Extend src_paths wiht libraries_paths
-    if libraries_paths is not None:
-        src_paths.extend(libraries_paths)
-
-    # Export Directory
-    if exists(export_path) and clean:
-        rmtree(export_path)
-    if not exists(export_path):
-        makedirs(export_path)
-
-    _, toolchain_name = get_exporter_toolchain(ide)
-
-    # Pass all params to the unified prepare_resources()
-    toolchain = prepare_toolchain(src_paths, target,
-                                  toolchain_name, macros=macros,
-                                  options=options, clean=clean, jobs=jobs,
-                                  notify=notify, silent=silent, verbose=verbose,
-                                  extra_verbose=extra_verbose, config=config)
-
-
-    # The first path will give the name to the library
-    if name is None:
-        name = basename(normpath(abspath(src_paths[0])))
-
-    # Call unified scan_resources
-    resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
-    toolchain.build_dir = export_path
-    config_header = toolchain.get_config_header()
-    resources.headers.append(config_header)
-    resources.file_basepath[config_header] = dirname(config_header)
-
-    # Change linker script if specified
-    if linker_script is not None:
-        resources.linker_script = linker_script
-
-    return resources, toolchain
+                                                      export_path, loc))
 
 
 def generate_project_files(resources, export_path, target, name, toolchain, ide,
@@ -243,7 +195,7 @@ def export_project(src_paths, export_path, target, ide,
         subtract_basepath(resources, export_path)
         for loc, res in resource_dict.iteritems():
             temp = copy.deepcopy(res)
-            subtract_basepath(temp, join(export_path, loc))
+            subtract_basepath(temp, export_path, loc)
             resources.add(temp)
     else:
         for _, res in resource_dict.iteritems():

From a6be472111f47ebcb76b4bba0dcf1a2bc2edec7a Mon Sep 17 00:00:00 2001
From: Jimmy Brisson
Date: Wed, 24 Aug 2016 10:29:17 -0500
Subject: [PATCH 15/17] Add repo_dirs, repo_files, lib_builds, and lib_refs to zips

---
 tools/project_api.py | 34 +++++++++++++++++++++++-----------
 1 file changed, 23 insertions(+), 11 deletions(-)

diff --git a/tools/project_api.py b/tools/project_api.py
index 3a3b1c3aa32..56ef18282ef 100644
--- a/tools/project_api.py
+++ b/tools/project_api.py
@@ -2,7 +2,7 @@
 import sys
 from os.path import join, abspath, dirname, exists
 from os.path import basename, relpath, normpath
-from os import makedirs
+from os import makedirs, walk
 ROOT = abspath(join(dirname(__file__), ".."))
 sys.path.insert(0, ROOT)
 import copy
@@ -104,16 +104,28 @@ def zip_export(file_name, prefix, resources,
                project_files):
     with zipfile.ZipFile(file_name, "w") as zip_file:
         for prj_file in project_files:
             zip_file.write(prj_file, join(prefix, basename(prj_file)))
-        for loc, res in resources.iteritems():
-            for source in \
-                res.headers + res.s_sources + res.c_sources + res.cpp_sources +\
-                res.libraries + res.hex_files + [res.linker_script] +\
-                res.bin_files + res.objects + res.json_files:
-                if source:
-                    zip_file.write(source,
-                                   join(prefix, loc,
-                                        relpath(source,
-                                                res.file_basepath[source])))
+        for loc, resource in resources.iteritems():
+            print resource.features
+            for res in [resource] + resource.features.values():
+                extras = []
+                for directory in res.repo_dirs:
+                    for root, _, files in walk(directory):
+                        for repo_file in files:
+                            source = join(root, repo_file)
+                            extras.append(source)
+                            res.file_basepath[source] = res.base_path
+                for source in \
+                    res.headers + res.s_sources + res.c_sources +\
+                    res.cpp_sources + res.libraries + res.hex_files + \
+                    [res.linker_script] + res.bin_files + res.objects + \
+                    res.json_files + res.lib_refs + res.lib_builds + \
+                    res.repo_files + extras:
+                    if source:
+                        zip_file.write(
+                            source,
+                            join(prefix, loc,
+                                 relpath(source, res.file_basepath[source])))
+
 
 def export_project(src_paths, export_path, target, ide,

From 7ee621320a1cc856ea7d000eed306274f51b1cf8 Mon Sep 17 00:00:00 2001
From: Sarah Marsh
Date: Wed, 24 Aug 2016 13:01:56 -0500
Subject: [PATCH 16/17] Removed projectfiles directory. debug-info default option

---
 tools/project.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/tools/project.py b/tools/project.py
index faf28b9fcc2..4fb905fbe14 100644
--- a/tools/project.py
+++ b/tools/project.py
@@ -39,7 +39,7 @@ def setup_project(ide, target, program=None, source_dir=None, build=None):
     if source_dir:
         # --source is used to generate IDE files to toolchain directly
        # in the source tree and doesn't generate zip file
-        project_dir = join(source_dir[0],'projectfiles',ide+"_"+target)
+        project_dir = source_dir[0]
         if program:
            project_name = TESTS[program]
        else:
@@ -63,7 +63,7 @@ def setup_project(ide, target, program=None, source_dir=None, build=None):
 
 
 def export(target, ide, build=None, src=None, macros=None, project_id=None,
-           clean=False, zip_proj=False):
+           clean=False, zip_proj=False, options=None):
     """Do an export of a project.
 
     Positional arguments:
@@ -84,7 +84,7 @@ def export(target, ide, build=None, src=None, macros=None, project_id=None,
     zip_name = name+".zip" if zip_proj else None
 
     export_project(src, project_dir, target, ide, clean=clean, name=name,
-                   macros=macros, libraries_paths=lib, zip_proj=zip_name)
+                   macros=macros, libraries_paths=lib, zip_proj=zip_name, options=options)
 
 
 def main():
@@ -100,8 +100,7 @@ def main():
     parser.add_argument("-m", "--mcu",
                         metavar="MCU",
                         default='LPC1768',
-                        type=argparse_many(
-                            argparse_force_uppercase_type(targetnames, "MCU")),
+                        type=argparse_force_uppercase_type(targetnames, "MCU"),
                         help="generate project for the given MCU ({})".format(
                             ', '.join(targetnames)))
 
@@ -165,6 +164,12 @@ def main():
                         dest="macros",
                         help="Add a macro definition")
 
+    parser.add_argument("-o",
+                        type=argparse_many(str),
+                        dest="opts",
+                        default=["debug-info"],
+                        help="Toolchain options")
+
     options = parser.parse_args()
 
     # Print available tests in order and exit
@@ -212,10 +217,10 @@ def main():
     if (options.program is None) and (not options.source_dir):
         args_error(parser, "one of -p, -n, or --source is required")
     # Export to selected toolchain
-    for mcu in options.mcu:
-        export(mcu, options.ide, build=options.build, src=options.source_dir,
-               macros=options.macros, project_id=options.program,
-               clean=options.clean, zip_proj=zip_proj)
+    export(options.mcu, options.ide, build=options.build,
+           src=options.source_dir, macros=options.macros,
+           project_id=options.program, clean=options.clean,
+           zip_proj=zip_proj, options=options.opts)
 
 
 if __name__ == "__main__":

From 6564bdaf1d256413bd28c4765730222048665090 Mon Sep 17 00:00:00 2001
From: Jimmy Brisson
Date: Tue, 6 Sep 2016 14:50:32 -0500
Subject: [PATCH 17/17] Bump Progen version

Required to prevent that pesky -1million something error code from ruining
our CI
---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index a15841c9d6b..d01de683e98 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ PySerial>=2.7
 PrettyTable>=0.7.2
 Jinja2>=2.7.3
 IntelHex>=1.3
-project-generator==0.9.9
+project-generator==0.9.10
 project_generator_definitions>=0.2.26,<0.3.0
 junit-xml
 pyYAML
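
Usage sketch for the reworked export_project() API after this series
(illustrative only: the target name, IDE key, and paths below are examples,
not values taken from the patches; a plain list or single path also works,
since export_project() wraps non-dict inputs as {"": paths}):

    from tools.project_api import export_project

    exporter = export_project(
        {"": ["./app"], "lib/foo": ["./foo"]},  # dict keys become per-location prefixes in the zip
        "./export",                             # export_path
        "K64F",                                 # target (example)
        "uvision5",                             # ide key as registered in EXPORTERS (example)
        name="app",
        zip_proj="app.zip")                     # a string makes zip_export() write app.zip under export_path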