From 9b4eb37f38301643e7b8afae5f86da5cfb960b6c Mon Sep 17 00:00:00 2001
From: Carles Cufi
Date: Mon, 12 Nov 2018 15:12:59 +0100
Subject: [PATCH] scripts: remove west from scripts/

west will now be installed via pip in order for the bootstrapper to be
decoupled from the west runners.

Signed-off-by: Carles Cufi
---
 scripts/meta/west/_bootstrap/__init__.py | 5 -
 scripts/meta/west/_bootstrap/main.py | 439 -------
 scripts/meta/west/_bootstrap/version.py | 5 -
 scripts/meta/west/_bootstrap/west-schema.yml | 17 -
 scripts/meta/west/build.py | 42 -
 scripts/meta/west/cmake.py | 220 ----
 scripts/meta/west/commands/__init__.py | 74 --
 scripts/meta/west/commands/build.py | 288 -----
 scripts/meta/west/commands/debug.py | 70 --
 scripts/meta/west/commands/flash.py | 26 -
 scripts/meta/west/commands/project.py | 1143 ------------------
 scripts/meta/west/commands/run_common.py | 452 -------
 scripts/meta/west/config.py | 95 --
 scripts/meta/west/log.py | 105 --
 scripts/meta/west/main.py | 241 ----
 scripts/meta/west/manifest-schema.yml | 132 --
 scripts/meta/west/manifest.py | 400 ------
 scripts/meta/west/runners/__init__.py | 35 -
 scripts/meta/west/runners/arc.py | 107 --
 scripts/meta/west/runners/blackmagicprobe.py | 96 --
 scripts/meta/west/runners/bossac.py | 54 -
 scripts/meta/west/runners/core.py | 508 --------
 scripts/meta/west/runners/dfu.py | 121 --
 scripts/meta/west/runners/esp32.py | 100 --
 scripts/meta/west/runners/intel_s1000.py | 166 ---
 scripts/meta/west/runners/jlink.py | 150 ---
 scripts/meta/west/runners/nios2.py | 99 --
 scripts/meta/west/runners/nrfjprog.py | 129 --
 scripts/meta/west/runners/nsim.py | 94 --
 scripts/meta/west/runners/openocd.py | 145 ---
 scripts/meta/west/runners/pyocd.py | 169 ---
 scripts/meta/west/runners/qemu.py | 34 -
 scripts/meta/west/runners/xtensa.py | 40 -
 scripts/meta/west/util.py | 80 --
 scripts/west | 92 --
 zephyr-env.cmd | 11 -
 36 files changed, 5984 deletions(-)
 delete mode 100644 scripts/meta/west/_bootstrap/__init__.py
 delete mode 100644 scripts/meta/west/_bootstrap/main.py
 delete mode 100644 scripts/meta/west/_bootstrap/version.py
 delete mode 100644 scripts/meta/west/_bootstrap/west-schema.yml
 delete mode 100644 scripts/meta/west/build.py
 delete mode 100644 scripts/meta/west/cmake.py
 delete mode 100644 scripts/meta/west/commands/__init__.py
 delete mode 100644 scripts/meta/west/commands/build.py
 delete mode 100644 scripts/meta/west/commands/debug.py
 delete mode 100644 scripts/meta/west/commands/flash.py
 delete mode 100644 scripts/meta/west/commands/project.py
 delete mode 100644 scripts/meta/west/commands/run_common.py
 delete mode 100644 scripts/meta/west/config.py
 delete mode 100644 scripts/meta/west/log.py
 delete mode 100755 scripts/meta/west/main.py
 delete mode 100644 scripts/meta/west/manifest-schema.yml
 delete mode 100644 scripts/meta/west/manifest.py
 delete mode 100644 scripts/meta/west/runners/__init__.py
 delete mode 100644 scripts/meta/west/runners/arc.py
 delete mode 100644 scripts/meta/west/runners/blackmagicprobe.py
 delete mode 100644 scripts/meta/west/runners/bossac.py
 delete mode 100644 scripts/meta/west/runners/core.py
 delete mode 100644 scripts/meta/west/runners/dfu.py
 delete mode 100644 scripts/meta/west/runners/esp32.py
 delete mode 100644 scripts/meta/west/runners/intel_s1000.py
 delete mode 100644 scripts/meta/west/runners/jlink.py
 delete mode 100644 scripts/meta/west/runners/nios2.py
 delete mode 100644 scripts/meta/west/runners/nrfjprog.py
 delete mode 100644 scripts/meta/west/runners/nsim.py
 delete mode 100644 scripts/meta/west/runners/openocd.py
delete mode 100644 scripts/meta/west/runners/pyocd.py delete mode 100644 scripts/meta/west/runners/qemu.py delete mode 100644 scripts/meta/west/runners/xtensa.py delete mode 100644 scripts/meta/west/util.py delete mode 100755 scripts/west diff --git a/scripts/meta/west/_bootstrap/__init__.py b/scripts/meta/west/_bootstrap/__init__.py deleted file mode 100644 index 7ee30ca49b4..00000000000 --- a/scripts/meta/west/_bootstrap/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -# Empty file. diff --git a/scripts/meta/west/_bootstrap/main.py b/scripts/meta/west/_bootstrap/main.py deleted file mode 100644 index 6b835da4dda..00000000000 --- a/scripts/meta/west/_bootstrap/main.py +++ /dev/null @@ -1,439 +0,0 @@ -# Copyright 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''West's bootstrap/wrapper script. -''' - -import argparse -import configparser -import os -import platform -import pykwalify.core -import subprocess -import sys -import yaml - -import west._bootstrap.version as version - -if sys.version_info < (3,): - sys.exit('fatal error: you are running Python 2') - - -# -# Special files and directories in the west installation. -# -# These are given variable names for clarity, but they can't be -# changed without propagating the changes into west itself. -# - -# Top-level west directory, containing west itself and the manifest. -WEST_DIR = 'west' -# Subdirectory to check out the west source repository into. -WEST = 'west' -# Default west repository URL. -WEST_URL_DEFAULT = 'https://github.com/zephyrproject-rtos/west' -# Default revision to check out of the west repository. -WEST_REV_DEFAULT = 'master' -# File inside of WEST_DIR which marks it as the top level of the -# Zephyr project installation. -# -# (The WEST_DIR name is not distinct enough to use when searching for -# the top level; other directories named "west" may exist elsewhere, -# e.g. zephyr/doc/west.) -WEST_MARKER = '.west_topdir' - -# Manifest repository directory under WEST_DIR. -MANIFEST = 'manifest' -# Default manifest repository URL. -MANIFEST_URL_DEFAULT = 'https://github.com/zephyrproject-rtos/manifest' -# Default revision to check out of the manifest repository. -MANIFEST_REV_DEFAULT = 'master' - -_SCHEMA_PATH = os.path.join(os.path.dirname(__file__), "west-schema.yml") - -# -# Helpers shared between init and wrapper mode -# - - -class WestError(RuntimeError): - pass - - -class WestNotFound(WestError): - '''Neither the current directory nor any parent has a West installation.''' - - -def west_dir(start=None): - ''' - Returns the path to the west/ directory, searching ``start`` and its - parents. - - Raises WestNotFound if no west directory is found. - ''' - return os.path.join(west_topdir(start), WEST_DIR) - - -def manifest_dir(start=None): - ''' - Returns the path to the manifest/ directory, searching ``start`` and its - parents. - - Raises WestNotFound if no west directory is found. - ''' - return os.path.join(west_topdir(start), MANIFEST) - - -def west_topdir(start=None): - ''' - Like west_dir(), but returns the path to the parent directory of the west/ - directory instead, where project repositories are stored - ''' - # If you change this function, make sure to update west.util.west_topdir(). 
- - cur_dir = start or os.getcwd() - - while True: - if os.path.isfile(os.path.join(cur_dir, WEST_DIR, WEST_MARKER)): - return cur_dir - - parent_dir = os.path.dirname(cur_dir) - if cur_dir == parent_dir: - # At the root - raise WestNotFound('Could not find a West installation ' - 'in this or any parent directory') - cur_dir = parent_dir - - -def clone(desc, url, rev, dest): - if os.path.exists(dest): - raise WestError('refusing to clone into existing location ' + dest) - - print('=== Cloning {} from {}, rev. {} ==='.format(desc, url, rev)) - subprocess.check_call(('git', 'clone', '-b', rev, '--', url, dest)) - - -# -# west init -# - - -def init(argv): - '''Command line handler for ``west init`` invocations. - - This exits the program with a nonzero exit code if fatal errors occur.''' - - # Remember to update scripts/west-completion.bash if you add or remove - # flags - - init_parser = argparse.ArgumentParser( - prog='west init', - formatter_class=argparse.RawDescriptionHelpFormatter, - description=''' -Initializes a Zephyr installation. Use "west clone" afterwards to fetch the -sources. - -In more detail, does the following: - - 1. Clones the manifest repository to west/manifest, and the west repository - to west/west - - 2. Creates a marker file west/{} - - 3. Creates an initial configuration file west/config - -As an alternative to manually editing west/config, 'west init' can be rerun on -an already initialized West instance to update configuration settings. Only -explicitly passed configuration values (e.g. --mr MANIFEST_REVISION) are -updated. - -Updating the manifest URL or revision via 'west init' automatically runs 'west -update --reset-manifest --reset-projects' afterwards to reset the manifest to -the new revision, and all projects to their new manifest revisions. - -Updating the west URL or revision also runs 'west update --reset-west'. - -To suppress the reset of the manifest, west, and projects, pass --no-reset. -With --no-reset, only the configuration file will be updated, and you will have -to handle any resetting yourself. -'''.format(WEST_MARKER)) - - init_parser.add_argument( - '-m', '--manifest-url', - help='Manifest repository URL (default: {})' - .format(MANIFEST_URL_DEFAULT)) - - init_parser.add_argument( - '--mr', '--manifest-rev', dest='manifest_rev', - help='Manifest revision to fetch (default: {})' - .format(MANIFEST_REV_DEFAULT)) - - init_parser.add_argument( - '--nr', '--no-reset', dest='reset', action='store_false', - help='''Suppress the automatic reset of the manifest, west, and project - repositories when re-running 'west init' in an existing - installation to update the manifest or west URL/revision''') - - init_parser.add_argument( - 'directory', nargs='?', default=None, - help='''Directory to initialize West in. Missing directories will be - created automatically. 
(default: current directory)''') - - args = init_parser.parse_args(args=argv) - - try: - reinit(os.path.join(west_dir(args.directory), 'config'), args) - except WestNotFound: - bootstrap(args) - - -def bootstrap(args): - '''Bootstrap a new manifest + West installation.''' - - west_url = WEST_URL_DEFAULT - manifest_url = args.manifest_url or MANIFEST_URL_DEFAULT - - west_rev = WEST_REV_DEFAULT - manifest_rev = args.manifest_rev or MANIFEST_REV_DEFAULT - - directory = args.directory or os.getcwd() - - if not os.path.isdir(directory): - try: - print('Initializing in new directory', directory) - os.makedirs(directory, exist_ok=False) - except PermissionError: - sys.exit('Cannot initialize in {}: permission denied'.format( - directory)) - except FileExistsError: - sys.exit('Something else created {} concurrently; quitting'.format( - directory)) - except Exception as e: - sys.exit("Can't create directory {}: {}".format( - directory, e.args)) - else: - print('Initializing in', directory) - - # Clone the west source code and the manifest into west/. Git will create - # the west/ directory if it does not exist. - - clone('manifest repository', manifest_url, manifest_rev, - os.path.join(directory, WEST_DIR, MANIFEST)) - - # Parse the manifest and look for a section named "west" - manifest_file = os.path.join(directory, WEST_DIR, MANIFEST, 'default.yml') - with open(manifest_file, 'r') as f: - data = yaml.safe_load(f.read()) - - if 'west' in data: - wdata = data['west'] - try: - pykwalify.core.Core( - source_data=wdata, - schema_files=[_SCHEMA_PATH] - ).validate() - except pykwalify.errors.SchemaError as e: - sys.exit("Error: Failed to parse manifest file '{}': {}" - .format(manifest_file, e)) - - if 'url' in wdata: - west_url = wdata['url'] - if 'revision' in wdata: - west_rev = wdata['revision'] - - print("cloning {} at revision {}".format(west_url, west_rev)) - clone('west repository', west_url, west_rev, - os.path.join(directory, WEST_DIR, WEST)) - - # Create an initial configuration file - - config_path = os.path.join(directory, WEST_DIR, 'config') - update_conf(config_path, manifest_url, manifest_rev) - print('=== Initial configuration written to {} ==='.format(config_path)) - - # Create a dotfile to mark the installation. Hide it on Windows. - - with open(os.path.join(directory, WEST_DIR, WEST_MARKER), 'w') as f: - hide_file(f.name) - - print('=== West initialized. Now run "west clone" in {}. ==='. - format(directory)) - - -def reinit(config_path, args): - ''' - Reinitialize an existing installation. - - This updates the west/config configuration file, and optionally resets the - manifest, west, and project repositories to the new revision. - ''' - manifest_url = args.manifest_url - - if not (manifest_url or args.manifest_rev): - sys.exit('West already initialized. Please pass any settings you ' - 'want to change.') - - update_conf(config_path, manifest_url, args.manifest_rev) - - print('=== Updated configuration written to {} ==='.format(config_path)) - - if args.reset: - cmd = ['update', '--reset-manifest', '--reset-projects', - '--reset-west'] - print("=== Running 'west {}' to update repositories ===" - .format(' '.join(cmd))) - wrap(cmd) - - -def update_conf(config_path, manifest_url, manifest_rev): - ''' - Creates or updates the configuration file at 'config_path' with the - specified values. Values that are None/empty are ignored. 
- ''' - config = configparser.ConfigParser() - - # This is a no-op if the file doesn't exist, so no need to check - config.read(config_path) - - update_key(config, 'manifest', 'remote', manifest_url) - update_key(config, 'manifest', 'revision', manifest_rev) - - with open(config_path, 'w') as f: - config.write(f) - - -def update_key(config, section, key, value): - ''' - Updates 'key' in section 'section' in ConfigParser 'config', creating - 'section' if it does not exist. - - If value is None/empty, 'key' is left as-is. - ''' - if not value: - return - - if section not in config: - config[section] = {} - - config[section][key] = value - - -def hide_file(path): - '''Ensure path is a hidden file. - - On Windows, this uses attrib to hide the file manually. - - On UNIX systems, this just checks that the path's basename begins - with a period ('.'), for it to be hidden already. It's a fatal - error if it does not begin with a period in this case. - - On other systems, this just prints a warning. - ''' - system = platform.system() - - if system == 'Windows': - subprocess.check_call(['attrib', '+H', path]) - elif os.name == 'posix': # Try to check for all Unix, not just macOS/Linux - if not os.path.basename(path).startswith('.'): - sys.exit("internal error: {} can't be hidden on UNIX".format(path)) - else: - print("warning: unknown platform {}; {} may not be hidden" - .format(system, path), file=sys.stderr) - - -# -# Wrap a West command -# - -def append_to_pythonpath(directory): - pp = os.environ.get('PYTHONPATH') - os.environ['PYTHONPATH'] = ':'.join(([pp] if pp else []) + [directory]) - - -def wrap(argv): - printing_version = False - printing_help_only = False - - if argv: - if argv[0] in ('-V', '--version'): - print('West bootstrapper version: v{} ({})'. - format(version.__version__, os.path.dirname(__file__))) - printing_version = True - elif len(argv) == 1 and argv[0] in ('-h', '--help'): - # This only matters if we're called outside of an - # installation directory. We delegate to the main help if - # called from within one, because it includes a list of - # available commands, etc. - printing_help_only = True - - start = os.getcwd() - try: - topdir = west_topdir(start) - except WestNotFound: - if printing_version: - sys.exit(0) # run outside of an installation directory - elif printing_help_only: - # We call print multiple times here and below instead of using - # \n to be newline agnostic. - print('To set up a Zephyr installation here, run "west init".') - print('Run "west init -h" for additional information.') - sys.exit(0) - else: - print('Error: "{}" is not a Zephyr installation directory.'. - format(start), file=sys.stderr) - print('Things to try:', file=sys.stderr) - print(' - Run "west init" to set up an installation here.', - file=sys.stderr) - print(' - Run "west init -h" for additional information.', - file=sys.stderr) - sys.exit(1) - - west_git_repo = os.path.join(topdir, WEST_DIR, WEST) - if printing_version: - try: - git_describe = subprocess.check_output( - ['git', 'describe', '--tags'], - stderr=subprocess.DEVNULL, - cwd=west_git_repo).decode(sys.getdefaultencoding()).strip() - print('West repository version: {} ({})'.format(git_describe, - west_git_repo)) - except subprocess.CalledProcessError: - print('West repository version: unknown; no tags were found') - sys.exit(0) - - # Import the west package from the installation and run its main - # function with the given command-line arguments. 
- # - # This can't be done as a subprocess: that would break the - # runners' debug handling for GDB, which needs to block the usual - # control-C signal handling. GDB uses Ctrl-C to halt the debug - # target. So we really do need to import west and delegate within - # this bootstrap process. - # - # Put this at position 1 to make sure it comes before random stuff - # that might be on a developer's PYTHONPATH in the import order. - sys.path.insert(1, os.path.join(west_git_repo, 'src')) - import west.main - west.main.main(argv) - - -# -# Main entry point -# - - -def main(wrap_argv=None): - '''Entry point to the wrapper script.''' - if wrap_argv is None: - wrap_argv = sys.argv[1:] - - if not wrap_argv or wrap_argv[0] != 'init': - wrap(wrap_argv) - else: - init(wrap_argv[1:]) - sys.exit(0) - - -if __name__ == '__main__': - main() diff --git a/scripts/meta/west/_bootstrap/version.py b/scripts/meta/west/_bootstrap/version.py deleted file mode 100644 index 5c057ff9376..00000000000 --- a/scripts/meta/west/_bootstrap/version.py +++ /dev/null @@ -1,5 +0,0 @@ -# Don't put anything else in here! -# -# This is the Python 3 version of option 3 in: -# https://packaging.python.org/guides/single-sourcing-package-version/#single-sourcing-the-version -__version__ = '0.4.1' diff --git a/scripts/meta/west/_bootstrap/west-schema.yml b/scripts/meta/west/_bootstrap/west-schema.yml deleted file mode 100644 index 78b44dae017..00000000000 --- a/scripts/meta/west/_bootstrap/west-schema.yml +++ /dev/null @@ -1,17 +0,0 @@ -## A pykwalify schema for basic validation of the structure of a -## west YAML file. (Full validation would require additional work, -## e.g. to validate that remote URLs obey the URL format specified in -## rfc1738.) -## - -# The top-level west yaml is a map. The only top-level element is -# 'west'. All other elements are contained within it. This allows -# us a bit of future-proofing. -type: map -mapping: - url: - required: false - type: str - revision: - required: false - type: str diff --git a/scripts/meta/west/build.py b/scripts/meta/west/build.py deleted file mode 100644 index bc41dab3097..00000000000 --- a/scripts/meta/west/build.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2018 (c) Foundries.io. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Common definitions for building Zephyr applications. - -This provides some default settings and convenience wrappers for -building Zephyr applications needed by multiple commands. - -See west.cmd.build for the build command itself. -''' - -from west import cmake -from west import log - -DEFAULT_BUILD_DIR = 'build' -'''Name of the default Zephyr build directory.''' - -DEFAULT_CMAKE_GENERATOR = 'Ninja' -'''Name of the default CMake generator.''' - - -def is_zephyr_build(path): - '''Return true if and only if `path` appears to be a valid Zephyr - build directory. - - "Valid" means the given path is a directory which contains a CMake - cache with a 'ZEPHYR_TOOLCHAIN_VARIANT' key. 
- ''' - try: - cache = cmake.CMakeCache.from_build_dir(path) - except FileNotFoundError: - cache = {} - - if 'ZEPHYR_TOOLCHAIN_VARIANT' in cache: - log.dbg('{} is a zephyr build directory'.format(path), - level=log.VERBOSE_EXTREME) - return True - else: - log.dbg('{} is NOT a valid zephyr build directory'.format(path), - level=log.VERBOSE_EXTREME) - return False diff --git a/scripts/meta/west/cmake.py b/scripts/meta/west/cmake.py deleted file mode 100644 index 37452e306ed..00000000000 --- a/scripts/meta/west/cmake.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright (c) 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Helpers for dealing with CMake''' - -from collections import OrderedDict -import os.path -import re -import subprocess -import shutil - -from west import log -from west.util import quote_sh_list - -__all__ = ['run_cmake', 'run_build', - 'make_c_identifier', - 'CMakeCacheEntry', 'CMakeCache'] - -DEFAULT_CACHE = 'CMakeCache.txt' - - -def run_cmake(args, quiet=False): - '''Run cmake to (re)generate a build system''' - cmake = shutil.which('cmake') - if cmake is None: - log.die('CMake is not installed or cannot be found; cannot build.') - cmd = [cmake] + args - kwargs = dict() - if quiet: - kwargs['stdout'] = subprocess.DEVNULL - kwargs['stderr'] = subprocess.STDOUT - log.dbg('Running CMake:', cmd, level=log.VERBOSE_VERY) - log.dbg('As command:', quote_sh_list(cmd), level=log.VERBOSE_VERY) - subprocess.check_call(cmd, **kwargs) - - -def run_build(build_directory, extra_args=(), quiet=False): - '''Run cmake in build tool mode in `build_directory`''' - run_cmake(['--build', build_directory] + list(extra_args), quiet=quiet) - - -def make_c_identifier(string): - '''Make a C identifier from a string in the same way CMake does. - ''' - # The behavior of CMake's string(MAKE_C_IDENTIFIER ...) is not - # precisely documented. This behavior matches the test case - # that introduced the function: - # - # https://gitlab.kitware.com/cmake/cmake/commit/0ab50aea4c4d7099b339fb38b4459d0debbdbd85 - ret = [] - - alpha_under = re.compile('[A-Za-z_]') - alpha_num_under = re.compile('[A-Za-z0-9_]') - - if not alpha_under.match(string): - ret.append('_') - for c in string: - if alpha_num_under.match(c): - ret.append(c) - else: - ret.append('_') - - return ''.join(ret) - - -class CMakeCacheEntry: - '''Represents a CMake cache entry. - - This class understands the type system in a CMakeCache.txt, and - converts the following cache types to Python types: - - Cache Type Python type - ---------- ------------------------------------------- - FILEPATH str - PATH str - STRING str OR list of str (if ';' is in the value) - BOOL bool - INTERNAL str OR list of str (if ';' is in the value) - ---------- ------------------------------------------- - ''' - - # Regular expression for a cache entry. - # - # CMake variable names can include escape characters, allowing a - # wider set of names than is easy to match with a regular - # expresion. To be permissive here, use a non-greedy match up to - # the first colon (':'). This breaks if the variable name has a - # colon inside, but it's good enough. - CACHE_ENTRY = re.compile( - r'''(?P.*?) # name - :(?PFILEPATH|PATH|STRING|BOOL|INTERNAL) # type - =(?P.*) # value - ''', re.X) - - @classmethod - def _to_bool(cls, val): - # Convert a CMake BOOL string into a Python bool. - # - # "True if the constant is 1, ON, YES, TRUE, Y, or a - # non-zero number. 
False if the constant is 0, OFF, NO, - # FALSE, N, IGNORE, NOTFOUND, the empty string, or ends in - # the suffix -NOTFOUND. Named boolean constants are - # case-insensitive. If the argument is not one of these - # constants, it is treated as a variable." - # - # https://cmake.org/cmake/help/v3.0/command/if.html - val = val.upper() - if val in ('ON', 'YES', 'TRUE', 'Y'): - return True - elif val in ('OFF', 'NO', 'FALSE', 'N', 'IGNORE', 'NOTFOUND', ''): - return False - elif val.endswith('-NOTFOUND'): - return False - else: - try: - v = int(val) - return v != 0 - except ValueError as exc: - raise ValueError('invalid bool {}'.format(val)) from exc - - @classmethod - def from_line(cls, line, line_no): - # Comments can only occur at the beginning of a line. - # (The value of an entry could contain a comment character). - if line.startswith('//') or line.startswith('#'): - return None - - # Whitespace-only lines do not contain cache entries. - if not line.strip(): - return None - - m = cls.CACHE_ENTRY.match(line) - if not m: - return None - - name, type_, value = (m.group(g) for g in ('name', 'type', 'value')) - if type_ == 'BOOL': - try: - value = cls._to_bool(value) - except ValueError as exc: - args = exc.args + ('on line {}: {}'.format(line_no, line),) - raise ValueError(args) from exc - elif type_ == 'STRING' or type_ == 'INTERNAL': - # If the value is a CMake list (i.e. is a string which - # contains a ';'), convert to a Python list. - if ';' in value: - value = value.split(';') - - return CMakeCacheEntry(name, value) - - def __init__(self, name, value): - self.name = name - self.value = value - - def __str__(self): - fmt = 'CMakeCacheEntry(name={}, value={})' - return fmt.format(self.name, self.value) - - -class CMakeCache: - '''Parses and represents a CMake cache file.''' - - @staticmethod - def from_build_dir(build_dir): - return CMakeCache(os.path.join(build_dir, DEFAULT_CACHE)) - - def __init__(self, cache_file): - self.cache_file = cache_file - self.load(cache_file) - - def load(self, cache_file): - entries = [] - with open(cache_file, 'r') as cache: - for line_no, line in enumerate(cache): - entry = CMakeCacheEntry.from_line(line, line_no) - if entry: - entries.append(entry) - self._entries = OrderedDict((e.name, e) for e in entries) - - def get(self, name, default=None): - entry = self._entries.get(name) - if entry is not None: - return entry.value - else: - return default - - def get_list(self, name, default=None): - if default is None: - default = [] - entry = self._entries.get(name) - if entry is not None: - value = entry.value - if isinstance(value, list): - return value - elif isinstance(value, str): - return [value] if value else [] - else: - msg = 'invalid value {} type {}' - raise RuntimeError(msg.format(value, type(value))) - else: - return default - - def __contains__(self, name): - return name in self._entries - - def __getitem__(self, name): - return self._entries[name].value - - def __setitem__(self, name, entry): - if not isinstance(entry, CMakeCacheEntry): - msg = 'improper type {} for value {}, expecting CMakeCacheEntry' - raise TypeError(msg.format(type(entry), entry)) - self._entries[name] = entry - - def __delitem__(self, name): - del self._entries[name] - - def __iter__(self): - return iter(self._entries.values()) diff --git a/scripts/meta/west/commands/__init__.py b/scripts/meta/west/commands/__init__.py deleted file mode 100644 index 769757140d2..00000000000 --- a/scripts/meta/west/commands/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2018 Open 
Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''West's commands subpackage. - -All commands should be implemented within modules in this package. -''' - -from abc import ABC, abstractmethod - -__all__ = ['CommandContextError', 'WestCommand'] - - -class CommandContextError(RuntimeError): - '''Indicates that a context-dependent command could not be run.''' - - -class WestCommand(ABC): - '''Abstract superclass for a west command. - - All top-level commands supported by west implement this interface.''' - - def __init__(self, name, description, accepts_unknown_args=False): - '''Create a command instance. - - `name`: the command's name, as entered by the user. - `description`: one-line command description to show to the user. - - `accepts_unknown_args`: if true, the command can handle - arbitrary unknown command line arguments in its run() - method. Otherwise, passing unknown arguments will cause - UnknownArgumentsError to be raised. - ''' - self.name = name - self.description = description - self._accept_unknown = accepts_unknown_args - - def run(self, args, unknown): - '''Run the command. - - `args`: known arguments parsed via `register_arguments()` - `unknown`: unknown arguments present on the command line - ''' - if unknown and not self._accept_unknown: - self.parser.error('unexpected arguments: {}'.format(unknown)) - self.do_run(args, unknown) - - def add_parser(self, parser_adder): - '''Registers a parser for this command, and returns it. - ''' - self.parser = self.do_add_parser(parser_adder) - return self.parser - - # - # Mandatory subclass hooks - # - - @abstractmethod - def do_add_parser(self, parser_adder): - '''Subclass method for registering command line arguments. - - `parser_adder` is an argparse argument subparsers adder.''' - - @abstractmethod - def do_run(self, args, unknown): - '''Subclasses must implement; called when the command is run. - - `args` is the namespace of parsed known arguments. - - If `accepts_unknown_args` was False when constructing this - object, `unknown` will be empty. Otherwise, it is an iterable - containing all unknown arguments present on the command line. - ''' diff --git a/scripts/meta/west/commands/build.py b/scripts/meta/west/commands/build.py deleted file mode 100644 index a7405e9a63c..00000000000 --- a/scripts/meta/west/commands/build.py +++ /dev/null @@ -1,288 +0,0 @@ -# Copyright (c) 2018 Foundries.io -# -# SPDX-License-Identifier: Apache-2.0 - -import argparse -import os - -from west import log -from west import cmake -from west.build import DEFAULT_BUILD_DIR, DEFAULT_CMAKE_GENERATOR, \ - is_zephyr_build -from west.commands import WestCommand - -BUILD_HELP = '''\ -Convenience wrapper for building Zephyr applications. - -This command attempts to do what you mean when run from a Zephyr -application source or a pre-existing build directory: - -- When "west build" is run from a Zephyr build directory, the source - directory is obtained from the CMake cache, and that build directory - is re-compiled. - -- Otherwise, the source directory defaults to the current working - directory, so running "west build" from a Zephyr application's - source directory compiles it. - -The source and build directories can be explicitly set with the ---source-dir and --build-dir options. The build directory defaults to -'build' if it is not auto-detected. The build directory is always -created if it does not exist. - -This command runs CMake to generate a build system if one is not -present in the build directory, then builds the application. 
-Subsequent builds try to avoid re-running CMake; you can force it -to run by setting --cmake. - -To pass additional options to CMake, give them as extra arguments -after a '--' For example, "west build -- -DOVERLAY_CONFIG=some.conf" sets -an overlay config file. (Doing this forces a CMake run.)''' - - -class Build(WestCommand): - - def __init__(self): - super(Build, self).__init__( - 'build', - BUILD_HELP, - accepts_unknown_args=False) - - self.source_dir = None - '''Source directory for the build, or None on error.''' - - self.build_dir = None - '''Final build directory used to run the build, or None on error.''' - - self.created_build_dir = False - '''True if the build directory was created; False otherwise.''' - - self.run_cmake = False - '''True if CMake was run; False otherwise. - - Note: this only describes CMake runs done by this command. The - build system generated by CMake may also update itself due to - internal logic.''' - - self.cmake_cache = None - '''Final parsed CMake cache for the build, or None on error.''' - - def do_add_parser(self, parser_adder): - parser = parser_adder.add_parser( - self.name, - formatter_class=argparse.RawDescriptionHelpFormatter, - description=self.description) - - # Remember to update scripts/west-completion.bash if you add or remove - # flags - - parser.add_argument('-b', '--board', - help='''Board to build for (must be given for the - first build, can be omitted later)''') - parser.add_argument('-s', '--source-dir', - help='''Explicitly set the source directory. - If not given and rebuilding an existing Zephyr - build directory, this is taken from the CMake - cache. Otherwise, the current directory is - assumed.''') - parser.add_argument('-d', '--build-dir', - help='''Explicitly sets the build directory. - If not given and the current directory is a Zephyr - build directory, it will be used; otherwise, "{}" - is assumed. The directory will be created if - it doesn't exist.'''.format(DEFAULT_BUILD_DIR)) - parser.add_argument('-t', '--target', - help='''Override the build system target (e.g. - 'clean', 'pristine', etc.)''') - parser.add_argument('-c', '--cmake', action='store_true', - help='Force CMake to run') - parser.add_argument('-f', '--force', action='store_true', - help='Ignore any errors and try to build anyway') - parser.add_argument('cmake_opts', nargs='*', metavar='cmake_opt', - help='Extra option to pass to CMake; implies -c') - - return parser - - def do_run(self, args, ignored): - self.args = args # Avoid having to pass them around - log.dbg('args:', args, level=log.VERBOSE_EXTREME) - self._sanity_precheck() - self._setup_build_dir() - if is_zephyr_build(self.build_dir): - self._update_cache() - if self.args.cmake or self.args.cmake_opts: - self.run_cmake = True - else: - self.run_cmake = True - self._setup_source_dir() - self._sanity_check() - - log.inf('source directory: {}'.format(self.source_dir), colorize=True) - log.inf('build directory: {}{}'. 
- format(self.build_dir, - (' (created)' if self.created_build_dir - else '')), - colorize=True) - if self.cmake_cache: - board = self.cmake_cache.get('CACHED_BOARD') - elif self.args.board: - board = self.args.board - else: - board = 'UNKNOWN' # shouldn't happen - log.inf('BOARD:', board, colorize=True) - - self._run_cmake(self.args.cmake_opts) - self._sanity_check() - self._update_cache() - - extra_args = ['--target', args.target] if args.target else [] - cmake.run_build(self.build_dir, extra_args=extra_args) - - def _sanity_precheck(self): - app = self.args.source_dir - if app: - if not os.path.isdir(app): - self._check_force('source directory {} does not exist'. - format(app)) - elif 'CMakeLists.txt' not in os.listdir(app): - self._check_force("{} doesn't contain a CMakeLists.txt". - format(app)) - - def _update_cache(self): - try: - self.cmake_cache = cmake.CMakeCache.from_build_dir(self.build_dir) - except FileNotFoundError: - pass - - def _setup_build_dir(self): - # Initialize build_dir and created_build_dir attributes. - log.dbg('setting up build directory', level=log.VERBOSE_EXTREME) - if self.args.build_dir: - build_dir = self.args.build_dir - else: - cwd = os.getcwd() - if is_zephyr_build(cwd): - build_dir = cwd - else: - build_dir = DEFAULT_BUILD_DIR - build_dir = os.path.abspath(build_dir) - - if os.path.exists(build_dir): - if not os.path.isdir(build_dir): - log.die('build directory {} exists and is not a directory'. - format(build_dir)) - else: - os.makedirs(build_dir, exist_ok=False) - self.created_build_dir = True - self.run_cmake = True - - self.build_dir = build_dir - - def _setup_source_dir(self): - # Initialize source_dir attribute, either from command line argument, - # implicitly from the build directory's CMake cache, or using the - # default (current working directory). - log.dbg('setting up source directory', level=log.VERBOSE_EXTREME) - if self.args.source_dir: - source_dir = self.args.source_dir - elif self.cmake_cache: - source_dir = self.cmake_cache.get('APPLICATION_SOURCE_DIR') - if not source_dir: - # Maybe Zephyr changed the key? Give the user a way - # to retry, at least. - log.die("can't determine application from build directory " - "{}, please specify an application to build". - format(self.build_dir)) - else: - source_dir = os.getcwd() - self.source_dir = os.path.abspath(source_dir) - - def _sanity_check(self): - # Sanity check the build configuration. - # Side effect: may update cmake_cache attribute. - log.dbg('sanity checking the build', level=log.VERBOSE_EXTREME) - if self.source_dir == self.build_dir: - # There's no forcing this. - log.die('source and build directory {} cannot be the same; ' - 'use --build-dir {} to specify a build directory'. - format(self.source_dir, self.build_dir)) - - srcrel = os.path.relpath(self.source_dir) - if is_zephyr_build(self.source_dir): - self._check_force('it looks like {srcrel} is a build directory: ' - 'did you mean -build-dir {srcrel} instead?'. - format(srcrel=srcrel)) - elif 'CMakeLists.txt' not in os.listdir(self.source_dir): - self._check_force('source directory "{srcrel}" does not contain ' - 'a CMakeLists.txt; is that really what you ' - 'want to build? (Use -s SOURCE_DIR to specify ' - 'the application source directory)'. - format(srcrel=srcrel)) - - if not is_zephyr_build(self.build_dir) and not self.args.board: - self._check_force('this looks like a new or clean build, ' - 'please provide --board') - - if not self.cmake_cache: - return # That's all we can check without a cache. 
- - cached_app = self.cmake_cache.get('APPLICATION_SOURCE_DIR') - log.dbg('APPLICATION_SOURCE_DIR:', cached_app, - level=log.VERBOSE_EXTREME) - source_abs = (os.path.abspath(self.args.source_dir) - if self.args.source_dir else None) - cached_abs = os.path.abspath(cached_app) if cached_app else None - if cached_abs and source_abs and source_abs != cached_abs: - self._check_force('build directory "{}" is for application "{}", ' - 'but source directory "{}" was specified; ' - 'please clean it or use --build-dir to set ' - 'another build directory'. - format(self.build_dir, cached_abs, - source_abs)) - self.run_cmake = True # If they insist, we need to re-run cmake. - - cached_board = self.cmake_cache.get('CACHED_BOARD') - log.dbg('CACHED_BOARD:', cached_board, level=log.VERBOSE_EXTREME) - if not cached_board and not self.args.board: - if self.created_build_dir: - self._check_force( - 'Building for the first time: you must provide --board') - else: - self._check_force( - 'Board is missing or unknown, please provide --board') - if self.args.board and cached_board and \ - self.args.board != cached_board: - self._check_force('Build directory {} targets board {}, ' - 'but board {} was specified. (Clean that ' - 'directory or use --build-dir to specify ' - 'a different one.)'. - format(self.build_dir, cached_board, - self.args.board)) - - def _check_force(self, msg): - if not self.args.force: - log.err(msg) - log.die('refusing to proceed without --force due to above error') - - def _run_cmake(self, cmake_opts): - if not self.run_cmake: - log.dbg('not running cmake; build system is present') - return - - # It's unfortunate to have to use the undocumented -B and -H - # options to set the source and binary directories. - # - # However, it's the only known way to set that directory and - # run CMake from the current working directory. This is - # important because users expect invocations like this to Just - # Work: - # - # west build -- -DOVERLAY_CONFIG=relative-path.conf - final_cmake_args = ['-B{}'.format(self.build_dir), - '-H{}'.format(self.source_dir), - '-G{}'.format(DEFAULT_CMAKE_GENERATOR)] - if self.args.board: - final_cmake_args.append('-DBOARD={}'.format(self.args.board)) - if cmake_opts: - final_cmake_args.extend(cmake_opts) - cmake.run_cmake(final_cmake_args) diff --git a/scripts/meta/west/commands/debug.py b/scripts/meta/west/commands/debug.py deleted file mode 100644 index 7ce3c1bb480..00000000000 --- a/scripts/meta/west/commands/debug.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (c) 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''west "debug" and "debugserver" commands.''' - -from textwrap import dedent - -from west.commands.run_common import desc_common, add_parser_common, \ - do_run_common -from west.commands import WestCommand - - -class Debug(WestCommand): - - def __init__(self): - super(Debug, self).__init__( - 'debug', - dedent(''' - Connect to the board, program the flash, and start a - debugging session.\n\n''') + - desc_common('debug'), - accepts_unknown_args=True) - - def do_add_parser(self, parser_adder): - return add_parser_common(parser_adder, self) - - def do_run(self, my_args, runner_args): - do_run_common(self, my_args, runner_args, - 'ZEPHYR_BOARD_DEBUG_RUNNER') - - -class DebugServer(WestCommand): - - def __init__(self): - super(DebugServer, self).__init__( - 'debugserver', - dedent(''' - Connect to the board and accept debug networking connections. 
- - The debug server binds to a known port, and allows client software - started elsewhere to connect to it and debug the running - Zephyr image.\n\n''') + - desc_common('debugserver'), - accepts_unknown_args=True) - - def do_add_parser(self, parser_adder): - return add_parser_common(parser_adder, self) - - def do_run(self, my_args, runner_args): - do_run_common(self, my_args, runner_args, - 'ZEPHYR_BOARD_DEBUG_RUNNER') - -class Attach(WestCommand): - - def __init__(self): - super(Attach, self).__init__( - 'attach', - dedent(''' - Connect to the board without programming the flash, and - start a debugging session.\n\n''') + - desc_common('attach'), - accepts_unknown_args=True) - - def do_add_parser(self, parser_adder): - return add_parser_common(parser_adder, self) - - def do_run(self, my_args, runner_args): - do_run_common(self, my_args, runner_args, - 'ZEPHYR_BOARD_DEBUG_RUNNER') diff --git a/scripts/meta/west/commands/flash.py b/scripts/meta/west/commands/flash.py deleted file mode 100644 index 72b716a0ff4..00000000000 --- a/scripts/meta/west/commands/flash.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''west "flash" command''' - -from west.commands.run_common import desc_common, add_parser_common, \ - do_run_common -from west.commands import WestCommand - - -class Flash(WestCommand): - - def __init__(self): - super(Flash, self).__init__( - 'flash', - 'Flash and run a binary on a board.\n\n' + - desc_common('flash'), - accepts_unknown_args=True) - - def do_add_parser(self, parser_adder): - return add_parser_common(parser_adder, self) - - def do_run(self, my_args, runner_args): - do_run_common(self, my_args, runner_args, - 'ZEPHYR_BOARD_FLASH_RUNNER') diff --git a/scripts/meta/west/commands/project.py b/scripts/meta/west/commands/project.py deleted file mode 100644 index 6dc325861b7..00000000000 --- a/scripts/meta/west/commands/project.py +++ /dev/null @@ -1,1143 +0,0 @@ -# Copyright (c) 2018, Nordic Semiconductor ASA -# -# SPDX-License-Identifier: Apache-2.0 - -'''West project commands''' - -import argparse -import collections -import os -import shutil -import subprocess -import textwrap - -from west.config import config -from west import log -from west import util -from west.commands import WestCommand -from west.manifest import default_path, SpecialProject, \ - Manifest, MalformedManifest, META_NAMES - - -# Branch that points to the revision specified in the manifest (which might be -# an SHA). Local branches created with 'west branch' are set to track this -# branch. -_MANIFEST_REV_BRANCH = 'manifest-rev' - - -class List(WestCommand): - def __init__(self): - super().__init__( - 'list', - _wrap(''' - List projects. - - Individual projects can be specified by name. - - By default, lists all project names in the manifest, along with - each project's path, revision, URL, and whether it has been cloned. - - The west and manifest repositories in the top-level west directory - are not included by default. Use --all or the special project - names "west" and "manifest" to include them.''')) - - def do_add_parser(self, parser_adder): - default_fmt = '{name:14} {path:18} {revision:13} {url} {cloned}' - return _add_parser( - parser_adder, self, - _arg('-a', '--all', action='store_true', - help='''Do not ignore repositories in west/ (i.e. west and the - manifest) in the output. Since these are not part of - the manifest, some of their format values (like "revision") - come from other sources. 
The behavior of this option is - modeled after the Unix ls -a option.'''), - _arg('-f', '--format', default=default_fmt, - help='''Format string to use to list each project; see - FORMAT STRINGS below.'''), - _project_list_arg, - epilog=textwrap.dedent('''\ - FORMAT STRINGS - - Projects are listed using a Python 3 format string. Arguments - to the format string are accessed by name. - - The default format string is: - - "{}" - - The following arguments are available: - - - name: project name in the manifest - - url: full remote URL as specified by the manifest - - path: the relative path to the project from the top level, - as specified in the manifest where applicable - - abspath: absolute and normalized path to the project - - revision: project's manifest revision - - cloned: "(cloned)" if the project has been cloned, "(not cloned)" - otherwise - - clone_depth: project clone depth if specified, "None" otherwise - '''.format(default_fmt))) - - def do_run(self, args, user_args): - # We should only list the meta projects if they were explicitly - # given by name, or --all was given. - list_meta = bool(args.projects) or args.all - - for project in _projects(args, include_meta=True): - if project.name in META_NAMES and not list_meta: - continue - - # Spelling out the format keys explicitly here gives us - # future-proofing if the internal Project representation - # ever changes. - try: - result = args.format.format( - name=project.name, - url=project.url, - path=project.path, - abspath=project.abspath, - revision=project.revision, - cloned="(cloned)" if _cloned(project) else "(not cloned)", - clone_depth=project.clone_depth or "None") - except KeyError as e: - # The raised KeyError seems to just put the first - # invalid argument in the args tuple, regardless of - # how many unrecognizable keys there were. - log.die('unknown key "{}" in format string "{}"'. - format(e.args[0], args.format)) - - log.inf(result) - - -class Clone(WestCommand): - def __init__(self): - super().__init__( - 'clone', - _wrap(''' - Clone projects. - - Clones each of the specified projects (default: all projects) and - creates a branch in each. The branch is named after the project's - revision, and tracks the 'manifest-rev' branch (see below). - - If the project's revision is an SHA, the branch will simply be - called 'work'. - - This command is really just a shorthand for 'west fetch' + - 'west checkout -b '. If you clone a project with - 'west fetch' instead, you will start in a detached HEAD state at - 'manifest-rev'. - - {} - - {}'''.format(_NO_UPDATE_HELP, _MANIFEST_REV_HELP))) - - def do_add_parser(self, parser_adder): - return _add_parser( - parser_adder, self, - _arg('-b', - dest='branch', - metavar='BRANCH_NAME', - help='an alternative branch name to use, instead of one ' - 'based on the revision'), - _no_update_arg, - _project_list_arg) - - def do_run(self, args, user_args): - if args.update: - _update_manifest(args) - _update_west(args) - - for project in _projects(args, listed_must_be_cloned=False): - if args.branch: - branch = args.branch - elif _is_sha(project.revision): - # Don't name the branch after an SHA - branch = 'work' - else: - # Use the last component of the revision, in case it is a - # qualified ref (refs/heads/foo and the like) - branch = project.revision.split('/')[-1] - - _fetch(project) - _create_branch(project, branch) - _checkout(project, branch) - - -class Fetch(WestCommand): - def __init__(self): - super().__init__( - 'fetch', - _wrap(''' - Fetch projects. 
- - Fetches upstream changes in each of the specified projects - (default: all projects). Repositories that do not already exist are - cloned. - - {} - - {}'''.format(_NO_UPDATE_HELP, _MANIFEST_REV_HELP))) - - def do_add_parser(self, parser_adder): - return _add_parser(parser_adder, self, _no_update_arg, - _project_list_arg) - - def do_run(self, args, user_args): - if args.update: - _update_manifest(args) - _update_west(args) - - for project in _projects(args, listed_must_be_cloned=False): - _fetch(project) - - -class Pull(WestCommand): - def __init__(self): - super().__init__( - 'pull', - _wrap(''' - Clone/fetch and rebase projects. - - Fetches upstream changes in each of the specified projects - (default: all projects) and rebases the checked-out branch (or - detached HEAD state) on top of '{}', effectively bringing the - branch up to date. Repositories that do not already exist are - cloned. - - {} - - {}'''.format(_MANIFEST_REV_BRANCH, _NO_UPDATE_HELP, - _MANIFEST_REV_HELP))) - - def do_add_parser(self, parser_adder): - return _add_parser(parser_adder, self, _no_update_arg, - _project_list_arg) - - def do_run(self, args, user_args): - if args.update: - _update_manifest(args) - _update_west(args) - - for project in _projects(args, listed_must_be_cloned=False): - _fetch(project) - _rebase(project) - - -class Rebase(WestCommand): - def __init__(self): - super().__init__( - 'rebase', - _wrap(''' - Rebase projects. - - Rebases the checked-out branch (or detached HEAD) on top of '{}' in - each of the specified projects (default: all cloned projects), - effectively bringing the branch up to date. - - '''.format(_MANIFEST_REV_BRANCH) + _MANIFEST_REV_HELP)) - - def do_add_parser(self, parser_adder): - return _add_parser(parser_adder, self, _project_list_arg) - - def do_run(self, args, user_args): - for project in _cloned_projects(args): - _rebase(project) - - -class Branch(WestCommand): - def __init__(self): - super().__init__( - 'branch', - _wrap(''' - Create a branch or list branches, in multiple projects. - - Creates a branch in each of the specified projects (default: all - cloned projects). The new branches are set to track '{}'. - - With no arguments, lists all local branches along with the - repositories they appear in. - - '''.format(_MANIFEST_REV_BRANCH) + _MANIFEST_REV_HELP)) - - def do_add_parser(self, parser_adder): - return _add_parser( - parser_adder, self, - _arg('branch', nargs='?', metavar='BRANCH_NAME'), - _project_list_arg) - - def do_run(self, args, user_args): - if args.branch: - # Create a branch in the specified projects - for project in _cloned_projects(args): - _create_branch(project, args.branch) - else: - # No arguments. List local branches from all cloned projects along - # with the projects they appear in. - - branch2projs = collections.defaultdict(list) - for project in _cloned_projects(args): - for branch in _branches(project): - branch2projs[branch].append(project.name) - - for branch, projs in sorted(branch2projs.items()): - log.inf('{:18} {}'.format(branch, ", ".join(projs))) - - -class Checkout(WestCommand): - def __init__(self): - super().__init__( - 'checkout', - _wrap(''' - Check out local branch. - - Checks out a local branch in each of the specified projects - (default: all cloned projects). Projects that do not have the - branch are left alone. - - Note: To check out remote branches, use ordinary Git commands - inside the repositories. 
This command is meant for switching - between work branches that span multiple repositories, without any - interference from whatever remote branches might exist. - - If '-b BRANCH_NAME' is passed, the new branch will be set to track - '{}', like for 'west branch BRANCH_NAME'. - '''.format(_MANIFEST_REV_BRANCH))) - - def do_add_parser(self, parser_adder): - return _add_parser( - parser_adder, self, - _arg('-b', - dest='create_branch', - action='store_true', - help='create the branch before checking it out'), - _arg('branch', metavar='BRANCH_NAME'), - _project_list_arg) - - def do_run(self, args, user_args): - branch_exists = False - - for project in _cloned_projects(args): - if args.create_branch: - _create_branch(project, args.branch) - _checkout(project, args.branch) - branch_exists = True - elif _has_branch(project, args.branch): - _checkout(project, args.branch) - branch_exists = True - - if not branch_exists: - msg = 'No branch {} exists in any '.format(args.branch) - if args.projects: - log.die(msg + 'of the listed projects') - else: - log.die(msg + 'cloned project') - - -class Diff(WestCommand): - def __init__(self): - super().__init__( - 'diff', - _wrap(''' - 'git diff' projects. - - Runs 'git diff' for each of the specified projects (default: all - cloned projects). - - Extra arguments are passed as-is to 'git diff'. - '''), - accepts_unknown_args=True) - - def do_add_parser(self, parser_adder): - return _add_parser(parser_adder, self, _project_list_arg) - - def do_run(self, args, user_args): - for project in _cloned_projects(args): - # Use paths that are relative to the base directory to make it - # easier to see where the changes are - _git(project, 'diff --src-prefix={path}/ --dst-prefix={path}/', - extra_args=user_args) - - -class Status(WestCommand): - def __init__(self): - super().__init__( - 'status', - _wrap(''' - Runs 'git status' for each of the specified projects (default: all - cloned projects). Extra arguments are passed as-is to 'git status'. - '''), - accepts_unknown_args=True) - - def do_add_parser(self, parser_adder): - return _add_parser(parser_adder, self, _project_list_arg) - - def do_run(self, args, user_args): - for project in _cloned_projects(args): - _inf(project, 'status of {name_and_path}') - _git(project, 'status', extra_args=user_args) - - -class Update(WestCommand): - def __init__(self): - super().__init__( - 'update', - _wrap(''' - Updates the manifest repository and/or the West source code - repository. The remote to update from is taken from the - manifest.remote and manifest.remote configuration settings, and the - revision from manifest.revision and west.revision configuration - settings. - - There is normally no need to run this command manually, because - 'west fetch' and 'west pull' automatically update the West and - manifest repositories to the latest version before doing anything - else. - - Pass --update-west or --update-manifest to update just that - repository. With no arguments, both are updated. - - Updates are skipped (with a warning) if they can't be done via - fast-forward, unless --reset-manifest, --reset-west, or - --reset-projects is given. 
- ''')) - - def do_add_parser(self, parser_adder): - return _add_parser( - parser_adder, self, - _arg('--update-west', - dest='update_west', - action='store_true', - help='update the west source code repository'), - _arg('--update-manifest', - dest='update_manifest', - action='store_true', - help='update the manifest repository'), - _arg('--reset-west', - action='store_true', - help='''Like --update-west, but run 'git reset --keep' - afterwards to reset the west repository to the commit - pointed at by the west.remote and west.revision - configuration settings. This is used internally when - changing west.remote or west.revision via - 'west init'.'''), - _arg('--reset-manifest', - action='store_true', - help='''like --reset-west, for the manifest repository, using - manifest.remote and manifest.revision.'''), - _arg('--reset-projects', - action='store_true', - help='''Fetches upstream data in all projects, then runs 'git - reset --keep' to reset them to the manifest revision. - This is used internally when changing manifest.remote or - manifest.revision via 'west init'.''')) - - def do_run(self, args, user_args): - if not (args.update_manifest or args.reset_manifest or - args.update_west or args.reset_west or - args.reset_projects): - - # No arguments is an alias for --update-west --update-manifest - _update_manifest(args) - _update_west(args) - return - - if args.reset_manifest: - _update_and_reset_special(args, 'manifest') - elif args.update_manifest: - _update_manifest(args) - - if args.reset_west: - _update_and_reset_special(args, 'west') - elif args.update_west: - _update_west(args) - - if args.reset_projects: - _reset_projects(args) - - -class ForAll(WestCommand): - def __init__(self): - super().__init__( - 'forall', - _wrap(''' - Runs a shell (Linux) or batch (Windows) command within the - repository of each of the specified projects (default: all cloned - projects). Note that you have to quote the command if it consists - of more than one word, to prevent the shell you use to run 'west' - from splitting it up. - - Since the command is run through the shell, you can use wildcards - and the like. 
- - For example, the following command will list the contents of - proj-1's and proj-2's repositories on Linux, in long form: - - west forall -c 'ls -l' proj-1 proj-2 - ''')) - - def do_add_parser(self, parser_adder): - return _add_parser( - parser_adder, self, - _arg('-c', - dest='command', - metavar='COMMAND', - required=True), - _project_list_arg) - - def do_run(self, args, user_args): - for project in _cloned_projects(args): - _inf(project, "Running '{}' in {{name_and_path}}" - .format(args.command)) - - subprocess.Popen(args.command, shell=True, cwd=project.abspath) \ - .wait() - - -def _arg(*args, **kwargs): - # Helper for creating a new argument parser for a single argument, - # later passed in parents= to add_parser() - - parser = argparse.ArgumentParser(add_help=False) - parser.add_argument(*args, **kwargs) - return parser - - -# Arguments shared between more than one command - -_manifest_arg = _arg( - '-m', '--manifest', - help='path to manifest file (default: west/manifest/default.yml)') - -# For 'fetch' and 'pull' -_no_update_arg = _arg( - '--no-update', - dest='update', - action='store_false', - help='do not update the manifest or West before fetching project data') - -# List of projects -_project_list_arg = _arg('projects', metavar='PROJECT', nargs='*') - - -def _add_parser(parser_adder, cmd, *extra_args, **kwargs): - # Adds and returns a subparser for the project-related WestCommand 'cmd'. - # All of these commands (currently) take the manifest path flag, so it's - # provided by default here, but any defaults can be overridden with kwargs. - - if 'description' not in kwargs: - kwargs['description'] = cmd.description - if 'formatter_class' not in kwargs: - kwargs['formatter_class'] = argparse.RawDescriptionHelpFormatter - if 'parents' not in kwargs: - kwargs['parents'] = (_manifest_arg,) + extra_args - - return parser_adder.add_parser(cmd.name, **kwargs) - - -def _wrap(s): - # Wraps help texts for commands. Some of them have variable length (due to - # _MANIFEST_REV_BRANCH), so just a textwrap.dedent() can look a bit wonky. - - # [1:] gets rid of the initial newline. It's turned into a space by - # textwrap.fill() otherwise. - paragraphs = textwrap.dedent(s[1:]).split("\n\n") - - return "\n\n".join(textwrap.fill(paragraph) for paragraph in paragraphs) - - -_NO_UPDATE_HELP = """ -Unless --no-update is passed, the manifest and West source code repositories -are updated prior to cloning. See the 'update' command. -"""[1:].replace('\n', ' ') - - -_MANIFEST_REV_HELP = """ -The '{}' branch points to the revision that the manifest specified for the -project as of the most recent 'west fetch'/'west pull'. -""".format(_MANIFEST_REV_BRANCH)[1:].replace("\n", " ") - - -def _cloned_projects(args): - # Returns _projects(args, listed_must_be_cloned=True) if a list of projects - # was given by the user (i.e., listed projects are required to be cloned). - # If no projects were listed, returns all cloned projects. - - # This approach avoids redundant _cloned() checks - return _projects(args) if args.projects else \ - [project for project in _all_projects(args) if _cloned(project)] - - -def _projects(args, listed_must_be_cloned=True, include_meta=False): - # Returns a list of project instances for the projects requested in 'args' - # (the command-line arguments), in the same order that they were listed by - # the user. If args.projects is empty, no projects were listed, and all - # projects will be returned. If a non-existent project was listed by the - # user, an error is raised. 
- # - # Before the manifest is parsed, it is validated agains a pykwalify schema. - # An error is raised on validation errors. - # - # listed_must_be_cloned (default: True): - # If True, an error is raised if an uncloned project was listed. This - # only applies to projects listed explicitly on the command line. - # - # include_meta (default: False): - # If True, "meta" projects (i.e. west and the manifest) may be given - # in args.projects without raising errors, and are also included in the - # return value if args.projects is empty. - - projects = _all_projects(args) - - if include_meta: - projects += [_special_project(args, name) for name in META_NAMES] - - if not args.projects: - # No projects specified. Return all projects. - return projects - - # Sort the projects by the length of their absolute paths, with the longest - # path first. That way, projects within projects (e.g., for submodules) are - # tried before their parent projects, when projects are specified via their - # path. - projects.sort(key=lambda project: len(project.abspath), reverse=True) - - # Listed but missing projects. Used for error reporting. - missing_projects = [] - - def normalize(path): - # Returns a case-normalized canonical absolute version of 'path', for - # comparisons. The normcase() is a no-op on platforms on case-sensitive - # filesystems. - return os.path.normcase(os.path.realpath(path)) - - res = [] - for project_arg in args.projects: - for project in projects: - if project.name == project_arg: - # The argument is a project name - res.append(project) - break - else: - # The argument is not a project name. See if it is a project - # (sub)path. - for project in projects: - # The startswith() means we also detect subdirectories of - # project repositories. Giving a plain file in the repo will - # work here too, but that probably doesn't hurt. - if normalize(project_arg).startswith( - normalize(project.abspath)): - res.append(project) - break - else: - # Neither a project name nor a project path. We will report an - # error below. - missing_projects.append(project_arg) - - if missing_projects: - log.die('Unknown project name{0}/path{0} {1} (available projects: {2})' - .format('s' if len(missing_projects) > 1 else '', - ', '.join(missing_projects), - ', '.join(project.name for project in projects))) - - # Check that all listed repositories are cloned, if requested - if listed_must_be_cloned: - # We could still get here with a missing manifest repository if the - # user gave a --manifest argument. - uncloned_meta = [prj.name for prj in res if not _cloned(prj) and - prj.name in META_NAMES] - if uncloned_meta: - log.die('Missing meta project{}: {}.'. - format('s' if len(uncloned_meta) > 1 else '', - ', '.join(uncloned_meta)), - 'The Zephyr installation has been corrupted.') - - uncloned = [prj.name for prj in res - if not _cloned(prj) and prj.name not in META_NAMES] - if uncloned: - log.die('The following projects are not cloned: {}. Please clone ' - "them first with 'west clone'." - .format(", ".join(uncloned))) - - return res - - -def _all_projects(args): - # Get a list of project objects from the manifest. - # - # If the manifest is malformed, a fatal error occurs and the - # command aborts. - - try: - return list(Manifest.from_file(_manifest_path(args), - 'manifest').projects) - except MalformedManifest as m: - log.die(m.args[0]) - - -def _manifest_path(args): - # Returns the path to the manifest file. Defaults to - # .west/manifest/default.yml if the user didn't specify a manifest. 
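The path-matching logic above (sort projects by absolute-path length, longest first, then compare case-normalized canonical paths) can be shown in isolation. A small sketch, assuming hypothetical project paths with one project nested inside another:

    import os

    def normalize(path):
        # Case-normalized canonical absolute path, for comparisons only;
        # normcase() is a no-op on case-sensitive filesystems.
        return os.path.normcase(os.path.realpath(path))

    def match_project(arg, project_paths):
        # Longest paths first, so a nested project wins over the project
        # that contains it.
        for path in sorted(project_paths, key=lambda p: len(normalize(p)),
                           reverse=True):
            if normalize(arg).startswith(normalize(path)):
                return path
        return None

    print(match_project('ext/hal/foo/drivers', ['ext/hal', 'ext/hal/foo']))
    # -> ext/hal/foo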
- - return args.manifest or default_path() - - -def _fetch(project): - # Fetches upstream changes for 'project' and updates the 'manifest-rev' - # branch to point to the revision specified in the manifest. If the - # project's repository does not already exist, it is created first. - - if not _cloned(project): - _inf(project, 'Creating repository for {name_and_path}') - _git_base(project, 'init {abspath}') - # This remote is only added for the user's convenience. We always fetch - # directly from the URL specified in the manifest. - _git(project, 'remote add -- {remote_name} {url}') - - # Fetch the revision specified in the manifest into the manifest-rev branch - - msg = "Fetching changes for {name_and_path}" - if project.clone_depth: - fetch_cmd = "fetch --depth={clone_depth}" - msg += " with --depth {clone_depth}" - else: - fetch_cmd = "fetch" - - _inf(project, msg) - # This two-step approach avoids a "trying to write non-commit object" error - # when the revision is an annotated tag. ^{commit} type peeling isn't - # supported for the in a : refspec, so we have to do it - # separately. - # - # --tags is required to get tags when the remote is specified as an URL. - if _is_sha(project.revision): - # Don't fetch a SHA directly, as server may restrict from doing so. - _git(project, fetch_cmd + ' --tags -- {url}') - _git(project, 'update-ref {qual_manifest_rev_branch} {revision}') - else: - _git(project, fetch_cmd + ' --tags -- {url} {revision}') - _git(project, - 'update-ref {qual_manifest_rev_branch} FETCH_HEAD^{{commit}}') - - if not _head_ok(project): - # If nothing it checked out (which would usually only happen just after - # we initialize the repository), check out 'manifest-rev' in a detached - # HEAD state. - # - # Otherwise, the initial state would have nothing checked out, and HEAD - # would point to a non-existent refs/heads/master branch (that would - # get created if the user makes an initial commit). That state causes - # e.g. 'west rebase' to fail, and might look confusing. - # - # The --detach flag is strictly redundant here, because the - # refs/heads/ form already detaches HEAD, but it avoids a - # spammy detached HEAD warning from Git. - _git(project, 'checkout --detach {qual_manifest_rev_branch}') - - -def _rebase(project): - # Rebases the project against the manifest-rev branch - - if _up_to_date_with(project, _MANIFEST_REV_BRANCH): - _inf(project, - '{name_and_path} is up-to-date with {manifest_rev_branch}') - else: - _inf(project, 'Rebasing {name_and_path} to {manifest_rev_branch}') - _git(project, 'rebase {qual_manifest_rev_branch}') - - -def _sha(project, rev): - # Returns the SHA of a revision (HEAD, v2.0.0, etc.), passed as a string in - # 'rev' - - return _git(project, 'rev-parse ' + rev, capture_stdout=True).stdout - - -def _merge_base(project, rev1, rev2): - # Returns the latest commit in common between 'rev1' and 'rev2' - - return _git(project, 'merge-base -- {} {}'.format(rev1, rev2), - capture_stdout=True).stdout - - -def _up_to_date_with(project, rev): - # Returns True if all commits in 'rev' are also in HEAD. This is used to - # check if 'project' needs rebasing. 'revision' can be anything that - # resolves to a commit. - - return _sha(project, rev) == _merge_base(project, 'HEAD', rev) - - -def _cloned(project): - # Returns True if the project's path is a directory that looks - # like the top-level directory of a Git repository, and False - # otherwise. 
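The up-to-date check above reduces to: HEAD already contains every commit in a revision exactly when 'git merge-base HEAD rev' resolves to the same commit as the revision itself. A standalone sketch using plain subprocess calls (Python 3.7+ for capture_output; any Git checkout and ref will do):

    import subprocess

    def git_out(args, cwd):
        # Run a git command and return its stripped stdout.
        return subprocess.run(['git'] + args, cwd=cwd, check=True,
                              capture_output=True, text=True).stdout.strip()

    def up_to_date_with(repo, rev):
        sha = git_out(['rev-parse', rev], repo)
        base = git_out(['merge-base', 'HEAD', rev], repo)
        return sha == base

    # Usage, assuming '.' is a Git checkout with an 'origin/master' ref:
    # print(up_to_date_with('.', 'origin/master'))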
- - def handle(result): - log.dbg('project', project.name, - 'is {}cloned'.format('' if result else 'not '), - level=log.VERBOSE_EXTREME) - return result - - if not os.path.isdir(project.abspath): - return handle(False) - - # --is-inside-work-tree doesn't require that the directory is the top-level - # directory of a Git repository. Use --show-cdup instead, which prints an - # empty string (i.e., just a newline, which we strip) for the top-level - # directory. - res = _git(project, 'rev-parse --show-cdup', capture_stdout=True, - check=False) - - return handle(not (res.returncode or res.stdout)) - - -def _branches(project): - # Returns a sorted list of all local branches in 'project' - - # refname:lstrip=-1 isn't available before Git 2.8 (introduced by commit - # 'tag: do not show ambiguous tag names as "tags/foo"'). Strip - # 'refs/heads/' manually instead. - return [ref[len('refs/heads/'):] for ref in - _git(project, - 'for-each-ref --sort=refname --format=%(refname) refs/heads', - capture_stdout=True).stdout.split('\n')] - - -def _create_branch(project, branch): - if _has_branch(project, branch): - _inf(project, "Branch '{}' already exists in {{name_and_path}}" - .format(branch)) - else: - _inf(project, "Creating branch '{}' in {{name_and_path}}" - .format(branch)) - - _git(project, - 'branch --quiet --track -- {} {{qual_manifest_rev_branch}}' - .format(branch)) - - -def _has_branch(project, branch): - return _ref_ok(project, 'refs/heads/' + branch) - - -def _ref_ok(project, ref): - # Returns True if the reference 'ref' exists and can be resolved to a - # commit - return _git(project, 'show-ref --quiet --verify ' + ref, check=False) \ - .returncode == 0 - - -def _head_ok(project): - # Returns True if the reference 'HEAD' exists and is not a tag or remote - # ref (e.g. refs/remotes/origin/HEAD). - # Some versions of git will report 1, when doing - # 'git show-ref --verify HEAD' even if HEAD is valid, see #119. - # 'git show-ref --head ' will always return 0 if HEAD or - # is valid. - # We are only interested in HEAD, thus we must avoid being - # valid. '/' can never point to valid reference, thus 'show-ref --head /' - # will return: - # - 0 if HEAD is present - # - 1 otherwise - return _git(project, 'show-ref --quiet --head /', check=False) \ - .returncode == 0 - - -def _checkout(project, branch): - _inf(project, - "Checking out branch '{}' in {{name_and_path}}".format(branch)) - _git(project, 'checkout ' + branch) - - -def _special_project(args, name): - # Returns a Project instance for one of the special repositories in west/, - # so that we can reuse the project-related functions for them - - if name == 'manifest': - url = config.get(name, 'remote', fallback='origin') - revision = config.get(name, 'revision', fallback='master') - return SpecialProject(name, revision=revision, - path=os.path.join('west', name), url=url) - - return Manifest.from_file(_manifest_path(args), name).west_project - - -def _update_west(args): - _update_special(args, 'west') - - -def _update_manifest(args): - _update_special(args, 'manifest') - - -def _update_special(args, name): - with _error_context(_FAILED_UPDATE_MSG): - project = _special_project(args, name) - _dbg(project, 'Updating {name_and_path}', level=log.VERBOSE_NORMAL) - - old_sha = _sha(project, 'HEAD') - - # Only update special repositories if possible via fast-forward, as - # automatic rebasing is probably more annoying than useful when working - # directly on them. - # - # --tags is required to get tags when the remote is specified as a URL. 
- # --ff-only is required to ensure that the merge only takes place if it - # can be fast-forwarded. - if _git(project, - 'fetch --quiet --tags -- {url} {revision}', - check=False).returncode: - - _wrn(project, - 'Skipping automatic update of {name_and_path}. ' - "{revision} cannot be fetched (from {url}).") - - elif _git(project, - 'merge --quiet --ff-only FETCH_HEAD', - check=False).returncode: - - _wrn(project, - 'Skipping automatic update of {name_and_path}. ' - "Can't be fast-forwarded to {revision} (from {url}).") - - elif old_sha != _sha(project, 'HEAD'): - _inf(project, - 'Updated {name_and_path} to {revision} (from {url}).') - - if project.name == 'west': - # Signal self-update, which will cause a restart. This is a bit - # nicer than doing the restart here, as callers will have a - # chance to flush file buffers, etc. - raise WestUpdated() - - -def _update_and_reset_special(args, name): - # Updates one of the special repositories (the manifest and west) by - # resetting to the new revision after fetching it (with 'git reset --keep') - - project = _special_project(args, name) - with _error_context(', while updating/resetting special project'): - _inf(project, - "Fetching and resetting {name_and_path} to '{revision}'") - _git(project, 'fetch -- {url} {revision}') - if _git(project, 'reset --keep FETCH_HEAD', check=False).returncode: - _wrn(project, - 'Failed to reset special project {name_and_path} to ' - "{revision} (with 'git reset --keep')") - - -def _reset_projects(args): - # Fetches changes in all cloned projects and then resets them the manifest - # revision (with 'git reset --keep') - - for project in _all_projects(args): - if _cloned(project): - _fetch(project) - _inf(project, 'Resetting {name_and_path} to {manifest_rev_branch}') - if _git(project, 'reset --keep {manifest_rev_branch}', - check=False).returncode: - - _wrn(project, - 'Failed to reset {name_and_path} to ' - "{manifest_rev_branch} (with 'git reset --keep')") - - -_FAILED_UPDATE_MSG = """ -, while running automatic self-update. Pass --no-update to 'west fetch/pull' to -skip updating the manifest and West for the duration of the command."""[1:] - - -class WestUpdated(Exception): - '''Raised after West has updated its own source code''' - - -def _is_sha(s): - try: - int(s, 16) - except ValueError: - return False - - return len(s) == 40 - - -def _git_base(project, cmd, *, extra_args=(), capture_stdout=False, - check=True): - # Runs a git command in the West top directory. See _git_helper() for - # parameter documentation. - # - # Returns a CompletedProcess instance (see below). - - return _git_helper(project, cmd, extra_args, util.west_topdir(), - capture_stdout, check) - - -def _git(project, cmd, *, extra_args=(), capture_stdout=False, check=True): - # Runs a git command within a particular project. See _git_helper() for - # parameter documentation. - # - # Returns a CompletedProcess instance (see below). - - return _git_helper(project, cmd, extra_args, project.abspath, - capture_stdout, check) - - -def _git_helper(project, cmd, extra_args, cwd, capture_stdout, check): - # Runs a git command. - # - # project: - # The Project instance for the project, derived from the manifest file. - # - # cmd: - # String with git arguments. Supports some "(foo)" shorthands. See below. - # - # extra_args: - # List of additional arguments to pass to the git command (e.g. from the - # user). 
- # - # cwd: - # Directory to switch to first (None = current directory) - # - # capture_stdout: - # True if stdout should be captured into the returned - # subprocess.CompletedProcess instance instead of being printed. - # - # We never capture stderr, to prevent error messages from being eaten. - # - # check: - # True if an error should be raised if the git command finishes with a - # non-zero return code. - # - # Returns a subprocess.CompletedProcess instance. - - # TODO: Run once somewhere? - if shutil.which('git') is None: - log.die('Git is not installed or cannot be found') - - args = (('git',) + - tuple(_expand_shorthands(project, arg) for arg in cmd.split()) + - tuple(extra_args)) - cmd_str = util.quote_sh_list(args) - - log.dbg("running '{}'".format(cmd_str), 'in', cwd, level=log.VERBOSE_VERY) - popen = subprocess.Popen( - args, stdout=subprocess.PIPE if capture_stdout else None, cwd=cwd) - - stdout, _ = popen.communicate() - - dbg_msg = "'{}' in {} finished with exit status {}" \ - .format(cmd_str, cwd, popen.returncode) - if capture_stdout: - dbg_msg += " and wrote {} to stdout".format(stdout) - log.dbg(dbg_msg, level=log.VERBOSE_VERY) - - if check and popen.returncode: - msg = "Command '{}' failed for {{name_and_path}}".format(cmd_str) - if _error_context_msg: - msg += _error_context_msg.replace('\n', ' ') - _die(project, msg) - - if capture_stdout: - # Manual UTF-8 decoding and universal newlines. Before Python 3.6, - # Popen doesn't seem to allow using universal newlines mode (which - # enables decoding) with a specific encoding (because the encoding= - # parameter is missing). - # - # Also strip all trailing newlines as convenience. The splitlines() - # already means we lose a final '\n' anyway. - stdout = "\n".join(stdout.decode('utf-8').splitlines()).rstrip("\n") - - return CompletedProcess(popen.args, popen.returncode, stdout) - - -# Some Python shenanigans to be able to set up a context with -# -# with _error_context("Doing stuff"): -# Do the stuff -# -# A context is just some extra text that gets printed on Git errors. -# -# Note: If we ever need to support nested contexts, _error_context_msg could be -# turned into a stack. - -_error_context_msg = None - - -class _error_context: - def __init__(self, msg): - self.msg = msg - - def __enter__(self): - global _error_context_msg - _error_context_msg = self.msg - - def __exit__(self, *args): - global _error_context_msg - _error_context_msg = None - - -def _expand_shorthands(project, s): - # Expands project-related shorthands in 's' to their values, - # returning the expanded string - - # Some of the trickier ones below. 'qual' stands for 'qualified', meaning - # the full path to the ref (e.g. refs/heads/master). - # - # manifest-rev-branch: - # The name of the magic branch that points to the manifest revision - # - # qual-manifest-rev-branch: - # A qualified reference to the magic manifest revision branch, e.g. - # refs/heads/manifest-rev - - return s.format(name=project.name, - name_and_path='{} ({})'.format( - project.name, os.path.join(project.path, "")), - remote_name=('None' if project.remote is None - else project.remote.name), - url=project.url, - path=project.path, - abspath=project.abspath, - revision=project.revision, - manifest_rev_branch=_MANIFEST_REV_BRANCH, - qual_manifest_rev_branch=('refs/heads/' + - _MANIFEST_REV_BRANCH), - clone_depth=str(project.clone_depth)) - - -def _inf(project, msg): - # Print '=== msg' (to clearly separate it from Git output). Supports the - # same (foo) shorthands as the git commands. 
- # - # Prints the message in green if stdout is a terminal, to clearly separate - # it from command (usually Git) output. - - log.inf('=== ' + _expand_shorthands(project, msg), colorize=True) - - -def _wrn(project, msg): - # Warn with 'msg'. Supports the same (foo) shorthands as the git commands. - - log.wrn(_expand_shorthands(project, msg)) - - -def _dbg(project, msg, level): - # Like _wrn(), for debug messages - - log.dbg(_expand_shorthands(project, msg), level=level) - - -def _die(project, msg): - # Like _wrn(), for dying - - log.die(_expand_shorthands(project, msg)) - - -# subprocess.CompletedProcess-alike, used instead of the real deal for Python -# 3.4 compatibility, and with two small differences: -# -# - Trailing newlines are stripped from stdout -# -# - The 'stderr' attribute is omitted, because we never capture stderr -CompletedProcess = collections.namedtuple( - 'CompletedProcess', 'args returncode stdout') diff --git a/scripts/meta/west/commands/run_common.py b/scripts/meta/west/commands/run_common.py deleted file mode 100644 index 22a1f6e7a5b..00000000000 --- a/scripts/meta/west/commands/run_common.py +++ /dev/null @@ -1,452 +0,0 @@ -# Copyright (c) 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Common code used by commands which execute runners. -''' - -import argparse -from os import getcwd, path -from subprocess import CalledProcessError -import textwrap - -from west import cmake -from west import log -from west import util -from west.build import DEFAULT_BUILD_DIR, is_zephyr_build -from west.runners import get_runner_cls, ZephyrBinaryRunner -from west.runners.core import RunnerConfig -from west.commands import CommandContextError - -# Context-sensitive help indentation. -# Don't change this, or output from argparse won't match up. -INDENT = ' ' * 2 - - -def add_parser_common(parser_adder, command): - parser = parser_adder.add_parser( - command.name, - formatter_class=argparse.RawDescriptionHelpFormatter, - description=command.description) - - # Remember to update scripts/west-completion.bash if you add or remove - # flags - - parser.add_argument('-H', '--context', action='store_true', - help='''Rebuild application and print context-sensitive - help; this may be combined with --runner to restrict - output to a given runner.''') - - group = parser.add_argument_group(title='General Options') - - group.add_argument('-d', '--build-dir', - help='''Build directory to obtain runner information - from. If not given, this command tries to use build/ - and then the current working directory, in that - order.''') - group.add_argument('-c', '--cmake-cache', - help='''Path to CMake cache file containing runner - configuration (this is generated by the Zephyr - build system when compiling binaries); - default: {}. - - If this is a relative path, it is assumed relative to - the build directory. An absolute path can also be - given instead.'''.format(cmake.DEFAULT_CACHE)) - group.add_argument('-r', '--runner', - help='''If given, overrides any cached {} - runner.'''.format(command.name)) - group.add_argument('--skip-rebuild', action='store_true', - help='''If given, do not rebuild the application - before running {} commands.'''.format(command.name)) - - group = parser.add_argument_group( - title='Configuration overrides', - description=textwrap.dedent('''\ - These values usually come from the Zephyr build system itself - as stored in the CMake cache; providing these options - overrides those settings.''')) - - # Important: - # - # 1. 
The destination variables of these options must match - # the RunnerConfig slots. - # 2. The default values for all of these must be None. - # - # This is how we detect if the user provided them or not when - # overriding values from the cached configuration. - - command_verb = "flash" if command == "flash" else "debug" - - group.add_argument('--board-dir', - help='Zephyr board directory') - group.add_argument('--elf-file', - help='Path to elf file to {0}'.format(command_verb)) - group.add_argument('--hex-file', - help='Path to hex file to {0}'.format(command_verb)) - group.add_argument('--bin-file', - help='Path to binary file to {0}'.format(command_verb)) - group.add_argument('--gdb', - help='Path to GDB, if applicable') - group.add_argument('--openocd', - help='Path to OpenOCD, if applicable') - group.add_argument( - '--openocd-search', - help='Path to add to OpenOCD search path, if applicable') - - return parser - - -def desc_common(command_name): - return textwrap.dedent('''\ - Any options not recognized by this command are passed to the - back-end {command} runner (run "west {command} --context" - for help on available runner-specific options). - - If you need to pass an option to a runner which has the - same name as one recognized by this command, you can - end argument parsing with a '--', like so: - - west {command} --{command}-arg=value -- --runner-arg=value2 - '''.format(**{'command': command_name})) - - -def cached_runner_config(build_dir, cache): - '''Parse the RunnerConfig from a build directory and CMake Cache.''' - board_dir = cache['ZEPHYR_RUNNER_CONFIG_BOARD_DIR'] - elf_file = cache.get('ZEPHYR_RUNNER_CONFIG_ELF_FILE', - cache['ZEPHYR_RUNNER_CONFIG_KERNEL_ELF']) - hex_file = cache.get('ZEPHYR_RUNNER_CONFIG_HEX_FILE', - cache['ZEPHYR_RUNNER_CONFIG_KERNEL_HEX']) - bin_file = cache.get('ZEPHYR_RUNNER_CONFIG_BIN_FILE', - cache['ZEPHYR_RUNNER_CONFIG_KERNEL_BIN']) - gdb = cache.get('ZEPHYR_RUNNER_CONFIG_GDB') - openocd = cache.get('ZEPHYR_RUNNER_CONFIG_OPENOCD') - openocd_search = cache.get('ZEPHYR_RUNNER_CONFIG_OPENOCD_SEARCH') - - return RunnerConfig(build_dir, board_dir, - elf_file, hex_file, bin_file, - gdb=gdb, openocd=openocd, - openocd_search=openocd_search) - - -def _override_config_from_namespace(cfg, namespace): - '''Override a RunnerConfig's contents with command-line values.''' - for var in cfg.__slots__: - if var in namespace: - val = getattr(namespace, var) - if val is not None: - setattr(cfg, var, val) - - -def _build_dir(args, die_if_none=True): - # Get the build directory for the given argument list and environment. - if args.build_dir: - return args.build_dir - - cwd = getcwd() - default = path.join(cwd, DEFAULT_BUILD_DIR) - if is_zephyr_build(default): - return default - elif is_zephyr_build(cwd): - return cwd - elif die_if_none: - log.die('--build-dir was not given, and neither {} ' - 'nor {} are zephyr build directories.'. - format(default, cwd)) - else: - return None - - -def do_run_common(command, args, runner_args, cached_runner_var): - if args.context: - _dump_context(command, args, runner_args, cached_runner_var) - return - - command_name = command.name - build_dir = _build_dir(args) - - if not args.skip_rebuild: - try: - cmake.run_build(build_dir) - except CalledProcessError: - if args.build_dir: - log.die('cannot run {}, build in {} failed'.format( - command_name, args.build_dir)) - else: - log.die('cannot run {}; no --build-dir given and build in ' - 'current directory {} failed'.format(command_name, - build_dir)) - - # Runner creation, phase 1. 
- # - # Get the default runner name from the cache, allowing a command - # line override. Get the ZephyrBinaryRunner class by name, and - # make sure it supports the command. - - cache_file = path.join(build_dir, args.cmake_cache or cmake.DEFAULT_CACHE) - cache = cmake.CMakeCache(cache_file) - board = cache['CACHED_BOARD'] - available = cache.get_list('ZEPHYR_RUNNERS') - if not available: - log.wrn('No cached runners are available in', cache_file) - runner = args.runner or cache.get(cached_runner_var) - - if runner is None: - raise CommandContextError(textwrap.dedent(""" - No {} runner available for {}. Please either specify one - manually, or check your board's documentation for - alternative instructions.""".format(command_name, board))) - - log.inf('Using runner:', runner) - if runner not in available: - log.wrn('Runner {} is not configured for use with {}, ' - 'this may not work'.format(runner, board)) - runner_cls = get_runner_cls(runner) - if command_name not in runner_cls.capabilities().commands: - log.die('Runner {} does not support command {}'.format( - runner, command_name)) - - # Runner creation, phase 2. - # - # At this point, the common options above are already parsed in - # 'args', and unrecognized arguments are in 'runner_args'. - # - # - Pull the RunnerConfig out of the cache - # - Override cached values with applicable command-line options - - cfg = cached_runner_config(build_dir, cache) - _override_config_from_namespace(cfg, args) - - # Runner creation, phase 3. - # - # - Pull out cached runner arguments, and append command-line - # values (which should override the cache) - # - Construct a runner-specific argument parser to handle cached - # values plus overrides given in runner_args - # - Parse arguments and create runner instance from final - # RunnerConfig and parsed arguments. - - cached_runner_args = cache.get_list( - 'ZEPHYR_RUNNER_ARGS_{}'.format(cmake.make_c_identifier(runner))) - assert isinstance(runner_args, list), runner_args - # If the user passed -- to force the parent argument parser to stop - # parsing, it will show up here, and needs to be filtered out. - runner_args = [arg for arg in runner_args if arg != '--'] - final_runner_args = cached_runner_args + runner_args - parser = argparse.ArgumentParser(prog=runner) - runner_cls.add_parser(parser) - parsed_args, unknown = parser.parse_known_args(args=final_runner_args) - if unknown: - raise CommandContextError('Runner', runner, - 'received unknown arguments', unknown) - runner = runner_cls.create(cfg, parsed_args) - runner.run(command_name) - - -# -# Context-specific help -# - -def _dump_context(command, args, runner_args, cached_runner_var): - build_dir = _build_dir(args, die_if_none=False) - - # Try to figure out the CMake cache file based on the build - # directory or an explicit argument. - if build_dir is not None: - cache_file = path.abspath( - path.join(build_dir, args.cmake_cache or cmake.DEFAULT_CACHE)) - elif args.cmake_cache: - cache_file = path.abspath(args.cmake_cache) - else: - cache_file = None - - # Load the cache itself, if possible. - if cache_file is None: - log.wrn('No build directory (--build-dir) or CMake cache ' - '(--cache-file) given or found; output will be limited') - cache = None - else: - try: - cache = cmake.CMakeCache(cache_file) - except Exception: - log.die('Cannot load cache {}.'.format(cache_file)) - - # If we have a build directory, try to ensure build artifacts are - # up to date. If that doesn't work, still try to print information - # on a best-effort basis. 
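Phase 3 above layers cached runner arguments under command-line ones simply by concatenating the two lists before parsing, so later (user-supplied) values win. A small sketch with made-up runner options to show the override behaviour:

    import argparse

    # Values from the build system's cache come first; whatever the user
    # typed is appended afterwards and therefore overrides them.
    cached_args = ['--speed', '4000']
    cli_args = ['--speed', '1000', '--erase']

    parser = argparse.ArgumentParser(prog='example-runner')
    parser.add_argument('--speed')
    parser.add_argument('--erase', action='store_true')

    args, unknown = parser.parse_known_args(cached_args + cli_args)
    print(args.speed, args.erase, unknown)   # -> 1000 True []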
- if build_dir and not args.skip_rebuild: - try: - cmake.run_build(build_dir) - except CalledProcessError: - msg = 'Failed re-building application; cannot load context. ' - if args.build_dir: - msg += 'Is {} the right --build-dir?'.format(args.build_dir) - else: - msg += textwrap.dedent('''\ - Use --build-dir (-d) to specify a build directory; the one - used was {}.'''.format(build_dir)) - log.die('\n'.join(textwrap.wrap(msg, initial_indent='', - subsequent_indent=INDENT, - break_on_hyphens=False))) - - if cache is None: - _dump_no_context_info(command, args) - if not args.runner: - return - - if args.runner: - # Just information on one runner was requested. - _dump_one_runner_info(cache, args, build_dir, INDENT) - return - - board = cache['CACHED_BOARD'] - - all_cls = {cls.name(): cls for cls in ZephyrBinaryRunner.get_runners() if - command.name in cls.capabilities().commands} - available = [r for r in cache.get_list('ZEPHYR_RUNNERS') if r in all_cls] - available_cls = {r: all_cls[r] for r in available if r in all_cls} - - default_runner = cache.get(cached_runner_var) - cfg = cached_runner_config(build_dir, cache) - - log.inf('All Zephyr runners which support {}:'.format(command.name), - colorize=True) - for line in util.wrap(', '.join(all_cls.keys()), INDENT): - log.inf(line) - log.inf('(Not all may work with this build, see available runners below.)', - colorize=True) - - if cache is None: - log.warn('Missing or invalid CMake cache {}; there is no context.', - 'Use --build-dir to specify the build directory.') - return - - log.inf('Build directory:', colorize=True) - log.inf(INDENT + build_dir) - log.inf('Board:', colorize=True) - log.inf(INDENT + board) - log.inf('CMake cache:', colorize=True) - log.inf(INDENT + cache_file) - - if not available: - # Bail with a message if no runners are available. - msg = ('No runners available for {}. 
' - 'Consult the documentation for instructions on how to run ' - 'binaries on this target.').format(board) - for line in util.wrap(msg, ''): - log.inf(line, colorize=True) - return - - log.inf('Available {} runners:'.format(command.name), colorize=True) - log.inf(INDENT + ', '.join(available)) - log.inf('Additional options for available', command.name, 'runners:', - colorize=True) - for runner in available: - _dump_runner_opt_help(runner, all_cls[runner]) - log.inf('Default {} runner:'.format(command.name), colorize=True) - log.inf(INDENT + default_runner) - _dump_runner_config(cfg, '', INDENT) - log.inf('Runner-specific information:', colorize=True) - for runner in available: - log.inf('{}{}:'.format(INDENT, runner), colorize=True) - _dump_runner_cached_opts(cache, runner, INDENT * 2, INDENT * 3) - _dump_runner_caps(available_cls[runner], INDENT * 2) - - if len(available) > 1: - log.inf('(Add -r RUNNER to just print information about one runner.)', - colorize=True) - - -def _dump_no_context_info(command, args): - all_cls = {cls.name(): cls for cls in ZephyrBinaryRunner.get_runners() if - command.name in cls.capabilities().commands} - log.inf('All Zephyr runners which support {}:'.format(command.name), - colorize=True) - for line in util.wrap(', '.join(all_cls.keys()), INDENT): - log.inf(line) - if not args.runner: - log.inf('Add -r RUNNER to print more information about any runner.', - colorize=True) - - -def _dump_one_runner_info(cache, args, build_dir, indent): - runner = args.runner - cls = get_runner_cls(runner) - - if cache is None: - _dump_runner_opt_help(runner, cls) - _dump_runner_caps(cls, '') - return - - available = runner in cache.get_list('ZEPHYR_RUNNERS') - cfg = cached_runner_config(build_dir, cache) - - log.inf('Build directory:', colorize=True) - log.inf(INDENT + build_dir) - log.inf('Board:', colorize=True) - log.inf(INDENT + cache['CACHED_BOARD']) - log.inf('CMake cache:', colorize=True) - log.inf(INDENT + cache.cache_file) - log.inf(runner, 'is available:', 'yes' if available else 'no', - colorize=True) - _dump_runner_opt_help(runner, cls) - _dump_runner_config(cfg, '', indent) - if available: - _dump_runner_cached_opts(cache, runner, '', indent) - _dump_runner_caps(cls, '') - if not available: - log.wrn('Runner', runner, 'is not configured in this build.') - - -def _dump_runner_caps(cls, base_indent): - log.inf('{}Capabilities:'.format(base_indent), colorize=True) - log.inf('{}{}'.format(base_indent + INDENT, cls.capabilities())) - - -def _dump_runner_opt_help(runner, cls): - # Construct and print the usage text - dummy_parser = argparse.ArgumentParser(prog='', add_help=False) - cls.add_parser(dummy_parser) - formatter = dummy_parser._get_formatter() - for group in dummy_parser._action_groups: - # Break the abstraction to filter out the 'flash', 'debug', etc. - # TODO: come up with something cleaner (may require changes - # in the runner core). - actions = group._group_actions - if len(actions) == 1 and actions[0].dest == 'command': - # This is the lone positional argument. Skip it. 
- continue - formatter.start_section('REMOVE ME') - formatter.add_text(group.description) - formatter.add_arguments(actions) - formatter.end_section() - # Get the runner help, with the "REMOVE ME" string gone - runner_help = '\n'.join(formatter.format_help().splitlines()[1:]) - - log.inf('{} options:'.format(runner), colorize=True) - log.inf(runner_help) - - -def _dump_runner_config(cfg, initial_indent, subsequent_indent): - log.inf('{}Cached common runner configuration:'.format(initial_indent), - colorize=True) - for var in cfg.__slots__: - log.inf('{}--{}={}'.format(subsequent_indent, var, getattr(cfg, var))) - - -def _dump_runner_cached_opts(cache, runner, initial_indent, subsequent_indent): - runner_args = _get_runner_args(cache, runner) - if not runner_args: - return - - log.inf('{}Cached runner-specific options:'.format(initial_indent), - colorize=True) - for arg in runner_args: - log.inf('{}{}'.format(subsequent_indent, arg)) - - -def _get_runner_args(cache, runner): - runner_ident = cmake.make_c_identifier(runner) - args_var = 'ZEPHYR_RUNNER_ARGS_{}'.format(runner_ident) - return cache.get_list(args_var) diff --git a/scripts/meta/west/config.py b/scripts/meta/west/config.py deleted file mode 100644 index 34be7e4b57b..00000000000 --- a/scripts/meta/west/config.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) 2018, Nordic Semiconductor ASA -# -# SPDX-License-Identifier: Apache-2.0 - -''' -Configuration file handling, using the standard configparser module. -''' - -import configparser -import os -import platform - -from west.util import west_dir - - -# Configuration values. -# -# Initially empty, populated in read_config(). Always having this available is -# nice in case something checks configuration values before the configuration -# file has been read (e.g. the log.py functions, to check color settings, and -# tests). -config = configparser.ConfigParser() - - -def read_config(): - ''' - Reads all configuration files, making the configuration values available as - a configparser.ConfigParser object in config.config. This object works - similarly to a dictionary: config.config['foo']['bar'] gets the value for - key 'bar' in section 'foo'. - - Git conventions for configuration file locations are used. See the FILES - section in the git-config(1) man page. - - The following configuration files are read. - - System-wide: - - Linux: /etc/westconfig - Mac OS: /usr/local/etc/westconfig - Windows: %PROGRAMDATA%\\west\\config - - User-specific: - - $XDG_CONFIG_HOME/west/config (on Linux) - and - ~/.westconfig - - ($XDG_CONFIG_DIR defaults to ~/.config/ if unset.) - - Instance-specific: - - /west/config - - Configuration values from later configuration files override configuration - from earlier ones. Instance-specific configuration values have the highest - precedence, and system-wide the lowest. - ''' - - # Gather (potential) configuration file paths - - # System-wide and user-specific - - if platform.system() == 'Linux': - # Probably wouldn't hurt to check $XDG_CONFIG_HOME (defaults to - # ~/.config) on all systems. It's listed in git-config(1). People were - # iffy about it as of writing though. 
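The precedence scheme described in read_config() falls straight out of configparser: ConfigParser.read() accepts a list of paths, silently skips files that do not exist, and lets values from later files override earlier ones. A minimal sketch (the paths are illustrative; ~ is expanded by hand because read() does not expand it):

    import configparser
    import os

    config = configparser.ConfigParser()
    parsed = config.read([
        '/etc/westconfig',                       # system-wide (lowest)
        os.path.expanduser('~/.westconfig'),     # user-specific
        os.path.join('west', 'config'),          # installation (highest)
    ], encoding='utf-8')

    print('configuration files found:', parsed)
    print('color.ui =', config.getboolean('color', 'ui', fallback=True))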
- files = ['/etc/westconfig', - os.path.join(os.environ.get('XDG_CONFIG_HOME', - os.path.expanduser('~/.config')), - 'west', 'config')] - - elif platform.system() == 'Darwin': # Mac OS - # This was seen on a local machine ($(prefix) = /usr/local) - files = ['/usr/local/etc/westconfig'] - elif platform.system() == 'Windows': - # Seen on a local machine - files = [os.path.expandvars('%PROGRAMDATA%\\west\\config')] - - files.append(os.path.expanduser('~/.westconfig')) - - # Repository-specific - - files.append(os.path.join(west_dir(), 'config')) - - # - # Parse all existing configuration files - # - - config.read(files, encoding='utf-8') - - -def use_colors(): - # Convenience function for reading the color.ui setting - return config.getboolean('color', 'ui', fallback=True) diff --git a/scripts/meta/west/log.py b/scripts/meta/west/log.py deleted file mode 100644 index 76928f3a3df..00000000000 --- a/scripts/meta/west/log.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Logging module for west - -Provides common methods for logging messages to display to the user.''' - -from west import config - -import colorama -import sys - -VERBOSE_NONE = 0 -'''Base verbosity level (zero), no verbose messages printed.''' - -VERBOSE_NORMAL = 1 -'''Base verbosity level, some verbose messages printed.''' - -VERBOSE_VERY = 2 -'''Very verbose output messages will be printed.''' - -VERBOSE_EXTREME = 3 -'''Extremely verbose output messages will be printed.''' - -VERBOSE = VERBOSE_NONE -'''Global verbosity level. VERBOSE_NONE is the default.''' - - -def set_verbosity(value): - '''Set the logging verbosity level.''' - global VERBOSE - VERBOSE = int(value) - - -def dbg(*args, level=VERBOSE_NORMAL): - '''Print a verbose debug logging message. - - The message is only printed if level is at least the current - verbosity level.''' - if level > VERBOSE: - return - print(*args) - - -def inf(*args, colorize=False): - '''Print an informational message. - - colorize (default: False): - If True, the message is printed in bright green if stdout is a terminal. - ''' - - if not config.use_colors(): - colorize = False - - # This approach colorizes any sep= and end= text too, as expected. - # - # colorama automatically strips the ANSI escapes when stdout isn't a - # terminal (by wrapping sys.stdout). 
- if colorize: - print(colorama.Fore.LIGHTGREEN_EX, end='') - - print(*args) - - if colorize: - _reset_colors(sys.stdout) - - -def wrn(*args): - '''Print a warning.''' - - if config.use_colors(): - print(colorama.Fore.LIGHTRED_EX, end='', file=sys.stderr) - - print('WARNING: ', end='', file=sys.stderr) - print(*args, file=sys.stderr) - - if config.use_colors(): - _reset_colors(sys.stderr) - - -def err(*args, fatal=False): - '''Print an error.''' - - if config.use_colors(): - print(colorama.Fore.LIGHTRED_EX, end='', file=sys.stderr) - - print('FATAL ERROR: ' if fatal else 'ERROR: ', end='', file=sys.stderr) - print(*args, file=sys.stderr) - - if config.use_colors(): - _reset_colors(sys.stderr) - - -def die(*args, exit_code=1): - '''Print a fatal error, and abort with the given exit code.''' - err(*args, fatal=True) - sys.exit(exit_code) - - -def _reset_colors(file): - # The flush=True avoids issues with unrelated output from commands (usually - # Git) becoming colorized, due to the final attribute reset ANSI escape - # getting line-buffered - print(colorama.Style.RESET_ALL, end='', file=file, flush=True) diff --git a/scripts/meta/west/main.py b/scripts/meta/west/main.py deleted file mode 100755 index fb581c8e0ba..00000000000 --- a/scripts/meta/west/main.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Zephyr RTOS meta-tool (west) main module -''' - - -import argparse -import colorama -from functools import partial -import os -import sys -from subprocess import CalledProcessError, check_output, DEVNULL - -from west import log -from west import config -from west.commands import CommandContextError -from west.commands.build import Build -from west.commands.flash import Flash -from west.commands.debug import Debug, DebugServer, Attach -from west.commands.project import List, Clone, Fetch, Pull, Rebase, Branch, \ - Checkout, Diff, Status, Update, ForAll, \ - WestUpdated -from west.manifest import Manifest -from west.util import quote_sh_list, in_multirepo_install, west_dir - -IN_MULTIREPO_INSTALL = in_multirepo_install(os.path.dirname(__file__)) - -BUILD_FLASH_COMMANDS = [ - Build(), - Flash(), - Debug(), - DebugServer(), - Attach(), -] - -PROJECT_COMMANDS = [ - List(), - Clone(), - Fetch(), - Pull(), - Rebase(), - Branch(), - Checkout(), - Diff(), - Status(), - Update(), - ForAll(), -] - -# Built-in commands in this West. For compatibility with monorepo -# installations of West within the Zephyr tree, we only expose the -# project commands if this is a multirepo installation. -COMMANDS = BUILD_FLASH_COMMANDS - -if IN_MULTIREPO_INSTALL: - COMMANDS += PROJECT_COMMANDS - - -class InvalidWestContext(RuntimeError): - pass - - -def command_handler(command, known_args, unknown_args): - command.run(known_args, unknown_args) - - -def set_zephyr_base(args): - '''Ensure ZEPHYR_BASE is set, emitting warnings if that's not - possible, or if the user is pointing it somewhere different than - what the manifest expects.''' - zb_env = os.environ.get('ZEPHYR_BASE') - - if args.zephyr_base: - # The command line --zephyr-base takes precedence over - # everything else. - zb = os.path.abspath(args.zephyr_base) - zb_origin = 'command line' - else: - # If the user doesn't specify it concretely, use the project - # with path 'zephyr' if that exists, or the ZEPHYR_BASE value - # in the calling environment. 
- # - # At some point, we need a more flexible way to set environment - # variables based on manifest contents, but this is good enough - # to get started with and to ask for wider testing. - manifest = Manifest.from_file() - for project in manifest.projects: - if project.path == 'zephyr': - zb = project.abspath - zb_origin = 'manifest file {}'.format(manifest.path) - break - else: - if zb_env is None: - log.wrn('no --zephyr-base given, ZEPHYR_BASE is unset,', - 'and no manifest project has path "zephyr"') - zb = None - zb_origin = None - else: - zb = zb_env - zb_origin = 'environment' - - if zb_env and os.path.abspath(zb) != os.path.abspath(zb_env): - # The environment ZEPHYR_BASE takes precedence over either the - # command line or the manifest, but in normal multi-repo - # operation we shouldn't expect to need to set ZEPHYR_BASE to - # point to some random place. In practice, this is probably - # happening because zephyr-env.sh/cmd was run in some other - # zephyr installation, and the user forgot about that. - log.wrn('ZEPHYR_BASE={}'.format(zb_env), - 'in the calling environment, but has been set to', - zb, 'instead by the', zb_origin) - - os.environ['ZEPHYR_BASE'] = zb - - log.dbg('ZEPHYR_BASE={} (origin: {})'.format(zb, zb_origin)) - - -def print_version_info(): - # The bootstrapper will print its own version, as well as that of - # the west repository itself, then exit. So if this file is being - # asked to print the version, it's because it's being run - # directly, and not via the bootstrapper. - # - # Rather than play tricks like invoking "pip show west" (which - # assumes the bootstrapper was installed via pip, the common but - # not universal case), refuse the temptation to make guesses and - # print an honest answer. - log.inf('West bootstrapper version: N/A, not run via bootstrapper') - - # The running west installation. - if IN_MULTIREPO_INSTALL: - try: - desc = check_output(['git', 'describe', '--tags'], - stderr=DEVNULL, - cwd=os.path.dirname(__file__)) - west_version = desc.decode(sys.getdefaultencoding()).strip() - except CalledProcessError: - west_version = 'unknown' - else: - west_version = 'N/A, monorepo installation' - west_src_west = os.path.dirname(__file__) - print('West repository version: {} ({})'. - format(west_version, - os.path.dirname(os.path.dirname(west_src_west)))) - - -def parse_args(argv): - # The prog='west' override avoids the absolute path of the main.py script - # showing up when West is run via the wrapper - west_parser = argparse.ArgumentParser( - prog='west', description='The Zephyr RTOS meta-tool.', - epilog='Run "west -h" for help on each command.') - - # Remember to update scripts/west-completion.bash if you add or remove - # flags - - west_parser.add_argument('-z', '--zephyr-base', default=None, - help='''Override the Zephyr base directory. The - default is the manifest project with path - "zephyr".''') - - west_parser.add_argument('-v', '--verbose', default=0, action='count', - help='''Display verbose output. May be given - multiple times to increase verbosity.''') - - west_parser.add_argument('-V', '--version', action='store_true') - - subparser_gen = west_parser.add_subparsers(title='commands', - dest='command') - - for command in COMMANDS: - parser = command.add_parser(subparser_gen) - parser.set_defaults(handler=partial(command_handler, command)) - - args, unknown = west_parser.parse_known_args(args=argv) - - if args.version: - print_version_info() - sys.exit(0) - - # Set up logging verbosity before doing anything else, so - # e.g. 
verbose messages related to argument handling errors - # work properly. - log.set_verbosity(args.verbose) - - if IN_MULTIREPO_INSTALL: - set_zephyr_base(args) - - if 'handler' not in args: - if IN_MULTIREPO_INSTALL: - log.err('west installation found (in {}), but no command given'. - format(west_dir())) - else: - log.err('no west command given') - west_parser.print_help(file=sys.stderr) - sys.exit(1) - - return args, unknown - - -def main(argv=None): - # Makes ANSI color escapes work on Windows, and strips them when - # stdout/stderr isn't a terminal - colorama.init() - - if argv is None: - argv = sys.argv[1:] - args, unknown = parse_args(argv) - - if IN_MULTIREPO_INSTALL: - # Read the configuration files - config.read_config() - - for_stack_trace = 'run as "west -v ... {} ..." for a stack trace'.format( - args.command) - try: - args.handler(args, unknown) - except WestUpdated: - # West has been automatically updated. Restart ourselves to run the - # latest version, with the same arguments that we were given. - os.execv(sys.executable, [sys.executable] + argv) - except KeyboardInterrupt: - sys.exit(0) - except CalledProcessError as cpe: - log.err('command exited with status {}: {}'.format( - cpe.args[0], quote_sh_list(cpe.args[1]))) - if args.verbose: - raise - else: - log.inf(for_stack_trace) - except CommandContextError as cce: - log.die('command', args.command, 'cannot be run in this context:', - *cce.args) - - -if __name__ == "__main__": - main() diff --git a/scripts/meta/west/manifest-schema.yml b/scripts/meta/west/manifest-schema.yml deleted file mode 100644 index c7d4e22dd2d..00000000000 --- a/scripts/meta/west/manifest-schema.yml +++ /dev/null @@ -1,132 +0,0 @@ -## A pykwalify schema for basic validation of the structure of a -## manifest YAML file. (Full validation would require additional work, -## e.g. to validate that remote URLs obey the URL format specified in -## rfc1738.) -## -## This schema has similar semantics to the repo XML format: -## -## https://gerrit.googlesource.com/git-repo/+/master/docs/manifest-format.txt -## -## However, the features don't map 1:1. - -# The top-level manifest is a map. There may be multiple sections in the -# manifest file. Each section can be validated by their own schema. -# This schema validates the 'manifest' section. -type: map -mapping: - # The "defaults" key specifies some default values used in the - # rest of the manifest. - # - # The value is a map with the following keys: - # - # - remote: if given, this is the default remote in each project - # - revision: if given, this is the default revision to check - # out of each project - # - # See below for more information about remotes and projects. - # - # Examples: - # - # default: - # remote: zephyrproject-rtos - # revision: master - defaults: - required: false - type: map - mapping: - remote: - required: false - type: str - revision: - required: false - type: str - - # The "remotes" key specifies a sequence of remotes, each of - # which has a name and a fetch URL. - # - # These work like repo remotes, in that they specify a URL - # prefix which remote-specific Git repositories hang off of. - # (This saves typing and makes it easier to move things around - # when most repositories are on the same server or GitHub - # organization.) 
- # - # Example: - # - # remotes: - # - name: zephyrproject-rtos - # url-base: https://github.com/zephyrproject-rtos - # - name: developer-fork - # url-base: https://github.com/a-developer - remotes: - required: true - type: seq - sequence: - - type: map - mapping: - name: - required: true - type: str - url-base: - required: true - type: str - - # The "projects" key specifies a sequence of "projects", - # i.e. Git repositories. These work like repo projects, in that - # each project has a name, a remote, and optional additional - # metadata. - # - # Each project is a map with the following keys: - # - # - name: Mandatory, the name of the git repository. The clone - # URL is formed by remote url-base + '/' + name. The name cannot - # be one of the reserved values "west" and "manifest". - # - remote: Optional, the name of the remote to pull it from. - # If the remote is missing, the remote'key in the top-level - # defaults key is used instead. If both are missing, it's an error. - # - revision: Optional, the name of the revision to check out. - # If not given, the value from the default element will be used. - # If both are missing, then the default is 'master'. - # - path: Where to clone the repository locally. If missing, - # it's cloned at top level in a directory given by its name. - # - clone-depth: if given, it is a number which creates a shallow - # history in the cloned repository limited to the given number - # of commits. - # - # Example, using default and non-default remotes: - # - # projects: - # # Uses default remote (zephyrproject-rtos), so clone URL is: - # # https://github.com/zephyrproject-rtos/zephyr - # - name: zephyr - # # Manually specified remote; clone URL is: - # # https://github.com/a-developer/west - # - name: west - # remote: developer-fork - # # Manually specified remote, clone URL is: - # # https://github.com/zephyrproject-rtos/some-vendor-hal - # # Local clone path (relative to installation root) is: - # # ext/hal/some-vendor - # - name: some-vendor-hal - # remote: zephyrproject-rtos - # path: ext/hal/some-vendor - projects: - required: true - type: seq - sequence: - - type: map - mapping: - name: - required: true - type: str - remote: - required: false - type: str - revision: - required: false - type: text # SHAs could be only numbers - path: - required: false - type: str - clone-depth: - required: false - type: int diff --git a/scripts/meta/west/manifest.py b/scripts/meta/west/manifest.py deleted file mode 100644 index c3aa76a4a8a..00000000000 --- a/scripts/meta/west/manifest.py +++ /dev/null @@ -1,400 +0,0 @@ -# Copyright (c) 2018, Nordic Semiconductor ASA -# Copyright 2018, Foundries.io Ltd -# -# SPDX-License-Identifier: Apache-2.0 - -'''Parser and abstract data types for west manifests. - -The main class is Manifest. The recommended method for creating a -Manifest instance is via its from_file() or from_data() helper -methods. - -There are additionally Defaults, Remote, and Project types defined, -which represent the values by the same names in a west -manifest. (I.e. "Remote" represents one of the elements in the -"remote" sequence in the manifest, and so on.) Some Default values, -such as the default project revision, may be supplied by this module -if they are not present in the manifest data.''' - -import os - -import pykwalify.core -import yaml - -from west import util, log - -# Todo: take from _bootstrap? -# Default west repository URL. 
-WEST_URL_DEFAULT = 'https://github.com/zephyrproject-rtos/west' -# Default revision to check out of the west repository. -WEST_REV_DEFAULT = 'master' - -META_NAMES = ['west', 'manifest'] -'''Names of the special "meta-projects", which are reserved and cannot -be used to name a project in the manifest file.''' - -MANIFEST_SECTIONS = ['manifest', 'west'] -'''Sections in the manifest file''' - - -def default_path(): - '''Return the path to the default manifest in the west directory. - - Raises WestNotFound if called from outside of a west working directory.''' - return os.path.join(util.west_dir(), 'manifest', 'default.yml') - - -class Manifest: - '''Represents the contents of a West manifest file. - - The most convenient way to construct an instance is using the - from_file and from_data helper methods.''' - - @staticmethod - def from_file(source_file=None, sections=MANIFEST_SECTIONS): - '''Create and return a new Manifest object given a source YAML file. - - :param source_file: Path to a YAML file containing the manifest. - :param sections: Only parse specified sections from YAML file, - default: all sections are parsed. - - If source_file is None, the value returned by default_path() - is used. - - Raises MalformedManifest in case of validation errors.''' - if source_file is None: - source_file = default_path() - return Manifest(source_file=source_file, sections=sections) - - @staticmethod - def from_data(source_data, sections=MANIFEST_SECTIONS): - '''Create and return a new Manifest object given parsed YAML data. - - :param source_data: Parsed YAML data as a Python object. - :param sections: Only parse specified sections from YAML data, - default: all sections are parsed. - - Raises MalformedManifest in case of validation errors.''' - return Manifest(source_data=source_data, sections=sections) - - def __init__(self, source_file=None, source_data=None, - sections=MANIFEST_SECTIONS): - '''Create a new Manifest object. - - :param source_file: Path to a YAML file containing the manifest. - :param source_data: Parsed YAML data as a Python object. - :param sections: Only parse specified sections from YAML file, - default: all sections are parsed. - - Normally, it is more convenient to use the `from_file` and - `from_data` convenience factories than calling the constructor - directly. - - Exactly one of the source_file and source_data parameters must - be given. 
- - Raises MalformedManifest in case of validation errors.''' - if source_file and source_data: - raise ValueError('both source_file and source_data were given') - - if source_file: - with open(source_file, 'r') as f: - self._data = yaml.safe_load(f.read()) - path = source_file - else: - self._data = source_data - path = None - - self.path = path - '''Path to the file containing the manifest, or None if created - from data rather than the file system.''' - - if not self._data: - self._malformed('manifest contains no data') - - if 'manifest' not in self._data: - self._malformed('manifest contains no manifest element') - - for key in self._data: - if key in sections: - try: - pykwalify.core.Core( - source_data=self._data[key], - schema_files=[_SCHEMA_PATH[key]] - ).validate() - except pykwalify.errors.SchemaError as e: - self._malformed(e, key) - - self.defaults = None - '''west.manifest.Defaults object representing default values - in the manifest, either as specified by the user or west itself.''' - - self.remotes = None - '''Sequence of west.manifest.Remote objects representing manifest - remotes.''' - - self.projects = None - '''Sequence of west.manifest.Project objects representing manifest - projects. - - Each element's values are fully initialized; there is no need - to consult the defaults field to supply missing values.''' - - self.west_project = None - '''west.manifest.SpecialProject object representing the west meta - project.''' - - # Set up the public attributes documented above, as well as - # any internal attributes needed to implement the public API. - self._load(self._data, sections) - - def get_remote(self, name): - '''Get a manifest Remote, given its name.''' - return self._remotes_dict[name] - - def _malformed(self, complaint, section='manifest'): - context = (' file {} '.format(self.path) if self.path - else ' data:\n{}\n'.format(self._data)) - raise MalformedManifest('Malformed manifest{}(schema: {}):\n{}' - .format(context, _SCHEMA_PATH[section], - complaint)) - - def _load(self, data, sections): - # Initialize this instance's fields from values given in the - # manifest data, which must be validated according to the schema. - if 'west' in sections: - west = data.get('west', {}) - - url = west.get('url') or WEST_URL_DEFAULT - revision = west.get('revision') or WEST_REV_DEFAULT - - self.west_project = SpecialProject('west', - url=url, - revision=revision, - path=os.path.join('west', - 'west')) - - # Next is the manifest section - if 'manifest' not in sections: - return - - projects = [] - project_abspaths = set() - - manifest = data.get('manifest') - - # Map from each remote's name onto that remote's data in the manifest. - remotes = tuple(Remote(r['name'], r['url-base']) for r in - manifest['remotes']) - remotes_dict = {r.name: r for r in remotes} - - # Get any defaults out of the manifest. - # - # md = manifest defaults (dictionary with values parsed from - # the manifest) - md = manifest.get('defaults', dict()) - mdrem = md.get('remote') - if mdrem: - # The default remote name, if provided, must refer to a - # well-defined remote. - if mdrem not in remotes_dict: - self._malformed('default remote {} is not defined'. 
- format(mdrem)) - default_remote = remotes_dict[mdrem] - default_remote_name = mdrem - else: - default_remote = None - default_remote_name = None - defaults = Defaults(remote=default_remote, revision=md.get('revision')) - - # mp = manifest project (dictionary with values parsed from - # the manifest) - for mp in manifest['projects']: - # Validate the project name. - name = mp['name'] - if name in META_NAMES: - self._malformed('the name "{}" is reserved and cannot '. - format(name) + - 'be used to name a manifest project') - - # Validate the project remote. - remote_name = mp.get('remote', default_remote_name) - if remote_name is None: - self._malformed('project {} does not specify a remote'. - format(name)) - if remote_name not in remotes_dict: - self._malformed('project {} remote {} is not defined'. - format(name, remote_name)) - project = Project(name, - remotes_dict[remote_name], - defaults, - path=mp.get('path'), - clone_depth=mp.get('clone-depth'), - revision=mp.get('revision')) - - # Two projects cannot have the same path. We use absolute - # paths to check for collisions to ensure paths are - # normalized (e.g. for case-insensitive file systems or - # in cases like on Windows where / or \ may serve as a - # path component separator). - if project.abspath in project_abspaths: - self._malformed('project {} path {} is already in use'. - format(project.name, project.path)) - - project_abspaths.add(project.abspath) - projects.append(project) - - self.defaults = defaults - self.remotes = remotes - self._remotes_dict = remotes_dict - self.projects = tuple(projects) - - -class MalformedManifest(Exception): - '''Exception indicating that west manifest parsing failed due to a - malformed value.''' - - -# Definitions for Manifest attribute types. - -class Defaults: - '''Represents default values in a manifest, either specified by the - user or by west itself. - - Defaults are neither comparable nor hashable.''' - - __slots__ = 'remote revision'.split() - - def __init__(self, remote=None, revision=None): - '''Initialize a defaults value from manifest data. - - :param remote: Remote instance corresponding to the default remote, - or None (an actual Remote object, not the name of - a remote as a string). - :param revision: Default Git revision; 'master' if not given.''' - if remote is not None: - _wrn_if_not_remote(remote) - if revision is None: - revision = 'master' - - self.remote = remote - self.revision = revision - - def __eq__(self, other): - return NotImplemented - - def __repr__(self): - return 'Defaults(remote={}, revision={})'.format(repr(self.remote), - repr(self.revision)) - - -class Remote: - '''Represents a remote defined in a west manifest. - - Remotes may be compared for equality, but are not hashable.''' - - __slots__ = 'name url_base'.split() - - def __init__(self, name, url_base): - '''Initialize a remote from manifest data. - - :param name: remote's name - :param url_base: remote's URL base.''' - if url_base.endswith('/'): - log.wrn('Remote', name, 'URL base', url_base, - 'ends with a slash ("/"); these are automatically', - 'appended by West') - - self.name = name - self.url_base = url_base - - def __eq__(self, other): - return self.name == other.name and self.url_base == other.url_base - - def __repr__(self): - return 'Remote(name={}, url_base={})'.format(repr(self.name), - repr(self.url_base)) - - -class Project: - '''Represents a project defined in a west manifest. 
- - Projects are neither comparable nor hashable.''' - - __slots__ = 'name remote url path abspath clone_depth revision'.split() - - def __init__(self, name, remote, defaults, path=None, clone_depth=None, - revision=None): - '''Specify a Project by name, Remote, and optional information. - - :param name: Project's user-defined name in the manifest. - :param remote: Remote instance corresponding to this Project as - specified in the manifest. This is used to build - the project's URL, and is also stored as an attribute. - :param defaults: If the revision parameter is not given, the project's - revision is set to defaults.revision. - :param path: Relative path to the project in the west - installation, if present in the manifest. If not given, - the project's ``name`` is used. - :param clone_depth: Nonnegative integer clone depth if present in - the manifest. - :param revision: Project revision as given in the manifest, if present. - If not given, defaults.revision is used instead. - ''' - _wrn_if_not_remote(remote) - - self.name = name - self.remote = remote - self.url = remote.url_base + '/' + name - self.path = os.path.normpath(path or name) - self.abspath = os.path.realpath(os.path.join(util.west_topdir(), - self.path)) - self.clone_depth = clone_depth - self.revision = revision or defaults.revision - - def __eq__(self, other): - return NotImplemented - - def __repr__(self): - reprs = [repr(x) for x in - (self.name, self.remote, self.url, self.path, - self.abspath, self.clone_depth, self.revision)] - return ('Project(name={}, remote={}, url={}, path={}, abspath={}, ' - 'clone_depth={}, revision={})').format(*reprs) - - -class SpecialProject(Project): - '''Represents a special project, e.g. the west or manifest project. - - Projects are neither comparable nor hashable.''' - - def __init__(self, name, path=None, revision=None, url=None): - '''Specify a Special Project by name, and url, and optional information. - - :param name: Special Project's user-defined name in the manifest - :param path: Relative path to the project in the west - installation, if present in the manifest. If None, - the project's ``name`` is used. - :param revision: Project revision as given in the manifest, if present. - :param url: Complete URL for special project. - ''' - self.name = name - self.url = url - self.path = path or name - self.abspath = os.path.realpath(os.path.join(util.west_topdir(), - self.path)) - self.revision = revision - self.remote = None - self.clone_depth = None - - -def _wrn_if_not_remote(remote): - if not isinstance(remote, Remote): - log.wrn('Remote', remote, 'is not a Remote instance') - - -_SCHEMA_PATH = {'manifest': os.path.join(os.path.dirname(__file__), - "manifest-schema.yml"), - 'west': os.path.join(os.path.dirname(__file__), - "_bootstrap", - "west-schema.yml")} diff --git a/scripts/meta/west/runners/__init__.py b/scripts/meta/west/runners/__init__.py deleted file mode 100644 index 85b6532de77..00000000000 --- a/scripts/meta/west/runners/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -from west.runners.core import ZephyrBinaryRunner - -# We import these here to ensure the ZephyrBinaryRunner subclasses are -# defined; otherwise, ZephyrBinaryRunner.create_for_shell_script() -# won't work. - -# Explicitly silence the unused import warning. 
-# flake8: noqa: F401 -from west.runners import arc -from west.runners import bossac -from west.runners import dfu -from west.runners import esp32 -from west.runners import jlink -from west.runners import nios2 -from west.runners import nrfjprog -from west.runners import nsim -from west.runners import openocd -from west.runners import pyocd -from west.runners import qemu -from west.runners import xtensa -from west.runners import intel_s1000 -from west.runners import blackmagicprobe - -def get_runner_cls(runner): - '''Get a runner's class object, given its name.''' - for cls in ZephyrBinaryRunner.get_runners(): - if cls.name() == runner: - return cls - raise ValueError('unknown runner "{}"'.format(runner)) - -__all__ = ['ZephyrBinaryRunner', 'get_runner_cls'] diff --git a/scripts/meta/west/runners/arc.py b/scripts/meta/west/runners/arc.py deleted file mode 100644 index bb1253facfb..00000000000 --- a/scripts/meta/west/runners/arc.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# Copyright (c) 2017 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''ARC architecture-specific runners.''' - -from os import path - -from west.runners.core import ZephyrBinaryRunner - -DEFAULT_ARC_TCL_PORT = 6333 -DEFAULT_ARC_TELNET_PORT = 4444 -DEFAULT_ARC_GDB_PORT = 3333 - - -class EmStarterKitBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for the EM Starterkit board, using openocd.''' - - # This unusual 'flash' implementation matches the original shell script. - # - # It works by starting a GDB server in a separate session, connecting a - # client to it to load the program, and running 'continue' within the - # client to execute the application. - # - - def __init__(self, cfg, - tui=False, tcl_port=DEFAULT_ARC_TCL_PORT, - telnet_port=DEFAULT_ARC_TELNET_PORT, - gdb_port=DEFAULT_ARC_GDB_PORT): - super(EmStarterKitBinaryRunner, self).__init__(cfg) - self.gdb_cmd = [cfg.gdb] + (['-tui'] if tui else []) - search_args = [] - if cfg.openocd_search is not None: - search_args = ['-s', cfg.openocd_search] - self.openocd_cmd = [cfg.openocd or 'openocd'] + search_args - self.tcl_port = tcl_port - self.telnet_port = telnet_port - self.gdb_port = gdb_port - - @classmethod - def name(cls): - return 'em-starterkit' - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--tui', default=False, action='store_true', - help='if given, GDB uses -tui') - parser.add_argument('--tcl-port', default=DEFAULT_ARC_TCL_PORT, - help='openocd TCL port, defaults to 6333') - parser.add_argument('--telnet-port', default=DEFAULT_ARC_TELNET_PORT, - help='openocd telnet port, defaults to 4444') - parser.add_argument('--gdb-port', default=DEFAULT_ARC_GDB_PORT, - help='openocd gdb port, defaults to 3333') - - @classmethod - def create(cls, cfg, args): - if cfg.gdb is None: - raise ValueError('--gdb not provided at command line') - - return EmStarterKitBinaryRunner( - cfg, - tui=args.tui, tcl_port=args.tcl_port, telnet_port=args.telnet_port, - gdb_port=args.gdb_port) - - def do_run(self, command, **kwargs): - kwargs['openocd-cfg'] = path.join(self.cfg.board_dir, 'support', - 'openocd.cfg') - - if command in {'flash', 'debug'}: - self.flash_debug(command, **kwargs) - else: - self.debugserver(**kwargs) - - def flash_debug(self, command, **kwargs): - config = kwargs['openocd-cfg'] - - server_cmd = (self.openocd_cmd + - ['-f', config] + - ['-c', 'tcl_port {}'.format(self.tcl_port), - '-c', 'telnet_port {}'.format(self.telnet_port), - '-c', 'gdb_port {}'.format(self.gdb_port), - 
'-c', 'init', - '-c', 'targets', - '-c', 'halt']) - - continue_arg = [] - if command == 'flash': - continue_arg = ['-ex', 'set confirm off', '-ex', 'monitor resume', - '-ex', 'quit'] - - gdb_cmd = (self.gdb_cmd + - ['-ex', 'target remote :{}'.format(self.gdb_port), - '-ex', 'load'] + - continue_arg + - [self.cfg.elf_file]) - - self.run_server_and_client(server_cmd, gdb_cmd) - - def debugserver(self, **kwargs): - config = kwargs['openocd-cfg'] - cmd = (self.openocd_cmd + - ['-f', config, - '-c', 'init', - '-c', 'targets', - '-c', 'reset halt']) - self.check_call(cmd) diff --git a/scripts/meta/west/runners/blackmagicprobe.py b/scripts/meta/west/runners/blackmagicprobe.py deleted file mode 100644 index dd19526ad38..00000000000 --- a/scripts/meta/west/runners/blackmagicprobe.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (c) 2018 Roman Tataurov -# Modified 2018 Tavish Naruka -# -# SPDX-License-Identifier: Apache-2.0 -'''Runner for flashing with Black Magic Probe.''' -# https://github.com/blacksphere/blackmagic/wiki - -from west.runners.core import ZephyrBinaryRunner, RunnerCaps - - -class BlackMagicProbeRunner(ZephyrBinaryRunner): - '''Runner front-end for Black Magic probe.''' - - def __init__(self, cfg, gdb_serial): - super(BlackMagicProbeRunner, self).__init__(cfg) - self.gdb = [cfg.gdb] if cfg.gdb else None - self.elf_file = cfg.elf_file - self.gdb_serial = gdb_serial - - @classmethod - def name(cls): - return 'blackmagicprobe' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash', 'debug', 'attach'}) - - @classmethod - def create(cls, cfg, args): - return BlackMagicProbeRunner(cfg, args.gdb_serial) - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--gdb-serial', default='/dev/ttyACM0', - help='GDB serial port') - - def bmp_flash(self, command, **kwargs): - if self.gdb is None: - raise ValueError('Cannot flash; gdb is missing') - if self.elf_file is None: - raise ValueError('Cannot debug; elf file is missing') - command = (self.gdb + - ['-ex', "set confirm off", - '-ex', "target extended-remote {}".format(self.gdb_serial), - '-ex', "monitor swdp_scan", - '-ex', "attach 1", - '-ex', "load {}".format(self.elf_file), - '-ex', "kill", - '-ex', "quit", - '-silent']) - self.check_call(command) - - def bmp_attach(self, command, **kwargs): - if self.gdb is None: - raise ValueError('Cannot attach; gdb is missing') - if self.elf_file is None: - command = (self.gdb + - ['-ex', "set confirm off", - '-ex', "target extended-remote {}".format( - self.gdb_serial), - '-ex', "monitor swdp_scan", - '-ex', "attach 1"]) - else: - command = (self.gdb + - ['-ex', "set confirm off", - '-ex', "target extended-remote {}".format( - self.gdb_serial), - '-ex', "monitor swdp_scan", - '-ex', "attach 1", - '-ex', "file {}".format(self.elf_file)]) - self.check_call(command) - - def bmp_debug(self, command, **kwargs): - if self.gdb is None: - raise ValueError('Cannot debug; gdb is missing') - if self.elf_file is None: - raise ValueError('Cannot debug; elf file is missing') - command = (self.gdb + - ['-ex', "set confirm off", - '-ex', "target extended-remote {}".format(self.gdb_serial), - '-ex', "monitor swdp_scan", - '-ex', "attach 1", - '-ex', "file {}".format(self.elf_file), - '-ex', "load {}".format(self.elf_file)]) - self.check_call(command) - - def do_run(self, command, **kwargs): - - if command == 'flash': - self.bmp_flash(command, **kwargs) - elif command == 'debug': - self.bmp_debug(command, **kwargs) - elif command == 'attach': - self.bmp_attach(command, **kwargs) 
- else: - self.bmp_flash(command, **kwargs) diff --git a/scripts/meta/west/runners/bossac.py b/scripts/meta/west/runners/bossac.py deleted file mode 100644 index 5ba6c480b12..00000000000 --- a/scripts/meta/west/runners/bossac.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''bossac-specific runner (flash only) for Atmel SAM microcontrollers.''' - -import platform - -from west.runners.core import ZephyrBinaryRunner, RunnerCaps - -DEFAULT_BOSSAC_PORT = '/dev/ttyACM0' - - -class BossacBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for bossac.''' - - def __init__(self, cfg, bossac='bossac', port=DEFAULT_BOSSAC_PORT): - super(BossacBinaryRunner, self).__init__(cfg) - self.bossac = bossac - self.port = port - - @classmethod - def name(cls): - return 'bossac' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash'}) - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--bossac', default='bossac', - help='path to bossac, default is bossac') - parser.add_argument('--bossac-port', default='/dev/ttyACM0', - help='serial port to use, default is /dev/ttyACM0') - - @classmethod - def create(cls, cfg, args): - return BossacBinaryRunner(cfg, bossac=args.bossac, - port=args.bossac_port) - - def do_run(self, command, **kwargs): - if platform.system() != 'Linux': - msg = 'CAUTION: No flash tool for your host system found!' - raise NotImplementedError(msg) - - cmd_stty = ['stty', '-F', self.port, 'raw', 'ispeed', '1200', - 'ospeed', '1200', 'cs8', '-cstopb', 'ignpar', 'eol', '255', - 'eof', '255'] - cmd_flash = [self.bossac, '-p', self.port, '-R', '-e', '-w', '-v', - '-b', self.cfg.bin_file] - - self.check_call(cmd_stty) - self.check_call(cmd_flash) diff --git a/scripts/meta/west/runners/core.py b/scripts/meta/west/runners/core.py deleted file mode 100644 index 60dc6cc5979..00000000000 --- a/scripts/meta/west/runners/core.py +++ /dev/null @@ -1,508 +0,0 @@ -#! /usr/bin/env python3 - -# Copyright (c) 2017 Linaro Limited. -# Copyright (c) 2017 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -"""Zephyr binary runner core interfaces - -This provides the core ZephyrBinaryRunner class meant for public use, -as well as some other helpers for concrete runner classes. -""" - -import abc -import argparse -import os -import platform -import signal -import subprocess - -from west import log -from west.util import quote_sh_list - -# Turn on to enable just printing the commands that would be run, -# without actually running them. This can break runners that are expecting -# output or if one command depends on another, so it's just for debugging. -JUST_PRINT = False - - -class _DebugDummyPopen: - - def terminate(self): - pass - - def wait(self): - pass - - -MAX_PORT = 49151 - - -class NetworkPortHelper: - '''Helper class for dealing with local IP network ports.''' - - def get_unused_ports(self, starting_from): - '''Find unused network ports, starting at given values. - - starting_from is an iterable of ports the caller would like to use. - - The return value is an iterable of ports, in the same order, using - the given values if they were unused, or the next sequentially - available unused port otherwise. 
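# Illustrative usage of the port search described above (the starting port
# numbers are just examples); assumes the platform's netstat/ss tool exists.
from west.runners.core import NetworkPortHelper

helper = NetworkPortHelper()
# Ask for the conventional GDB and telnet ports; if either is already bound,
# the next sequentially free port is returned instead, in the same order.
gdb_port, telnet_port = helper.get_unused_ports([3333, 4444])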
- - Ports may be bound between this call's check and actual usage, so - callers still need to handle errors involving returned ports.''' - start = list(starting_from) - used = self._used_now() - ret = [] - - for desired in start: - port = desired - while port in used: - port += 1 - if port > MAX_PORT: - msg = "ports above {} are in use" - raise ValueError(msg.format(desired)) - used.add(port) - ret.append(port) - - return ret - - def _used_now(self): - handlers = { - 'Windows': self._used_now_windows, - 'Linux': self._used_now_linux, - 'Darwin': self._used_now_darwin, - } - handler = handlers[platform.system()] - return handler() - - def _used_now_windows(self): - cmd = ['netstat', '-a', '-n', '-p', 'tcp'] - return self._parser_windows(cmd) - - def _used_now_linux(self): - cmd = ['ss', '-a', '-n', '-t'] - return self._parser_linux(cmd) - - def _used_now_darwin(self): - cmd = ['netstat', '-a', '-n', '-p', 'tcp'] - return self._parser_darwin(cmd) - - def _parser_windows(self, cmd): - out = subprocess.check_output(cmd).split(b'\r\n') - used_bytes = [x.split()[1].rsplit(b':', 1)[1] for x in out - if x.startswith(b' TCP')] - return {int(b) for b in used_bytes} - - def _parser_linux(self, cmd): - out = subprocess.check_output(cmd).splitlines()[1:] - used_bytes = [s.split()[3].rsplit(b':', 1)[1] for s in out] - return {int(b) for b in used_bytes} - - def _parser_darwin(self, cmd): - out = subprocess.check_output(cmd).split(b'\n') - used_bytes = [x.split()[3].rsplit(b':', 1)[1] for x in out - if x.startswith(b'tcp')] - return {int(b) for b in used_bytes} - - -class BuildConfiguration: - '''This helper class provides access to build-time configuration. - - Configuration options can be read as if the object were a dict, - either object['CONFIG_FOO'] or object.get('CONFIG_FOO'). - - Configuration values in .config and generated_dts_board.conf are - available.''' - - def __init__(self, build_dir): - self.build_dir = build_dir - self.options = {} - self._init() - - def __getitem__(self, item): - return self.options[item] - - def get(self, option, *args): - return self.options.get(option, *args) - - def _init(self): - build_z = os.path.join(self.build_dir, 'zephyr') - generated = os.path.join(build_z, 'include', 'generated') - files = [os.path.join(build_z, '.config'), - os.path.join(generated, 'generated_dts_board.conf')] - for f in files: - self._parse(f) - - def _parse(self, filename): - with open(filename, 'r') as f: - for line in f: - line = line.strip() - if not line or line.startswith('#'): - continue - option, value = line.split('=', 1) - self.options[option] = self._parse_value(value) - - def _parse_value(self, value): - if value.startswith('"') or value.startswith("'"): - return value.split() - try: - return int(value, 0) - except ValueError: - return value - - -class RunnerCaps: - '''This class represents a runner class's capabilities. - - Each capability is represented as an attribute with the same - name. Flag attributes are True or False. - - Available capabilities: - - - commands: set of supported commands; default is {'flash', - 'debug', 'debugserver', 'attach'}. - - - flash_addr: whether the runner supports flashing to an - arbitrary address. Default is False. If true, the runner - must honor the --dt-flash option. 
- ''' - - def __init__(self, - commands={'flash', 'debug', 'debugserver', 'attach'}, - flash_addr=False): - self.commands = commands - self.flash_addr = bool(flash_addr) - - def __str__(self): - return 'RunnerCaps(commands={}, flash_addr={})'.format( - self.commands, self.flash_addr) - - -class RunnerConfig: - '''Runner execution-time configuration. - - This is a common object shared by all runners. Individual runners - can register specific configuration options using their - do_add_parser() hooks. - - This class's __slots__ contains exactly the configuration variables. - ''' - - __slots__ = ['build_dir', 'board_dir', 'elf_file', 'hex_file', - 'bin_file', 'gdb', 'openocd', 'openocd_search'] - - # TODO: revisit whether we can get rid of some of these. Having - # tool-specific configuration options here is a layering - # violation, but it's very convenient to have a single place to - # store the locations of tools (like gdb and openocd) that are - # needed by multiple ZephyrBinaryRunner subclasses. - def __init__(self, build_dir, board_dir, - elf_file, hex_file, bin_file, - gdb=None, openocd=None, openocd_search=None): - self.build_dir = build_dir - '''Zephyr application build directory''' - - self.board_dir = board_dir - '''Zephyr board directory''' - - self.elf_file = elf_file - '''Path to the elf file that the runner should operate on''' - - self.hex_file = hex_file - '''Path to the hex file that the runner should operate on''' - - self.bin_file = bin_file - '''Path to the bin file that the runner should operate on''' - - self.gdb = gdb - ''''Path to GDB compatible with the target, may be None.''' - - self.openocd = openocd - '''Path to OpenOCD to use for this target, may be None.''' - - self.openocd_search = openocd_search - '''directory to add to OpenOCD search path, may be None.''' - - -_YN_CHOICES = ['Y', 'y', 'N', 'n', 'yes', 'no', 'YES', 'NO'] - - -class _DTFlashAction(argparse.Action): - - def __call__(self, parser, namespace, values, option_string=None): - if values.lower().startswith('y'): - namespace.dt_flash = True - else: - namespace.dt_flash = False - - -class ZephyrBinaryRunner(abc.ABC): - '''Abstract superclass for binary runners (flashers, debuggers). - - **Note**: these APIs are still evolving, and will change! - - With some exceptions, boards supported by Zephyr must provide - generic means to be flashed (have a Zephyr firmware binary - permanently installed on the device for running) and debugged - (have a breakpoint debugger and program loader on a host - workstation attached to a running target). - - This is supported by three top-level commands managed by the - Zephyr build system: - - - 'flash': flash a previously configured binary to the board, - start execution on the target, then return. - - - 'debug': connect to the board via a debugging protocol, program - the flash, then drop the user into a debugger interface with - symbol tables loaded from the current binary, and block until it - exits. - - - 'debugserver': connect via a board-specific debugging protocol, - then reset and halt the target. Ensure the user is now able to - connect to a debug server with symbol tables loaded from the - binary. - - - 'attach': connect to the board via a debugging protocol, then drop - the user into a debugger interface with symbol tables loaded from - the current binary, and block until it exits. Unlike 'debug', this - command does not program the flash. - - This class provides an API for these commands. 
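# Illustrative sketch of constructing the shared configuration object
# described above; every path and tool name below is hypothetical.
from os import path
from west.runners.core import RunnerConfig

build_dir = 'build'                                    # hypothetical build tree
cfg = RunnerConfig(build_dir=build_dir,
                   board_dir='boards/arm/some_board',  # hypothetical
                   elf_file=path.join(build_dir, 'zephyr', 'zephyr.elf'),
                   hex_file=path.join(build_dir, 'zephyr', 'zephyr.hex'),
                   bin_file=path.join(build_dir, 'zephyr', 'zephyr.bin'),
                   gdb='arm-none-eabi-gdb',            # may be None
                   openocd=None, openocd_search=None)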
Every runner has a - name (like 'pyocd'), and declares commands it can handle (like - 'flash'). Zephyr boards (like 'nrf52_pca10040') declare compatible - runner(s) by name to the build system, which makes concrete runner - instances to execute commands via this class. - - If your board can use an existing runner, all you have to do is - give its name to the build system. How to do that is out of the - scope of this documentation, but use the existing boards as a - starting point. - - If you want to define and use your own runner: - - 1. Define a ZephyrBinaryRunner subclass, and implement its - abstract methods. You may need to override capabilities(). - - 2. Make sure the Python module defining your runner class is - imported, e.g. by editing this package's __init__.py (otherwise, - get_runners() won't work). - - 3. Give your runner's name to the Zephyr build system in your - board's build files. - - For command-line invocation from the Zephyr build system, runners - define their own argparse-based interface through the common - add_parser() (and runner-specific do_add_parser() it delegates - to), and provide a way to create instances of themselves from - a RunnerConfig and parsed runner-specific arguments via create(). - - Runners use a variety of target-specific tools and configuration - values, the user interface to which is abstracted by this - class. Each runner subclass should take any values it needs to - execute one of these commands in its constructor. The actual - command execution is handled in the run() method.''' - - def __init__(self, cfg): - '''Initialize core runner state. - - `cfg` is a RunnerConfig instance.''' - self.cfg = cfg - - @staticmethod - def get_runners(): - '''Get a list of all currently defined runner classes.''' - return ZephyrBinaryRunner.__subclasses__() - - @classmethod - @abc.abstractmethod - def name(cls): - '''Return this runner's user-visible name. - - When choosing a name, pick something short and lowercase, - based on the name of the tool (like openocd, jlink, etc.) or - the target architecture/board (like xtensa, em-starterkit, - etc.).''' - - @classmethod - def capabilities(cls): - '''Returns a RunnerCaps representing this runner's capabilities. - - This implementation returns the default capabilities. - - Subclasses should override appropriately if needed.''' - return RunnerCaps() - - @classmethod - def add_parser(cls, parser): - '''Adds a sub-command parser for this runner. - - The given object, parser, is a sub-command parser from the - argparse module. For more details, refer to the documentation - for argparse.ArgumentParser.add_subparsers(). - - The lone common optional argument is: - - * --dt-flash (if the runner capabilities includes flash_addr) - - Runner-specific options are added through the do_add_parser() - hook.''' - # Common options that depend on runner capabilities. - if cls.capabilities().flash_addr: - parser.add_argument('--dt-flash', default='n', choices=_YN_CHOICES, - action=_DTFlashAction, - help='''If 'yes', use configuration generated - by device tree (DT) to compute flash - addresses.''') - - # Runner-specific options. - cls.do_add_parser(parser) - - @classmethod - @abc.abstractmethod - def do_add_parser(cls, parser): - '''Hook for adding runner-specific options.''' - - @classmethod - @abc.abstractmethod - def create(cls, cfg, args): - '''Create an instance from command-line arguments. 
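# A minimal, hypothetical runner following the steps described above;
# "mytool" and its --mytool-port option are invented for illustration and
# modeled on the flash-only runners (e.g. bossac) elsewhere in this patch.
from west.runners.core import ZephyrBinaryRunner, RunnerCaps


class MyToolBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for a hypothetical "mytool" flasher.'''

    def __init__(self, cfg, port):
        super(MyToolBinaryRunner, self).__init__(cfg)
        self.port = port

    @classmethod
    def name(cls):
        return 'mytool'

    @classmethod
    def capabilities(cls):
        # Flash-only tool: don't advertise debug/debugserver/attach.
        return RunnerCaps(commands={'flash'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--mytool-port', default='/dev/ttyACM0',
                            help='serial port mytool should use')

    @classmethod
    def create(cls, cfg, args):
        return MyToolBinaryRunner(cfg, args.mytool_port)

    def do_run(self, command, **kwargs):
        # cfg.bin_file comes from the common RunnerConfig.
        self.check_call(['mytool', '--port', self.port,
                         '--write', self.cfg.bin_file])

# As step 2 above notes, the defining module must also be imported from
# west/runners/__init__.py so the subclass is registered with get_runners().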
- - - `cfg`: RunnerConfig instance (pass to superclass __init__) - - `args`: runner-specific argument namespace parsed from - execution environment, as specified by `add_parser()`.''' - - @classmethod - def get_flash_address(cls, args, build_conf, default=0x0): - '''Helper method for extracting a flash address. - - If args.dt_flash is true, get the address from the - BoardConfiguration, build_conf. (If - CONFIG_HAS_FLASH_LOAD_OFFSET is n in that configuration, it - returns CONFIG_FLASH_BASE_ADDRESS. Otherwise, it returns - CONFIG_FLASH_BASE_ADDRESS + CONFIG_FLASH_LOAD_OFFSET.) - - Otherwise (when args.dt_flash is False), the default value is - returned.''' - if args.dt_flash: - if build_conf['CONFIG_HAS_FLASH_LOAD_OFFSET']: - return (build_conf['CONFIG_FLASH_BASE_ADDRESS'] + - build_conf['CONFIG_FLASH_LOAD_OFFSET']) - else: - return build_conf['CONFIG_FLASH_BASE_ADDRESS'] - else: - return default - - def run(self, command, **kwargs): - '''Runs command ('flash', 'debug', 'debugserver', 'attach'). - - This is the main entry point to this runner.''' - caps = self.capabilities() - if command not in caps.commands: - raise ValueError('runner {} does not implement command {}'.format( - self.name(), command)) - self.do_run(command, **kwargs) - - @abc.abstractmethod - def do_run(self, command, **kwargs): - '''Concrete runner; run() delegates to this. Implement in subclasses. - - In case of an unsupported command, raise a ValueError.''' - - def run_server_and_client(self, server, client): - '''Run a server that ignores SIGINT, and a client that handles it. - - This routine portably: - - - creates a Popen object for the ``server`` command which ignores - SIGINT - - runs ``client`` in a subprocess while temporarily ignoring SIGINT - - cleans up the server after the client exits. - - It's useful to e.g. open a GDB server and client.''' - server_proc = self.popen_ignore_int(server) - previous = signal.signal(signal.SIGINT, signal.SIG_IGN) - try: - self.check_call(client) - finally: - signal.signal(signal.SIGINT, previous) - server_proc.terminate() - server_proc.wait() - - def call(self, cmd): - '''Subclass subprocess.call() wrapper. - - Subclasses should use this method to run command in a - subprocess and get its return code, rather than - using subprocess directly, to keep accurate debug logs. - ''' - quoted = quote_sh_list(cmd) - - if JUST_PRINT: - log.inf(quoted) - return 0 - - log.dbg(quoted) - return subprocess.call(cmd) - - def check_call(self, cmd): - '''Subclass subprocess.check_call() wrapper. - - Subclasses should use this method to run command in a - subprocess and check that it executed correctly, rather than - using subprocess directly, to keep accurate debug logs. - ''' - quoted = quote_sh_list(cmd) - - if JUST_PRINT: - log.inf(quoted) - return - - log.dbg(quoted) - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - raise - - def check_output(self, cmd): - '''Subclass subprocess.check_output() wrapper. - - Subclasses should use this method to run command in a - subprocess and check that it executed correctly, rather than - using subprocess directly, to keep accurate debug logs. - ''' - quoted = quote_sh_list(cmd) - - if JUST_PRINT: - log.inf(quoted) - return b'' - - log.dbg(quoted) - try: - return subprocess.check_output(cmd) - except subprocess.CalledProcessError: - raise - - def popen_ignore_int(self, cmd): - '''Spawn a child command, ensuring it ignores SIGINT. 
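# A worked example of the flash-address computation described above, using
# hypothetical configuration values rather than any real board's settings:
base, offset = 0x08000000, 0x10000   # CONFIG_FLASH_BASE_ADDRESS / _LOAD_OFFSET
has_load_offset = True               # CONFIG_HAS_FLASH_LOAD_OFFSET=y
flash_addr = base + offset if has_load_offset else base
assert flash_addr == 0x08010000      # with --dt-flash=n, the default is used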
- - The returned subprocess.Popen object must be manually terminated.''' - cflags = 0 - preexec = None - system = platform.system() - quoted = quote_sh_list(cmd) - - if system == 'Windows': - cflags |= subprocess.CREATE_NEW_PROCESS_GROUP - elif system in {'Linux', 'Darwin'}: - preexec = os.setsid - - if JUST_PRINT: - log.inf(quoted) - return _DebugDummyPopen() - - log.dbg(quoted) - return subprocess.Popen(cmd, creationflags=cflags, preexec_fn=preexec) diff --git a/scripts/meta/west/runners/dfu.py b/scripts/meta/west/runners/dfu.py deleted file mode 100644 index 595409964c8..00000000000 --- a/scripts/meta/west/runners/dfu.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for flashing with dfu-util.''' - -from collections import namedtuple -import sys -import time - -from west import log -from west.runners.core import ZephyrBinaryRunner, RunnerCaps, \ - BuildConfiguration - - -DfuSeConfig = namedtuple('DfuSeConfig', ['address', 'options']) - - -class DfuUtilBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for dfu-util.''' - - def __init__(self, cfg, pid, alt, img, exe='dfu-util', - dfuse_config=None): - super(DfuUtilBinaryRunner, self).__init__(cfg) - self.alt = alt - self.img = img - self.cmd = [exe, '-d,{}'.format(pid)] - try: - self.list_pattern = ', alt={},'.format(int(self.alt)) - except ValueError: - self.list_pattern = ', name="{}",'.format(self.alt) - - if dfuse_config is None: - self.dfuse = False - else: - self.dfuse = True - self.dfuse_config = dfuse_config - - @classmethod - def name(cls): - return 'dfu-util' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash'}, flash_addr=True) - - @classmethod - def do_add_parser(cls, parser): - # Required: - parser.add_argument("--pid", required=True, - help="USB VID:PID of the board") - parser.add_argument("--alt", required=True, - help="interface alternate setting number or name") - - # Optional: - parser.add_argument("--img", - help="binary to flash, default is --bin-file") - parser.add_argument("--dfuse", default=False, action='store_true', - help='''set if target is a DfuSe device; - implies --dt-flash.''') - parser.add_argument("--dfuse-modifiers", default='leave', - help='''colon-separated list of DfuSe modifiers - (default is "leave", which starts execution - immediately); --dfuse must also be given for this - option to take effect.''') - parser.add_argument('--dfu-util', default='dfu-util', - help='dfu-util executable; defaults to "dfu-util"') - - @classmethod - def create(cls, cfg, args): - if args.img is None: - args.img = cfg.bin_file - - if args.dfuse: - args.dt_flash = True # --dfuse implies --dt-flash. - build_conf = BuildConfiguration(cfg.build_dir) - dcfg = DfuSeConfig(address=cls.get_flash_address(args, build_conf), - options=args.dfuse_modifiers) - else: - dcfg = None - - return DfuUtilBinaryRunner(cfg, args.pid, args.alt, args.img, - exe=args.dfu_util, dfuse_config=dcfg) - - def find_device(self): - cmd = list(self.cmd) + ['-l'] - output = self.check_output(cmd) - output = output.decode(sys.getdefaultencoding()) - return self.list_pattern in output - - def do_run(self, command, **kwargs): - reset = False - if not self.find_device(): - reset = True - log.dbg('Device not found, waiting for it', - level=log.VERBOSE_EXTREME) - # Use of print() here is advised. We don't want to lose - # this information in a separate log -- this is - # interactive and requires a terminal. 
- print('Please reset your board to switch to DFU mode...') - while not self.find_device(): - time.sleep(0.1) - - cmd = list(self.cmd) - if self.dfuse: - # http://dfu-util.sourceforge.net/dfuse.html - dcfg = self.dfuse_config - addr_opts = hex(dcfg.address) + ':' + dcfg.options - cmd.extend(['-s', addr_opts]) - cmd.extend(['-a', self.alt, '-D', self.img]) - self.check_call(cmd) - - if self.dfuse and 'leave' in dcfg.options.split(':'): - # Normal DFU devices generally need to be reset to switch - # back to the flashed program. - # - # DfuSe targets do as well, except when 'leave' is given - # as an option. - reset = False - if reset: - print('Now reset your board again to switch back to runtime mode.') diff --git a/scripts/meta/west/runners/esp32.py b/scripts/meta/west/runners/esp32.py deleted file mode 100644 index e13fe1b8ef6..00000000000 --- a/scripts/meta/west/runners/esp32.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for flashing ESP32 devices with esptool/espidf.''' - -from os import path - -from west import log -from west.runners.core import ZephyrBinaryRunner, RunnerCaps - - -class Esp32BinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for espidf.''' - - def __init__(self, cfg, device, baud=921600, flash_size='detect', - flash_freq='40m', flash_mode='dio', espidf='espidf', - bootloader_bin=None, partition_table_bin=None): - super(Esp32BinaryRunner, self).__init__(cfg) - self.elf = cfg.elf_file - self.device = device - self.baud = baud - self.flash_size = flash_size - self.flash_freq = flash_freq - self.flash_mode = flash_mode - self.espidf = espidf - self.bootloader_bin = bootloader_bin - self.partition_table_bin = partition_table_bin - - @classmethod - def name(cls): - return 'esp32' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash'}) - - @classmethod - def do_add_parser(cls, parser): - # Required - parser.add_argument('--esp-idf-path', required=True, - help='path to ESP-IDF') - - # Optional - parser.add_argument('--esp-device', default='/dev/ttyUSB0', - help='serial port to flash, default /dev/ttyUSB0') - parser.add_argument('--esp-baud-rate', default='921600', - help='serial baud rate, default 921600') - parser.add_argument('--esp-flash-size', default='detect', - help='flash size, default "detect"') - parser.add_argument('--esp-flash-freq', default='40m', - help='flash frequency, default "40m"') - parser.add_argument('--esp-flash-mode', default='dio', - help='flash mode, default "dio"') - parser.add_argument( - '--esp-tool', - help='''if given, complete path to espidf. 
default is to search for - it in [ESP_IDF_PATH]/components/esptool_py/esptool/esptool.py''') - parser.add_argument('--esp-flash-bootloader', - help='Bootloader image to flash') - parser.add_argument('--esp-flash-partition_table', - help='Partition table to flash') - - @classmethod - def create(cls, cfg, args): - if args.esp_tool: - espidf = args.esp_tool - else: - espidf = path.join(args.esp_idf_path, 'components', 'esptool_py', - 'esptool', 'esptool.py') - - return Esp32BinaryRunner( - cfg, args.esp_device, baud=args.esp_baud_rate, - flash_size=args.esp_flash_size, flash_freq=args.esp_flash_freq, - flash_mode=args.esp_flash_mode, espidf=espidf, - bootloader_bin=args.esp_flash_bootloader, - partition_table_bin=args.esp_flash_partition_table) - - def do_run(self, command, **kwargs): - bin_name = path.splitext(self.elf)[0] + path.extsep + 'bin' - cmd_convert = [self.espidf, '--chip', 'esp32', 'elf2image', self.elf] - cmd_flash = [self.espidf, '--chip', 'esp32', '--port', self.device, - '--baud', self.baud, '--before', 'default_reset', - '--after', 'hard_reset', 'write_flash', '-u', - '--flash_mode', self.flash_mode, - '--flash_freq', self.flash_freq, - '--flash_size', self.flash_size] - - if self.bootloader_bin: - cmd_flash.extend(['0x1000', self.bootloader_bin]) - cmd_flash.extend(['0x8000', self.partition_table_bin]) - cmd_flash.extend(['0x10000', bin_name]) - else: - cmd_flash.extend(['0x1000', bin_name]) - - log.inf("Converting ELF to BIN") - self.check_call(cmd_convert) - - log.inf("Flashing ESP32 on {} ({}bps)".format(self.device, self.baud)) - self.check_call(cmd_flash) diff --git a/scripts/meta/west/runners/intel_s1000.py b/scripts/meta/west/runners/intel_s1000.py deleted file mode 100644 index c56c9d5953d..00000000000 --- a/scripts/meta/west/runners/intel_s1000.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) 2018 Intel Corporation. -# Copyright 2018 Open Source Foundries Limited. 
-# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for debugging and flashing Intel S1000 CRB''' -from os import path -import time -import signal -from west import log -from west.runners.core import ZephyrBinaryRunner - -DEFAULT_XT_GDB_PORT = 20000 - - -class IntelS1000BinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for Intel S1000.''' - - def __init__(self, cfg, xt_ocd_dir, - ocd_topology, ocd_jtag_instr, gdb_flash_file, - gdb_port=DEFAULT_XT_GDB_PORT): - super(IntelS1000BinaryRunner, self).__init__(cfg) - self.board_dir = cfg.board_dir - self.elf_name = cfg.elf_file - self.gdb_cmd = cfg.gdb - self.xt_ocd_dir = xt_ocd_dir - self.ocd_topology = ocd_topology - self.ocd_jtag_instr = ocd_jtag_instr - self.gdb_flash_file = gdb_flash_file - self.gdb_port = gdb_port - - @classmethod - def name(cls): - return 'intel_s1000' - - @classmethod - def do_add_parser(cls, parser): - # Optional - parser.add_argument( - '--xt-ocd-dir', default='/opt/tensilica/xocd-12.0.4/xt-ocd', - help='ocd-dir, defaults to /opt/tensilica/xocd-12.0.4/xt-ocd') - parser.add_argument( - '--ocd-topology', default='topology_dsp0_flyswatter2.xml', - help='ocd-topology, defaults to topology_dsp0_flyswatter2.xml') - parser.add_argument( - '--ocd-jtag-instr', default='dsp0_gdb.txt', - help='ocd-jtag-instr, defaults to dsp0_gdb.txt') - parser.add_argument( - '--gdb-flash-file', default='load_elf.txt', - help='gdb-flash-file, defaults to load_elf.txt') - parser.add_argument( - '--gdb-port', default=DEFAULT_XT_GDB_PORT, - help='xt-gdb port, defaults to 20000') - - @classmethod - def create(cls, cfg, args): - return IntelS1000BinaryRunner( - cfg, args.xt_ocd_dir, - args.ocd_topology, args.ocd_jtag_instr, args.gdb_flash_file, - gdb_port=args.gdb_port) - - def do_run(self, command, **kwargs): - kwargs['ocd-topology'] = path.join(self.board_dir, 'support', - self.ocd_topology) - kwargs['ocd-jtag-instr'] = path.join(self.board_dir, 'support', - self.ocd_jtag_instr) - kwargs['gdb-flash-file'] = path.join(self.board_dir, 'support', - self.gdb_flash_file) - - if command == 'flash': - self.flash(**kwargs) - elif command == 'debugserver': - self.debugserver(**kwargs) - else: - self.do_debug(**kwargs) - - def flash(self, **kwargs): - topology_file = kwargs['ocd-topology'] - jtag_instr_file = kwargs['ocd-jtag-instr'] - gdb_flash_file = kwargs['gdb-flash-file'] - - self.print_gdbserver_message(self.gdb_port) - server_cmd = [self.xt_ocd_dir, - '-c', topology_file, - '-I', jtag_instr_file] - - # Start the server - # Note that XTOCD takes a few seconds to execute and always fails the - # first time. It has to be relaunched the second time to work. - server_proc = self.popen_ignore_int(server_cmd) - time.sleep(6) - server_proc.terminate() - server_proc = self.popen_ignore_int(server_cmd) - time.sleep(6) - - # Start the client - gdb_cmd = [self.gdb_cmd, '-x', gdb_flash_file] - client_proc = self.popen_ignore_int(gdb_cmd) - - # Wait for 3 seconds (waiting for XTGDB to finish loading the image) - time.sleep(3) - - # At this point, the ELF image is loaded and the program is in - # execution. Now we can quit the client (xt-gdb) and the server - # (xt-ocd) as they are not needed anymore. The loaded program - # (ELF) will continue to run though. 
- client_proc.terminate() - server_proc.terminate() - - def do_debug(self, **kwargs): - if self.elf_name is None: - raise ValueError('Cannot debug; elf is missing') - if self.gdb_cmd is None: - raise ValueError('Cannot debug; no gdb specified') - - topology_file = kwargs['ocd-topology'] - jtag_instr_file = kwargs['ocd-jtag-instr'] - - self.print_gdbserver_message(self.gdb_port) - server_cmd = [self.xt_ocd_dir, - '-c', topology_file, - '-I', jtag_instr_file] - - # Start the server - # Note that XTOCD takes a few seconds to execute and always fails the - # first time. It has to be relaunched the second time to work. - server_proc = self.popen_ignore_int(server_cmd) - time.sleep(6) - server_proc.terminate() - server_proc = self.popen_ignore_int(server_cmd) - time.sleep(6) - - gdb_cmd = [self.gdb_cmd, - '-ex', 'target remote :{}'.format(self.gdb_port), - self.elf_name] - - # Start the client - # The below statement will consume the "^C" keypress ensuring - # the python main application doesn't exit. This is important - # since ^C in gdb means a "halt" operation. - previous = signal.signal(signal.SIGINT, signal.SIG_IGN) - try: - self.check_call(gdb_cmd) - finally: - signal.signal(signal.SIGINT, previous) - server_proc.terminate() - server_proc.wait() - - def print_gdbserver_message(self, gdb_port): - log.inf('Intel S1000 GDB server running on port {}'.format(gdb_port)) - - def debugserver(self, **kwargs): - topology_file = kwargs['ocd-topology'] - jtag_instr_file = kwargs['ocd-jtag-instr'] - - self.print_gdbserver_message(self.gdb_port) - server_cmd = [self.xt_ocd_dir, - '-c', topology_file, - '-I', jtag_instr_file] - - # Note that XTOCD takes a few seconds to execute and always fails the - # first time. It has to be relaunched the second time to work. - server_proc = self.popen_ignore_int(server_cmd) - time.sleep(6) - server_proc.terminate() - self.check_call(server_cmd) diff --git a/scripts/meta/west/runners/jlink.py b/scripts/meta/west/runners/jlink.py deleted file mode 100644 index bc5d59b66bc..00000000000 --- a/scripts/meta/west/runners/jlink.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. 
-# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for debugging with J-Link.''' - -import os -import tempfile -import sys - -from west import log -from west.runners.core import ZephyrBinaryRunner, RunnerCaps, \ - BuildConfiguration - -DEFAULT_JLINK_EXE = 'JLink.exe' if sys.platform == 'win32' else 'JLinkExe' -DEFAULT_JLINK_GDB_PORT = 2331 - - -class JLinkBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for the J-Link GDB server.''' - - def __init__(self, cfg, device, - commander=DEFAULT_JLINK_EXE, - flash_addr=0x0, erase=True, - iface='swd', speed='auto', - gdbserver='JLinkGDBServer', gdb_port=DEFAULT_JLINK_GDB_PORT, - tui=False): - super(JLinkBinaryRunner, self).__init__(cfg) - self.bin_name = cfg.bin_file - self.elf_name = cfg.elf_file - self.gdb_cmd = [cfg.gdb] if cfg.gdb else None - self.device = device - self.commander = commander - self.flash_addr = flash_addr - self.erase = erase - self.gdbserver_cmd = [gdbserver] - self.iface = iface - self.speed = speed - self.gdb_port = gdb_port - self.tui_arg = ['-tui'] if tui else [] - - @classmethod - def name(cls): - return 'jlink' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'}, - flash_addr=True) - - @classmethod - def do_add_parser(cls, parser): - # Required: - parser.add_argument('--device', required=True, help='device name') - - # Optional: - parser.add_argument('--iface', default='swd', - help='interface to use, default is swd') - parser.add_argument('--speed', default='auto', - help='interface speed, default is autodetect') - parser.add_argument('--tui', default=False, action='store_true', - help='if given, GDB uses -tui') - parser.add_argument('--gdbserver', default='JLinkGDBServer', - help='GDB server, default is JLinkGDBServer') - parser.add_argument('--gdb-port', default=DEFAULT_JLINK_GDB_PORT, - help='pyocd gdb port, defaults to {}'.format( - DEFAULT_JLINK_GDB_PORT)) - parser.add_argument('--commander', default=DEFAULT_JLINK_EXE, - help='J-Link Commander, default is JLinkExe') - parser.add_argument('--erase', default=False, action='store_true', - help='if given, mass erase flash before loading') - - @classmethod - def create(cls, cfg, args): - build_conf = BuildConfiguration(cfg.build_dir) - flash_addr = cls.get_flash_address(args, build_conf) - return JLinkBinaryRunner(cfg, args.device, - commander=args.commander, - flash_addr=flash_addr, erase=args.erase, - iface=args.iface, speed=args.speed, - gdbserver=args.gdbserver, - gdb_port=args.gdb_port, - tui=args.tui) - - def print_gdbserver_message(self): - log.inf('J-Link GDB server running on port {}'.format(self.gdb_port)) - - def do_run(self, command, **kwargs): - server_cmd = (self.gdbserver_cmd + - ['-select', 'usb', # only USB connections supported - '-port', str(self.gdb_port), - '-if', self.iface, - '-speed', self.speed, - '-device', self.device, - '-silent', - '-singlerun']) - - if command == 'flash': - self.flash(**kwargs) - elif command == 'debugserver': - self.print_gdbserver_message() - self.check_call(server_cmd) - else: - if self.gdb_cmd is None: - raise ValueError('Cannot debug; gdb is missing') - if self.elf_name is None: - raise ValueError('Cannot debug; elf is missing') - client_cmd = (self.gdb_cmd + - self.tui_arg + - [self.elf_name] + - ['-ex', 'target remote :{}'.format(self.gdb_port)]) - if command == 'debug': - client_cmd += ['-ex', 'monitor halt', - '-ex', 'monitor reset', - '-ex', 'load'] - self.print_gdbserver_message() - self.run_server_and_client(server_cmd, client_cmd) - - def 
flash(self, **kwargs): - if self.bin_name is None: - raise ValueError('Cannot flash; bin_name is missing') - - lines = ['r'] # Reset and halt the target - - if self.erase: - lines.append('erase') # Erase all flash sectors - - lines.append('loadfile {} 0x{:x}'.format(self.bin_name, - self.flash_addr)) - lines.append('g') # Start the CPU - lines.append('q') # Close the connection and quit - - log.dbg('JLink commander script:') - log.dbg('\n'.join(lines)) - - # Don't use NamedTemporaryFile: the resulting file can't be - # opened again on Windows. - with tempfile.TemporaryDirectory(suffix='jlink') as d: - fname = os.path.join(d, 'runner.jlink') - with open(fname, 'wb') as f: - f.writelines(bytes(line + '\n', 'utf-8') for line in lines) - - cmd = ([self.commander] + - ['-if', self.iface, - '-speed', self.speed, - '-device', self.device, - '-CommanderScript', fname]) - - log.inf('Flashing Target Device') - self.check_call(cmd) diff --git a/scripts/meta/west/runners/nios2.py b/scripts/meta/west/runners/nios2.py deleted file mode 100644 index 1298a023879..00000000000 --- a/scripts/meta/west/runners/nios2.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for NIOS II, based on quartus-flash.py and GDB.''' - -from west import log -from west.runners.core import ZephyrBinaryRunner, NetworkPortHelper - - -class Nios2BinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for NIOS II.''' - - # From the original shell script: - # - # "XXX [flash] only support[s] cases where the .elf is sent - # over the JTAG and the CPU directly boots from __start. CONFIG_XIP - # and CONFIG_INCLUDE_RESET_VECTOR must be disabled." - - def __init__(self, cfg, quartus_py=None, cpu_sof=None, tui=False): - super(Nios2BinaryRunner, self).__init__(cfg) - self.hex_name = cfg.hex_file - self.elf_name = cfg.elf_file - self.cpu_sof = cpu_sof - self.quartus_py = quartus_py - self.gdb_cmd = [cfg.gdb] if cfg.gdb else None - self.tui_arg = ['-tui'] if tui else [] - - @classmethod - def name(cls): - return 'nios2' - - @classmethod - def do_add_parser(cls, parser): - # TODO merge quartus-flash.py script into this file. - parser.add_argument('--quartus-flash', required=True) - parser.add_argument('--cpu-sof', required=True, - help='path to the the CPU .sof data') - parser.add_argument('--tui', default=False, action='store_true', - help='if given, GDB uses -tui') - - @classmethod - def create(cls, cfg, args): - return Nios2BinaryRunner(cfg, - quartus_py=args.quartus_flash, - cpu_sof=args.cpu_sof, - tui=args.tui) - - def do_run(self, command, **kwargs): - if command == 'flash': - self.flash(**kwargs) - else: - self.debug_debugserver(command, **kwargs) - - def flash(self, **kwargs): - if self.quartus_py is None: - raise ValueError('Cannot flash; --quartus-flash not given.') - if self.cpu_sof is None: - raise ValueError('Cannot flash; --cpu-sof not given.') - - cmd = [self.quartus_py, - '--sof', self.cpu_sof, - '--kernel', self.hex_name] - - self.check_call(cmd) - - def print_gdbserver_message(self, gdb_port): - log.inf('Nios II GDB server running on port {}'.format(gdb_port)) - - def debug_debugserver(self, command, **kwargs): - # Per comments in the shell script, the NIOSII GDB server - # doesn't exit gracefully, so it's better to explicitly search - # for an unused port. The script picks a random value in - # between 1024 and 49151, but we'll start with the - # "traditional" 3333 choice. 
- gdb_start = 3333 - nh = NetworkPortHelper() - gdb_port = nh.get_unused_ports([gdb_start])[0] - - server_cmd = (['nios2-gdb-server', - '--tcpport', str(gdb_port), - '--stop', '--reset-target']) - - if command == 'debugserver': - self.print_gdbserver_message(gdb_port) - self.check_call(server_cmd) - else: - if self.elf_name is None: - raise ValueError('Cannot debug; elf is missing') - if self.gdb_cmd is None: - raise ValueError('Cannot debug; no gdb specified') - - gdb_cmd = (self.gdb_cmd + - self.tui_arg + - [self.elf_name, - '-ex', 'target remote :{}'.format(gdb_port)]) - - self.print_gdbserver_message(gdb_port) - self.run_server_and_client(server_cmd, gdb_cmd) diff --git a/scripts/meta/west/runners/nrfjprog.py b/scripts/meta/west/runners/nrfjprog.py deleted file mode 100644 index 0aafdc57c7b..00000000000 --- a/scripts/meta/west/runners/nrfjprog.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for flashing with nrfjprog.''' - -import sys - -from west import log -from west.runners.core import ZephyrBinaryRunner, RunnerCaps - - -class NrfJprogBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for nrfjprog.''' - - def __init__(self, cfg, family, softreset, snr, erase=False): - super(NrfJprogBinaryRunner, self).__init__(cfg) - self.hex_ = cfg.hex_file - self.family = family - self.softreset = softreset - self.snr = snr - self.erase = erase - - @classmethod - def name(cls): - return 'nrfjprog' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash'}) - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--nrf-family', required=True, - choices=['NRF51', 'NRF52'], - help='family of nRF MCU') - parser.add_argument('--softreset', required=False, - action='store_true', - help='use reset instead of pinreset') - parser.add_argument('--erase', action='store_true', - help='if given, mass erase flash before loading') - parser.add_argument('--snr', required=False, - help='serial number of board to use') - - @classmethod - def create(cls, cfg, args): - return NrfJprogBinaryRunner(cfg, args.nrf_family, args.softreset, - args.snr, erase=args.erase) - - def get_board_snr_from_user(self): - snrs = self.check_output(['nrfjprog', '--ids']) - snrs = snrs.decode(sys.getdefaultencoding()).strip().splitlines() - - if len(snrs) == 0: - raise RuntimeError('"nrfjprog --ids" did not find a board; ' - 'is the board connected?') - elif len(snrs) == 1: - board_snr = snrs[0] - if board_snr == '0': - raise RuntimeError('"nrfjprog --ids" returned 0; ' - 'is a debugger already connected?') - return board_snr - - log.dbg("Refusing the temptation to guess a board", - level=log.VERBOSE_EXTREME) - - # Use of print() here is advised. We don't want to lose - # this information in a separate log -- this is - # interactive and requires a terminal. - print('There are multiple boards connected.') - for i, snr in enumerate(snrs, 1): - print('{}. 
{}'.format(i, snr)) - - p = 'Please select one with desired serial number (1-{}): '.format( - len(snrs)) - while True: - value = input(p) - try: - value = int(value) - except ValueError: - continue - if 1 <= value <= len(snrs): - break - - return snrs[value - 1] - - def do_run(self, command, **kwargs): - commands = [] - if (self.snr is None): - board_snr = self.get_board_snr_from_user() - else: - board_snr = self.snr.lstrip("0") - program_cmd = ['nrfjprog', '--program', self.hex_, '-f', self.family, - '--snr', board_snr] - - print('Flashing file: {}'.format(self.hex_)) - if self.erase: - commands.extend([ - ['nrfjprog', - '--eraseall', - '-f', self.family, - '--snr', board_snr], - program_cmd - ]) - else: - if self.family == 'NRF51': - commands.append(program_cmd + ['--sectorerase']) - else: - commands.append(program_cmd + ['--sectoranduicrerase']) - - if self.family == 'NRF52' and not self.softreset: - commands.extend([ - # Enable pin reset - ['nrfjprog', '--pinresetenable', '-f', self.family, - '--snr', board_snr], - ]) - - if self.softreset: - commands.append(['nrfjprog', '--reset', '-f', self.family, - '--snr', board_snr]) - else: - commands.append(['nrfjprog', '--pinreset', '-f', self.family, - '--snr', board_snr]) - - for cmd in commands: - self.check_call(cmd) - - log.inf('Board with serial number {} flashed successfully.'.format( - board_snr)) diff --git a/scripts/meta/west/runners/nsim.py b/scripts/meta/west/runners/nsim.py deleted file mode 100644 index 5eb6dc6a617..00000000000 --- a/scripts/meta/west/runners/nsim.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (c) 2018 Synopsys Inc. -# Copyright (c) 2017 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''ARC architecture-specific runners.''' - -from os import path - -from west.runners.core import ZephyrBinaryRunner - -DEFAULT_ARC_GDB_PORT = 3333 -DEFAULT_PROPS_FILE = 'nsim.props' - - -class NsimBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for the ARC si.''' - - # This unusual 'flash' implementation matches the original shell script. - # - # It works by starting a GDB server in a separate session, connecting a - # client to it to load the program, and running 'continue' within the - # client to execute the application. 
- # - - def __init__(self, cfg, - tui=False, - gdb_port=DEFAULT_ARC_GDB_PORT, - props=DEFAULT_PROPS_FILE): - super(NsimBinaryRunner, self).__init__(cfg) - self.gdb_cmd = [cfg.gdb] + (['-tui'] if tui else []) - self.nsim_cmd = ['nsimdrv'] - self.gdb_port = gdb_port - self.props = props - - @classmethod - def name(cls): - return 'arc-nsim' - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--gdb-port', default=DEFAULT_ARC_GDB_PORT, - help='nsim gdb port, defaults to 3333') - parser.add_argument('--props', default=DEFAULT_PROPS_FILE, - help='nsim props file, defaults to nsim.props') - - @classmethod - def create(cls, cfg, args): - if cfg.gdb is None: - raise ValueError('--gdb not provided at command line') - - return NsimBinaryRunner( - cfg, - gdb_port=args.gdb_port, - props=args.props) - - def do_run(self, command, **kwargs): - kwargs['nsim-cfg'] = path.join(self.cfg.board_dir, 'support', - self.props) - - if command == 'flash': - self.do_flash(**kwargs) - elif command == 'debug': - self.do_debug(**kwargs) - else: - self.debugserver(**kwargs) - - def do_flash(self, **kwargs): - config = kwargs['nsim-cfg'] - - cmd = (self.nsim_cmd + ['-propsfile', config, self.cfg.elf_file]) - self.check_call(cmd) - - def do_debug(self, **kwargs): - config = kwargs['nsim-cfg'] - - server_cmd = (self.nsim_cmd + ['-gdb', - '-port={}'.format(self.gdb_port), - '-propsfile', config]) - - gdb_cmd = (self.gdb_cmd + - ['-ex', 'target remote :{}'.format(self.gdb_port), - '-ex', 'load', self.cfg.elf_file]) - - self.run_server_and_client(server_cmd, gdb_cmd) - - def debugserver(self, **kwargs): - config = kwargs['nsim-cfg'] - - cmd = (self.nsim_cmd + - ['-gdb', '-port={}'.format(self.gdb_port), - '-propsfile', config]) - - self.check_call(cmd) diff --git a/scripts/meta/west/runners/openocd.py b/scripts/meta/west/runners/openocd.py deleted file mode 100644 index 32d14da93c5..00000000000 --- a/scripts/meta/west/runners/openocd.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. 
-# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for openocd.''' - -from os import path - -from west.runners.core import ZephyrBinaryRunner - -DEFAULT_OPENOCD_TCL_PORT = 6333 -DEFAULT_OPENOCD_TELNET_PORT = 4444 -DEFAULT_OPENOCD_GDB_PORT = 3333 - - -class OpenOcdBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for openocd.''' - - def __init__(self, cfg, - pre_cmd=None, load_cmd=None, verify_cmd=None, post_cmd=None, - tui=None, - tcl_port=DEFAULT_OPENOCD_TCL_PORT, - telnet_port=DEFAULT_OPENOCD_TELNET_PORT, - gdb_port=DEFAULT_OPENOCD_GDB_PORT): - super(OpenOcdBinaryRunner, self).__init__(cfg) - self.openocd_config = path.join(cfg.board_dir, 'support', - 'openocd.cfg') - - search_args = [] - if cfg.openocd_search is not None: - search_args = ['-s', cfg.openocd_search] - self.openocd_cmd = [cfg.openocd] + search_args - self.elf_name = cfg.elf_file - self.load_cmd = load_cmd - self.verify_cmd = verify_cmd - self.pre_cmd = pre_cmd - self.post_cmd = post_cmd - self.tcl_port = tcl_port - self.telnet_port = telnet_port - self.gdb_port = gdb_port - self.gdb_cmd = [cfg.gdb] if cfg.gdb else None - self.tui_arg = ['-tui'] if tui else [] - - @classmethod - def name(cls): - return 'openocd' - - @classmethod - def do_add_parser(cls, parser): - # Options for flashing: - parser.add_argument('--cmd-pre-load', - help='Command to run before flashing') - parser.add_argument('--cmd-load', - help='''Command to load/flash binary - (required when flashing)''') - parser.add_argument('--cmd-verify', - help='''Command to verify flashed binary''') - parser.add_argument('--cmd-post-verify', - help='Command to run after verification') - - # Options for debugging: - parser.add_argument('--tui', default=False, action='store_true', - help='if given, GDB uses -tui') - parser.add_argument('--tcl-port', default=DEFAULT_OPENOCD_TCL_PORT, - help='openocd TCL port, defaults to 6333') - parser.add_argument('--telnet-port', - default=DEFAULT_OPENOCD_TELNET_PORT, - help='openocd telnet port, defaults to 4444') - parser.add_argument('--gdb-port', default=DEFAULT_OPENOCD_GDB_PORT, - help='openocd gdb port, defaults to 3333') - - @classmethod - def create(cls, cfg, args): - return OpenOcdBinaryRunner( - cfg, - pre_cmd=args.cmd_pre_load, load_cmd=args.cmd_load, - verify_cmd=args.cmd_verify, post_cmd=args.cmd_post_verify, - tui=args.tui, - tcl_port=args.tcl_port, telnet_port=args.telnet_port, - gdb_port=args.gdb_port) - - def do_run(self, command, **kwargs): - if command == 'flash': - self.do_flash(**kwargs) - elif command == 'debug': - self.do_debug(**kwargs) - else: - self.do_debugserver(**kwargs) - - def do_flash(self, **kwargs): - if self.load_cmd is None: - raise ValueError('Cannot flash; load command is missing') - if self.verify_cmd is None: - raise ValueError('Cannot flash; verify command is missing') - - pre_cmd = [] - if self.pre_cmd is not None: - pre_cmd = ['-c', self.pre_cmd] - - post_cmd = [] - if self.post_cmd is not None: - post_cmd = ['-c', self.post_cmd] - - cmd = (self.openocd_cmd + - ['-f', self.openocd_config, - '-c', 'init', - '-c', 'targets'] + - pre_cmd + - ['-c', 'reset halt', - '-c', self.load_cmd, - '-c', 'reset halt', - '-c', self.verify_cmd] + - post_cmd + - ['-c', 'reset run', - '-c', 'shutdown']) - self.check_call(cmd) - - def do_debug(self, **kwargs): - if self.gdb_cmd is None: - raise ValueError('Cannot debug; no gdb specified') - if self.elf_name is None: - raise ValueError('Cannot debug; no .elf specified') - - server_cmd = (self.openocd_cmd + - ['-f', self.openocd_config, - '-c', 'tcl_port 
{}'.format(self.tcl_port), - '-c', 'telnet_port {}'.format(self.telnet_port), - '-c', 'gdb_port {}'.format(self.gdb_port), - '-c', 'init', - '-c', 'targets', - '-c', 'halt']) - - gdb_cmd = (self.gdb_cmd + self.tui_arg + - ['-ex', 'target remote :{}'.format(self.gdb_port), - self.elf_name]) - - self.run_server_and_client(server_cmd, gdb_cmd) - - def do_debugserver(self, **kwargs): - cmd = (self.openocd_cmd + - ['-f', self.openocd_config, - '-c', 'init', - '-c', 'targets', - '-c', 'reset halt']) - self.check_call(cmd) diff --git a/scripts/meta/west/runners/pyocd.py b/scripts/meta/west/runners/pyocd.py deleted file mode 100644 index 1d9d88c94ee..00000000000 --- a/scripts/meta/west/runners/pyocd.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for pyOCD .''' - -import os - -from west.runners.core import ZephyrBinaryRunner, RunnerCaps, \ - BuildConfiguration -from west import log - -DEFAULT_PYOCD_GDB_PORT = 3333 - - -class PyOcdBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for pyOCD.''' - - def __init__(self, cfg, target, - flashtool='pyocd-flashtool', flash_addr=0x0, - flashtool_opts=None, - gdbserver='pyocd-gdbserver', - gdb_port=DEFAULT_PYOCD_GDB_PORT, tui=False, - board_id=None, daparg=None, frequency=None): - super(PyOcdBinaryRunner, self).__init__(cfg) - - self.target_args = ['-t', target] - self.flashtool = flashtool - self.flash_addr_args = ['-a', hex(flash_addr)] if flash_addr else [] - self.gdb_cmd = [cfg.gdb] if cfg.gdb is not None else None - self.gdbserver = gdbserver - self.gdb_port = gdb_port - self.tui_args = ['-tui'] if tui else [] - self.hex_name = cfg.hex_file - self.bin_name = cfg.bin_file - self.elf_name = cfg.elf_file - - board_args = [] - if board_id is not None: - board_args = ['-b', board_id] - self.board_args = board_args - - daparg_args = [] - if daparg is not None: - daparg_args = ['-da', daparg] - self.daparg_args = daparg_args - - frequency_args = [] - if frequency is not None: - frequency_args = ['-f', frequency] - self.frequency_args = frequency_args - - self.flashtool_extra = flashtool_opts if flashtool_opts else [] - - @classmethod - def name(cls): - return 'pyocd' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'}, - flash_addr=True) - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--target', required=True, - help='target override') - - parser.add_argument('--daparg', - help='Additional -da arguments to pyocd tool') - parser.add_argument('--flashtool', default='pyocd-flashtool', - help='flash tool path, default is pyocd-flashtool') - parser.add_argument('--flashtool-opt', default=[], action='append', - help='''Additional options for pyocd-flashtool, - e.g. 
-ce to chip erase''') - parser.add_argument('--frequency', - help='SWD clock frequency in Hz') - parser.add_argument('--gdbserver', default='pyocd-gdbserver', - help='GDB server, default is pyocd-gdbserver') - parser.add_argument('--gdb-port', default=DEFAULT_PYOCD_GDB_PORT, - help='pyocd gdb port, defaults to {}'.format( - DEFAULT_PYOCD_GDB_PORT)) - parser.add_argument('--tui', default=False, action='store_true', - help='if given, GDB uses -tui') - parser.add_argument('--board-id', - help='ID of board to flash, default is to prompt') - - @classmethod - def create(cls, cfg, args): - daparg = os.environ.get('PYOCD_DAPARG') - if daparg: - log.wrn('Setting PYOCD_DAPARG in the environment is', - 'deprecated; use the --daparg option instead.') - if args.daparg is None: - log.dbg('Missing --daparg set to {} from environment'.format( - daparg), level=log.VERBOSE_VERY) - args.daparg = daparg - - build_conf = BuildConfiguration(cfg.build_dir) - flash_addr = cls.get_flash_address(args, build_conf) - - return PyOcdBinaryRunner( - cfg, args.target, flashtool=args.flashtool, - flash_addr=flash_addr, flashtool_opts=args.flashtool_opt, - gdbserver=args.gdbserver, gdb_port=args.gdb_port, tui=args.tui, - board_id=args.board_id, daparg=args.daparg, - frequency=args.frequency) - - def port_args(self): - return ['-p', str(self.gdb_port)] - - def do_run(self, command, **kwargs): - if command == 'flash': - self.flash(**kwargs) - else: - self.debug_debugserver(command, **kwargs) - - def flash(self, **kwargs): - if os.path.isfile(self.hex_name): - fname = self.hex_name - elif os.path.isfile(self.bin_name): - fname = self.bin_name - else: - raise ValueError( - 'Cannot flash; no hex ({}) or bin ({}) files'.format( - self.hex_name, self.bin_name)) - - cmd = ([self.flashtool] + - self.flash_addr_args + - self.daparg_args + - self.target_args + - self.board_args + - self.frequency_args + - self.flashtool_extra + - [fname]) - - log.inf('Flashing Target Device') - self.check_call(cmd) - - def print_gdbserver_message(self): - log.inf('pyOCD GDB server running on port {}'.format(self.gdb_port)) - - def debug_debugserver(self, command, **kwargs): - server_cmd = ([self.gdbserver] + - self.daparg_args + - self.port_args() + - self.target_args + - self.board_args + - self.frequency_args) - - if command == 'debugserver': - self.print_gdbserver_message() - self.check_call(server_cmd) - else: - if self.gdb_cmd is None: - raise ValueError('Cannot debug; gdb is missing') - if self.elf_name is None: - raise ValueError('Cannot debug; elf is missing') - client_cmd = (self.gdb_cmd + - self.tui_args + - [self.elf_name] + - ['-ex', 'target remote :{}'.format(self.gdb_port)]) - if command == 'debug': - client_cmd += ['-ex', 'monitor halt', - '-ex', 'monitor reset', - '-ex', 'load'] - - self.print_gdbserver_message() - self.run_server_and_client(server_cmd, client_cmd) diff --git a/scripts/meta/west/runners/qemu.py b/scripts/meta/west/runners/qemu.py deleted file mode 100644 index fc62fa4a142..00000000000 --- a/scripts/meta/west/runners/qemu.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner stub for QEMU.''' - -from west.runners.core import ZephyrBinaryRunner, RunnerCaps - - -class QemuBinaryRunner(ZephyrBinaryRunner): - '''Place-holder for QEMU runner customizations.''' - - def __init__(self, cfg): - super(QemuBinaryRunner, self).__init__(cfg) - - @classmethod - def name(cls): - return 'qemu' - - @classmethod - def capabilities(cls): - # This is a stub. 
- return RunnerCaps(commands=set()) - - @classmethod - def do_add_parser(cls, parser): - pass # Nothing to do. - - @classmethod - def create(cls, cfg, args): - return QemuBinaryRunner(cfg) - - def do_run(self, command, **kwargs): - pass diff --git a/scripts/meta/west/runners/xtensa.py b/scripts/meta/west/runners/xtensa.py deleted file mode 100644 index 8c8e0e9c44b..00000000000 --- a/scripts/meta/west/runners/xtensa.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) 2017 Linaro Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Runner for debugging with xt-gdb.''' - -from os import path - -from west.runners.core import ZephyrBinaryRunner, RunnerCaps - - -class XtensaBinaryRunner(ZephyrBinaryRunner): - '''Runner front-end for xt-gdb.''' - - def __init__(self, cfg): - super(XtensaBinaryRunner, self).__init__(cfg) - - @classmethod - def name(cls): - return 'xtensa' - - @classmethod - def capabilities(cls): - return RunnerCaps(commands={'debug'}) - - @classmethod - def do_add_parser(cls, parser): - parser.add_argument('--xcc-tools', required=True, - help='path to XTensa tools') - - @classmethod - def create(cls, cfg, args): - # Override any GDB with the one provided by the XTensa tools. - cfg.gdb = path.join(args.xcc_tools, 'bin', 'xt-gdb') - return XtensaBinaryRunner(cfg) - - def do_run(self, command, **kwargs): - gdb_cmd = [self.cfg.gdb, self.cfg.elf_file] - - self.check_call(gdb_cmd) diff --git a/scripts/meta/west/util.py b/scripts/meta/west/util.py deleted file mode 100644 index 458abbabe48..00000000000 --- a/scripts/meta/west/util.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2018 Open Source Foundries Limited. -# -# SPDX-License-Identifier: Apache-2.0 - -'''Miscellaneous utilities used by west -''' - -import os -import shlex -import textwrap - - -def quote_sh_list(cmd): - '''Transform a command from list into shell string form.''' - fmt = ' '.join('{}' for _ in cmd) - args = [shlex.quote(s) for s in cmd] - return fmt.format(*args) - - -def wrap(text, indent): - '''Convenience routine for wrapping text to a consistent indent.''' - return textwrap.wrap(text, initial_indent=indent, - subsequent_indent=indent) - - -class WestNotFound(RuntimeError): - '''Neither the current directory nor any parent has a West installation.''' - - -def west_dir(start=None): - '''Returns the absolute path of the west/ top level directory. - - Starts the search from the start directory, and goes to its - parents. If the start directory is not specified, the current - directory is used. - - Raises WestNotFound if no west top-level directory is found. - ''' - return os.path.join(west_topdir(start), 'west') - - -def west_topdir(start=None): - ''' - Like west_dir(), but returns the path to the parent directory of the west/ - directory instead, where project repositories are stored - ''' - # If you change this function, make sure to update the bootstrap - # script's find_west_topdir(). - - if start is None: - cur_dir = os.getcwd() - else: - cur_dir = start - - while True: - if os.path.isfile(os.path.join(cur_dir, 'west', '.west_topdir')): - return cur_dir - - parent_dir = os.path.dirname(cur_dir) - if cur_dir == parent_dir: - # At the root - raise WestNotFound('Could not find a West installation ' - 'in this or any parent directory') - cur_dir = parent_dir - - -def in_multirepo_install(start=None): - '''Returns True iff the path ``start`` is in a multi-repo installation. - - If start is not given, it defaults to the current working directory. 
- - This is equivalent to checking if west_dir() raises an exception - when given the same start kwarg. - ''' - try: - west_topdir(start) - result = True - except WestNotFound: - result = False - return result diff --git a/scripts/west b/scripts/west deleted file mode 100755 index 485645321a9..00000000000 --- a/scripts/west +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python3 - -# Zephyr launcher which is interoperable with: -# -# 1. "mono-repo" Zephyr installations that have 'make flash' -# etc. supplied by a copy of some west code in scripts/meta. -# -# 2. "multi-repo" Zephyr installations where west is provided in a -# separate Git repository elsewhere. -# -# This is basically a copy of the "wrapper" functionality in the west -# bootstrap script for the multi-repo case, plus a fallback onto the -# copy in scripts/meta/west for mono-repo installs. - -import os -import sys - -import colorama - -if sys.version_info < (3,): - sys.exit('fatal error: you are running Python 2') - -# Top-level west directory, containing west itself and the manifest. -WEST_DIR = 'west' -# Subdirectory to check out the west source repository into. -WEST = 'west' -# File inside of WEST_DIR which marks it as the top level of the -# Zephyr project installation. -# -# (The WEST_DIR name is not distinct enough to use when searching for -# the top level; other directories named "west" may exist elsewhere, -# e.g. zephyr/doc/west.) -WEST_MARKER = '.west_topdir' - - -class WestNotFound(RuntimeError): - '''Neither the current directory nor any parent has a West installation.''' - - -def find_west_topdir(start): - '''Find the top-level installation directory, starting at ``start``. - - If none is found, raises WestNotFound.''' - cur_dir = start - - while True: - if os.path.isfile(os.path.join(cur_dir, WEST_DIR, WEST_MARKER)): - return cur_dir - - parent_dir = os.path.dirname(cur_dir) - if cur_dir == parent_dir: - # At the root - raise WestNotFound() - cur_dir = parent_dir - - -def wrap(west_dir, argv): - # Pull in the west main module, after adding the directory - # containing the package to sys.path. - sys.path.append(west_dir) - import west.main - - # Invoke west's main with our arguments. It needs to be run from - # this process for 'west debug' to work properly, so don't change - # this code to running main in a subprocess. - west.main.main(sys.argv[1:]) - - -def main(): - # Figure out which west to run. If we're in a multirepo - # installation, prefer the standalone west. Otherwise, we're in a - # monorepo installation, so we need to fall back on the copy of - # west in the Zephyr repository's scripts/meta directory. - try: - topdir = find_west_topdir(__file__) - west_dir = os.path.join(topdir, 'west', 'west', 'src') - except WestNotFound: - west_dir = os.path.join(os.environ['ZEPHYR_BASE'], 'scripts', 'meta') - - try: - wrap(west_dir, sys.argv[1:]) - finally: - print(colorama.Fore.LIGHTRED_EX, end='') - print('NOTE: you just ran a copy of west from {};'. 
- format(os.path.dirname(__file__)), - 'this will be removed from the Zephyr repository in the future.', - 'West is now developed separately.') - print(colorama.Style.RESET_ALL, end='', flush=True) - - -if __name__ == '__main__': - main() diff --git a/zephyr-env.cmd b/zephyr-env.cmd index 752156865e3..6c79799e88d 100644 --- a/zephyr-env.cmd +++ b/zephyr-env.cmd @@ -4,14 +4,3 @@ set ZEPHYR_BASE=%~dp0 if exist "%userprofile%\zephyrrc.cmd" ( call "%userprofile%\zephyrrc.cmd" ) - -rem Zephyr meta-tool (west) launcher alias, which keeps monorepo -rem Zephyr installations' 'make flash' etc. working. See -rem https://www.python.org/dev/peps/pep-0486/ for details on the -rem virtualenv-related pieces. (We need to implement this manually -rem because Zephyr's minimum supported Python version is 3.4.) -if defined VIRTUAL_ENV ( - doskey west=python %ZEPHYR_BASE%\scripts\west $* -) else ( - doskey west=py -3 %ZEPHYR_BASE%\scripts\west $* -)
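
Note: both removed helpers that locate the installation root (west.util.west_topdir() and the launcher's find_west_topdir() above) use the same approach: walk upward from a starting directory until a west/.west_topdir marker file is found, and fail once the filesystem root is reached. The following is only a minimal, self-contained sketch of that search loop for readers of this patch; the name find_topdir and the RuntimeError message are illustrative and do not appear in the removed code.

import os

def find_topdir(start=None):
    # Walk from 'start' (default: the current directory) toward the
    # filesystem root, looking for the west/.west_topdir marker file.
    cur_dir = start or os.getcwd()
    while True:
        if os.path.isfile(os.path.join(cur_dir, 'west', '.west_topdir')):
            return cur_dir
        parent_dir = os.path.dirname(cur_dir)
        if parent_dir == cur_dir:
            # Reached the filesystem root without finding the marker.
            raise RuntimeError('no west installation found in any parent directory')
        cur_dir = parent_dir

For example, find_topdir('/home/user/zephyrproject/zephyr/samples') would return /home/user/zephyrproject if that directory contains west/.west_topdir.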