pax_global_header00006660000000000000000000000064146505373440014525gustar00rootroot0000000000000052 comment=f9072e8ff48b49d10011f35a2f05998e08de51a0 colcon-core-0.17.1/000077500000000000000000000000001465053734400140165ustar00rootroot00000000000000colcon-core-0.17.1/.github/000077500000000000000000000000001465053734400153565ustar00rootroot00000000000000colcon-core-0.17.1/.github/workflows/000077500000000000000000000000001465053734400174135ustar00rootroot00000000000000colcon-core-0.17.1/.github/workflows/bootstrap.yaml000066400000000000000000000027121465053734400223160ustar00rootroot00000000000000name: Run bootstrap tests on: workflow_call: jobs: setup: runs-on: ubuntu-latest outputs: strategy: ${{steps.load.outputs.strategy}} steps: - uses: actions/checkout@v4 with: repository: colcon/ci - id: load run: | strategy=$(jq -c -M '.' strategy.json) echo "strategy=${strategy}" >> $GITHUB_OUTPUT bootstrap: needs: [setup] strategy: ${{ fromJson(needs.setup.outputs.strategy) }} runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: ${{matrix.python}} - name: Install dependencies run: | python -m pip install -U pip setuptools python -m pip install -U -e .[test] python -m pip uninstall -y colcon-core - name: Build and test run: | cd .. python ${{ github.workspace }}/bin/colcon build --paths ${{ github.workspace }} python ${{ github.workspace }}/bin/colcon test --paths ${{ github.workspace }} --return-code-on-test-failure - name: Use the installed package (Bash) if: ${{runner.os != 'windows'}} shell: bash run: | . ../install/local_setup.sh colcon --help - name: Use the installed package (CMD) if: ${{runner.os == 'windows'}} shell: cmd run: | call ..\install\local_setup.bat colcon --help colcon-core-0.17.1/.github/workflows/ci.yaml000066400000000000000000000004071465053734400206730ustar00rootroot00000000000000name: Run tests on: push: branches: ['master'] pull_request: jobs: pytest: uses: colcon/ci/.github/workflows/pytest.yaml@main secrets: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} bootstrap: uses: ./.github/workflows/bootstrap.yaml colcon-core-0.17.1/.gitignore000066400000000000000000000000151465053734400160020ustar00rootroot00000000000000__pycache__/ colcon-core-0.17.1/LICENSE000066400000000000000000000261361465053734400150330ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. colcon-core-0.17.1/README.rst000066400000000000000000000005541465053734400155110ustar00rootroot00000000000000colcon - collective construction ================================ ``colcon`` is a command line tool to improve the workflow of building, testing and using multiple software packages. It automates the process, handles the ordering and sets up the environment to use the packages. For more information see `colcon.readthedocs.io `_. colcon-core-0.17.1/bin/000077500000000000000000000000001465053734400145665ustar00rootroot00000000000000colcon-core-0.17.1/bin/colcon000077500000000000000000000124011465053734400157670ustar00rootroot00000000000000#!/usr/bin/env python3 # Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 """ This script is only used for bootstrapping and building this package. The installed command is generated by the `console_scripts` entry point. 
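As an illustrative sketch (the paths are placeholders), the bootstrap workflow in
``.github/workflows/bootstrap.yaml`` invokes this script directly before the package
is installed::

    python path/to/colcon-core/bin/colcon build --paths path/to/colcon-core
    python path/to/colcon-core/bin/colcon test --paths path/to/colcon-core \
        --return-code-on-test-failure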
""" import os import sys # add this package to the Python path pkg_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) sys.path.insert(0, pkg_root) # override entry point discovery from colcon_core import extension_point # noqa: E402 custom_extension_points = {} def custom_load_extension_points( # noqa: D103 group_name, *, excludes=None ): global custom_extension_points assert group_name in custom_extension_points, \ f"get_extension_points() not overridden for group '{group_name}'" return { k: v for k, v in custom_extension_points[group_name].items() if excludes is None or k not in excludes} # override function before importing other modules extension_point.load_extension_points = custom_load_extension_points from colcon_core.command import HOME_ENVIRONMENT_VARIABLE # noqa: E402 I202 from colcon_core.command import LOG_LEVEL_ENVIRONMENT_VARIABLE # noqa: E402 from colcon_core.environment.path import PathEnvironment # noqa: E402 from colcon_core.environment.path \ import PythonScriptsPathEnvironment # noqa: E402 from colcon_core.environment.pythonpath \ import PythonPathEnvironment # noqa: E402 from colcon_core.event_handler.console_direct \ import ConsoleDirectEventHandler # noqa: E402 from colcon_core.event_handler.console_start_end \ import ConsoleStartEndEventHandler # noqa: E402 from colcon_core.event_handler.log_command \ import LogCommandEventHandler # noqa: E402 from colcon_core.executor \ import DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE # noqa: E402 from colcon_core.executor.sequential import SequentialExecutor # noqa: E402 from colcon_core.extension_point \ import EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE # noqa: E402 from colcon_core.package_augmentation.python \ import PythonPackageAugmentation # noqa: E402 from colcon_core.package_discovery.path \ import PathPackageDiscovery # noqa: E402 from colcon_core.package_identification.ignore \ import IgnorePackageIdentification # noqa: E402 from colcon_core.package_identification.python \ import PythonPackageIdentification # noqa: E402 from colcon_core.prefix_path.colcon import ColconPrefixPath # noqa: E402 from colcon_core.shell import ALL_SHELLS_ENVIRONMENT_VARIABLE # noqa: E402 from colcon_core.shell.bat import BatShell # noqa: E402 from colcon_core.shell.dsv import DsvShell # noqa: E402 from colcon_core.shell.sh import ShShell # noqa: E402 from colcon_core.task.python.build import PythonBuildTask # noqa: E402 from colcon_core.task.python.test import PythonTestTask # noqa: E402 from colcon_core.task.python.test.pytest \ import PytestPythonTestingStep # noqa: E402 from colcon_core.task.python.test.setuppy_test \ import SetuppyPythonTestingStep # noqa: E402 from colcon_core.verb.build import BuildVerb # noqa: E402 from colcon_core.verb.test import TestVerb # noqa: E402 custom_extension_points.update({ 'colcon_core.argument_parser': {}, 'colcon_core.environment': { 'path': PathEnvironment, 'pythonscriptspath': PythonScriptsPathEnvironment, 'pythonpath': PythonPathEnvironment, }, 'colcon_core.environment_variable': { 'all_shells': ALL_SHELLS_ENVIRONMENT_VARIABLE, 'default_executor': DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE, 'extension_blocklist': EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE, 'home': HOME_ENVIRONMENT_VARIABLE, 'log_level': LOG_LEVEL_ENVIRONMENT_VARIABLE, }, 'colcon_core.event_handler': { 'console_direct': ConsoleDirectEventHandler, 'console_start_end': ConsoleStartEndEventHandler, 'log_command': LogCommandEventHandler, }, 'colcon_core.executor': { 'sequential': SequentialExecutor, }, 'colcon_core.extension_point': { # 
there is no point in registering the extension_point extensions here # since they can't be queried through pkg_resources without installing }, 'colcon_core.package_augmentation': { 'python': PythonPackageAugmentation, }, 'colcon_core.package_discovery': { 'path': PathPackageDiscovery, }, 'colcon_core.package_identification': { 'ignore': IgnorePackageIdentification, 'python': PythonPackageIdentification, }, 'colcon_core.package_selection': {}, 'colcon_core.prefix_path': { 'colcon': ColconPrefixPath, }, 'colcon_core.python_testing': { 'pytest': PytestPythonTestingStep, 'setuppy_test': SetuppyPythonTestingStep, }, 'colcon_core.shell': { 'bat': BatShell, 'dsv': DsvShell, 'sh': ShShell, }, 'colcon_core.shell.find_installed_packages': { # Not essential for bootstrapping }, 'colcon_core.task.build': { 'python': PythonBuildTask, }, 'colcon_core.task.test': { 'python': PythonTestTask, }, 'colcon_core.verb': { 'build': BuildVerb, 'test': TestVerb, }, }) from colcon_core.command import main # noqa: E402 I100 I202 if __name__ == '__main__': sys.exit(main() or 0) colcon-core-0.17.1/codecov.yml000066400000000000000000000002221465053734400161570ustar00rootroot00000000000000coverage: status: project: default: informational: true patch: default: only_pulls: true target: 0% colcon-core-0.17.1/colcon/000077500000000000000000000000001465053734400152735ustar00rootroot00000000000000colcon-core-0.17.1/colcon/__init__.py000066400000000000000000000000001465053734400173720ustar00rootroot00000000000000colcon-core-0.17.1/colcon/__main__.py000066400000000000000000000004061465053734400173650ustar00rootroot00000000000000# Copyright 2010 Dirk Thomas # Licensed under the Apache License, Version 2.0 # This module exists for multi-Python environments # so the user can invoke colcon as `python -m colcon` import sys import colcon_core.command sys.exit(colcon_core.command.main()) colcon-core-0.17.1/colcon_core/000077500000000000000000000000001465053734400163035ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/__init__.py000066400000000000000000000001531465053734400204130ustar00rootroot00000000000000# Copyright 2016-2020 Dirk Thomas # Licensed under the Apache License, Version 2.0 __version__ = '0.17.1' colcon-core-0.17.1/colcon_core/argument_default.py000066400000000000000000000037761465053734400222200ustar00rootroot00000000000000# Copyright 2020 Dirk Thomas # Licensed under the Apache License, Version 2.0 class BooleanDefaultValue: """Boolean value distinguishable from explicitly passed arguments.""" def __init__(self, value): # noqa: D107 assert isinstance(value, bool) self._value = value def __bool__(self): # noqa: D105 return self._value class ListDefaultValue(list): """List value distinguishable from explicitly passed arguments.""" pass class StringDefaultValue(str): """String value distinguishable from explicitly passed arguments.""" pass _types = { bool: BooleanDefaultValue, list: ListDefaultValue, str: StringDefaultValue, } def wrap_default_value(value): """ Wrap a default value in a custom type. The custom type makes the default value distinguishable from explicitly passed arguments independent of the value. The custom types try to match the behavior of the original type as much as possible. 
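For example (an illustrative sketch using only the helpers defined in this module)::

    wrapped = wrap_default_value(['build'])
    assert is_default_value(wrapped)
    assert unwrap_default_value(wrapped) == ['build']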
:param value: The default value to be wrapped :returns: The wrapped value if the value type is supported, otherwise the passed value """ global _types if is_default_value(value): raise ValueError( 'the passed value is already wrapped: ' + str(type(value))) if type(value) in _types: return _types[type(value)](value) return value def unwrap_default_value(value): """ Unwrap a default value to the native type. :param value: The wrapped default value :returns: The unwrapped value :raises ValueError: if the value is not a wrapped default value """ global _types if not is_default_value(value): raise ValueError( 'the passed value is not a wrapped default value: ' + str(type(value))) type_ = [k for k, v in _types.items() if v == type(value)][0] return type_(value) def is_default_value(value): """Check if a value is a default value.""" global _types return isinstance(value, tuple(_types.values())) colcon-core-0.17.1/colcon_core/argument_parser/000077500000000000000000000000001465053734400215015ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/argument_parser/__init__.py000066400000000000000000000175421465053734400236230ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import traceback import types import warnings from colcon_core.generic_decorator import GenericDecorator from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority logger = colcon_logger.getChild(__name__) class ArgumentParserDecoratorExtensionPoint: """ The interface for argument parser decorator extensions. An argument parser decorator extension performs additional functionality when adding command line arguments. For each instance the attribute `ARGUMENT_PARSER_DECORATOR_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the argument parser decorator extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of argument parser decorator extensions.""" PRIORITY = 100 def decorate_argument_parser(self, *, parser): """ Decorate an argument parser to perform additional functionality. This method must be overridden in a subclass. :param parser: The argument parser :returns: A decorator """ raise NotImplementedError() def get_argument_parser_extensions(*, group_name=None): """ Get the available argument parser extensions. The extensions are ordered by their priority and entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.ARGUMENT_PARSER_DECORATOR_NAME = name return order_extensions_by_priority(extensions) def decorate_argument_parser(parser): """ Decorate the parser using the available argument parser extensions. 
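A minimal sketch of the intended usage, mirroring ``colcon_core.command.create_parser``
(the argument name added afterwards is illustrative)::

    import argparse
    parser = decorate_argument_parser(argparse.ArgumentParser())
    parser.add_argument('--log-base')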
:param parser: The argument parser :returns: The decorated parser """ extensions = get_argument_parser_extensions() for extension in extensions.values(): logger.log( 1, 'decorate_argument_parser() %s', extension.ARGUMENT_PARSER_DECORATOR_NAME) try: decorated_parser = extension.decorate_argument_parser( parser=parser) assert hasattr(decorated_parser, 'add_argument'), \ 'decorate_argument_parser() should return a parser like object' except Exception as e: # noqa: F841 # catch exceptions raised in decorator extension exc = traceback.format_exc() logger.error( 'Exception in argument parser decorator extension ' f"'{extension.ARGUMENT_PARSER_DECORATOR_NAME}': {e}\n{exc}") # skip failing extension, continue with next one else: parser = decorated_parser return parser class ArgumentParserDecorator(GenericDecorator): """ Decorate an argument parser as well as all recursive subparsers. The methods and arguments are the same as :class:`argparse.ArgumentParser`. Subclasses can perform any kind of task when e.g. arguments are being added without being concerned in which part of the hierarchy it is added. Subclasses should not set any member variables directly but pass them as keyword arguments to the constructor. """ def __init__(self, parser, **kwargs): """ Decorate an argument parser. :param parser: The argument parser to decorate :param **kwargs: The keyword arguments are set as attributes on this instance """ assert '_parser' not in kwargs assert '_nested_decorators_' not in kwargs kwargs['_nested_decorators_'] = [] assert '_group_decorators' not in kwargs kwargs['_group_decorators'] = [] assert '_recursive_decorators' not in kwargs kwargs['_recursive_decorators'] = [] super().__init__(parser, **kwargs) @property def _nested_decorators(self): # pragma: no cover warnings.warn( 'colcon_core.argument_parser.ArgumentParserDecorator.' '_nested_decorators is a private variable and has been ' 'deprecated', stacklevel=2) return self._nested_decorators_ @property def _parser(self): """ Get the parser that this instance decorates (the decoree). TODO: Deprecate _parser in favor of _decoree """ return self._decoree def add_argument(self, *args, **kwargs): """ Add an argument. See :class:`argparse.ArgumentParser.add_argument` for the method arguments and return value. """ return self._parser.add_argument(*args, **kwargs) def add_argument_group(self, *args, **kwargs): """ Decorate group parser before adding. See :class:`argparse.ArgumentParser.add_argument_group` for the method arguments and return value. """ group = self.__class__( self._parser.add_argument_group(*args, **kwargs)) self._nested_decorators_.append(group) self._group_decorators.append(group) return group def add_mutually_exclusive_group(self, *args, **kwargs): """ Decorate mutually exclusive group parser before adding. See :class:`argparse.ArgumentParser.add_mutually_exclusive_group` for the method arguments and return value. """ group = self.__class__( self._parser.add_mutually_exclusive_group(*args, **kwargs)) self._nested_decorators_.append(group) self._group_decorators.append(group) return group def add_parser(self, *args, **kwargs): """ Decorate parser before adding. See :class:`argparse.ArgumentParser.add_parser` for the method arguments and return value. """ parser = self.__class__( self._parser.add_parser(*args, **kwargs)) self._nested_decorators_.append(parser) self._recursive_decorators.append(parser) return parser def add_subparsers(self, *args, **kwargs): """ Decorate subparser before adding. 
See :class:`argparse.ArgumentParser.add_subparsers` for the method arguments and return value. """ subparser = self.__class__( self._parser.add_subparsers(*args, **kwargs)) self._nested_decorators_.append(subparser) self._recursive_decorators.append(subparser) return subparser def register(self, *args, **kwargs): """ Register a type converter. See :class:`argparse.ArgumentParser.register` for the method arguments and return value. """ return self._parser.register(*args, **kwargs) class SuppressUsageOutput: """Context manager to suppress help action during `parse_known_args`.""" def __init__(self, parsers): """ Construct a SuppressUsageOutput. :param parsers: The parsers """ self._parsers = parsers self._callbacks = {} def __enter__(self): # noqa: D105 for p in self._parsers: self._callbacks[p] = p.print_help, p.exit # temporary prevent printing usage early if help is requested p.print_help = lambda: None # temporary prevent help action to exit early, # but keep exiting on invalid arguments p.exit = types.MethodType(_ignore_zero_exit(p.exit), p) return self def __exit__(self, *args): # noqa: D105 for p, callbacks in self._callbacks.items(): p.print_help, p.exit = callbacks def _ignore_zero_exit(original_exit_handler): def exit_(self, status=0, message=None): nonlocal original_exit_handler if status == 0: return return original_exit_handler(status=status, message=message) return exit_ colcon-core-0.17.1/colcon_core/argument_parser/action_collector.py000066400000000000000000000073641465053734400254100ustar00rootroot00000000000000# Copyright 2022 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 from colcon_core.argument_parser import ArgumentParserDecorator class ActionCollectorDecorator(ArgumentParserDecorator): """Collect actions for all added arguments.""" def __init__(self, parser, **kwargs): """ Construct a ActionCollectorDecorator. :param parser: The argument parser to decorate """ # avoid setting members directly, the base class overrides __setattr__ # pass them as keyword arguments instead super().__init__( parser, _collected_actions=set(), **kwargs) def get_collected_actions(self): """ Get actions for all added arguments. :returns: The argument actions :rtype: dict """ return set(self._collected_actions) def add_argument(self, *args, **kwargs): """Collect actions for all added arguments.""" action = super().add_argument(*args, **kwargs) self._collected_actions.add(action) return action class SuppressRequiredActions: """ Context manager to suppress required actions during `parse_known_args`. This works only with parsers decorated with :class:`ActionCollectorDecorator`. It operates by temporarily setting the ``required`` attribute on all actions to ``False``, then restores the original behavior when exiting. """ def __init__(self, parsers, actions_to_omit=None): """ Construct a SuppressRequiredActions. 
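An illustrative sketch, assuming ``decorated_parser`` was created with an
:class:`ActionCollectorDecorator` (or a subclass)::

    with SuppressRequiredActions([decorated_parser]):
        known_args, _ = decorated_parser.parse_known_args(argv)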
:param parsers: The parsers :param actions_to_omit: A collection of actions which should not be suppressed """ self._parsers = parsers self._suppressed_actions = set() self._actions_to_omit = actions_to_omit or set() def __enter__(self): # noqa: D105 for p in self._parsers: if not issubclass(type(p), ActionCollectorDecorator): continue for action in p.get_collected_actions(): if not action.required: continue if action in self._actions_to_omit: continue self._suppressed_actions.add(action) action.required = False def __exit__(self, *args): # noqa: D105 for action in self._suppressed_actions: action.required = True class SuppressTypeConversions: """ Context manager to suppress type conversions during `parse_known_args`. This works only with parsers decorated with :class:`ActionCollectorDecorator`. It operates by temporarily setting the ``type`` attribute on all actions to ``None``, then restores the original conversion when exiting. """ def __init__(self, parsers, actions_to_omit=None): """ Construct a SuppressTypeConversions. :param parsers: The parsers :param actions_to_omit: A collection of actions which should not be suppressed """ self._parsers = parsers self._suppressed_actions = {} self._actions_to_omit = actions_to_omit or set() def __enter__(self): # noqa: D105 for p in self._parsers: if not issubclass(type(p), ActionCollectorDecorator): continue for action in p.get_collected_actions(): if action.type in (None, str): continue if action in self._actions_to_omit: continue self._suppressed_actions[action] = action.type action.type = None def __exit__(self, *args): # noqa: D105 for action, original_type in self._suppressed_actions.items(): action.type = original_type colcon-core-0.17.1/colcon_core/argument_parser/destination_collector.py000066400000000000000000000035171465053734400264500ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict from colcon_core.argument_parser import ArgumentParserDecorator class DestinationCollectorDecorator(ArgumentParserDecorator): """Collect the option names and destination of arguments.""" def __init__(self, parser, **kwargs): """ Construct a DestinationCollectorDecorator. :param parser: The argument parser to decorate """ # avoid setting members directly, the base class overrides __setattr__ # pass them as keyword arguments instead super().__init__( parser, _destinations=OrderedDict(), **kwargs) def get_destinations(self, *, recursive=True): """ Get destinations for all added arguments. 
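For example (an illustrative sketch, assuming ``argparse`` is imported)::

    decorator = DestinationCollectorDecorator(argparse.ArgumentParser())
    decorator.add_argument('--build-base')
    decorator.get_destinations()  # OrderedDict([('build-base', 'build_base')])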
:param bool recursive: The flag if destinations from added parsers / subparsers should be included, destinations from grouped arguments are always included :returns: The destination names :rtype: OrderedDict """ destinations = OrderedDict() destinations.update(self._destinations) for d in self._group_decorators: destinations.update(d.get_destinations()) if recursive: for d in self._recursive_decorators: destinations.update(d.get_destinations()) return destinations def add_argument(self, *args, **kwargs): """Collect option names and destination for all added arguments.""" argument = super().add_argument(*args, **kwargs) for arg in args: if not arg.startswith('-'): continue key = arg.lstrip('-') assert key not in self._destinations self._destinations[key] = argument.dest return argument colcon-core-0.17.1/colcon_core/argument_parser/type_collector.py000066400000000000000000000065561465053734400251160ustar00rootroot00000000000000# Copyright 2021 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 import warnings from colcon_core.argument_parser import ArgumentParserDecorator class TypeCollectorDecorator(ArgumentParserDecorator): """Collect the type conversions of arguments.""" warnings.warn( "'colcon_core.argument_parser.type_collector." "TypeCollectorDecorator' has been deprecated", stacklevel=2) def __init__(self, parser, **kwargs): """ Construct a TypeCollectorDecorator. :param parser: The argument parser to decorate """ # avoid setting members directly, the base class overrides __setattr__ # pass them as keyword arguments instead super().__init__( parser, _registered_types={}, **kwargs) def get_types(self): """ Get types for all added arguments. :returns: The registered type converters :rtype: dict """ return dict(self._registered_types) def add_argument(self, *args, **kwargs): """Collect type converters for all added arguments.""" ret = super().add_argument(*args, **kwargs) type_arg = kwargs.get('type') if type_arg is not None: self._registered_types.setdefault(type_arg, type_arg) return ret def register(self, *args, **kwargs): """Collect explicitly added type converters.""" ret = super().register(*args, **kwargs) registry_name_arg = kwargs.get('registry_name', args[0]) if registry_name_arg == 'type': value_arg = kwargs.get('value', args[1]) object_arg = kwargs.get('object', args[2]) self._registered_types[value_arg] = object_arg return ret class SuppressTypeConversions: """ Context manager to suppress type conversions during `parse_known_args`. This works only with parsers decorated with :class:`TypeCollectorDecorator`. It operates by registering a no-op conversion function (`str()`) in place of the original conversion, then restores the original conversion when exiting. """ warnings.warn( "'colcon_core.argument_parser.type_collector." "SuppressTypeConversions' has been deprecated, use 'colcon_core." "argument_parser.action_collector.SuppressTypeConversions' " 'instead', stacklevel=2) def __init__(self, parsers, types_to_omit=None): """ Construct a SuppressTypeConversions. 
:param parsers: The parsers :param types_to_omit: A collection of types which should not be suppressed """ self._parsers = parsers self._suppressed_types = {} self._types_to_omit = types_to_omit or set() def __enter__(self): # noqa: D105 for p in self._parsers: if not issubclass(type(p), TypeCollectorDecorator): continue self._suppressed_types[p] = {} for v, o in getattr(p, '_registered_types', {}).items(): if v in self._types_to_omit: continue p._parser.register('type', v, str) self._suppressed_types[p][v] = o def __exit__(self, *args): # noqa: D105 for p, types in self._suppressed_types.items(): for v, o in types.items(): p.register('type', v, o) colcon-core-0.17.1/colcon_core/argument_type.py000066400000000000000000000021011465053734400215320ustar00rootroot00000000000000# Copyright 2023 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 import functools import os from colcon_core.argument_default import is_default_value def resolve_path(value, base=os.getcwd()): """ Resolve a path argument from the current directory. If the given value is an argument default, the value is returned unmodified. :param value: The value to resolve to an absolute path :returns: The unmodified value, or resolved path """ if value is None or is_default_value(value): return value res = os.path.abspath(os.path.join(base, str(value))) return res def get_cwd_path_resolver(): """ Create a function which resolves paths from the current directory. If the current directory changes between calling this function and calling the function returned by this function, the directory at the time of this function call is used. :returns: A function which takes a single string and returns a string """ return functools.partial(resolve_path, base=os.getcwd()) colcon-core-0.17.1/colcon_core/command.py000066400000000000000000000534061465053734400203030ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import argparse import datetime import logging import os from pathlib import Path import shutil import signal import sys import traceback from colcon_core.environment_variable import EnvironmentVariable # a custom environment variable is necessary since PYTHONWARNINGS doesn't # support passing a regular expression for the module entry # see https://bugs.python.org/issue34624 """Environment variable to set the warnings filter for colcon modules""" WARNINGS_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_WARNINGS', 'Set the warnings filter similar to PYTHONWARNINGS except that the module ' "entry is implicitly set to 'colcon.*'") warnings_filters = os.environ.get(WARNINGS_ENVIRONMENT_VARIABLE.name) if warnings_filters: import warnings # filters are separated by commas for f in warnings_filters.split(','): # fields are separated by colons fields = f.split(':', 4) if len(fields) < 5: fields += [''] * (5 - len(fields)) action, message, category, module, line = fields try: category = warnings._getcategory(category) except Exception: # noqa: B902 print( f"The category field '{category}' must be a valid warnings " 'class name', file=sys.stderr) sys.exit(1) if module: print( 'The module field of the ' f'{WARNINGS_ENVIRONMENT_VARIABLE.name} filter should be ' 'empty, otherwise use PYTHONWARNINGS instead', file=sys.stderr) sys.exit(1) warnings.filterwarnings( action, message=message, category=category or Warning, module='colcon.*', lineno=line if line else 0) from colcon_core.argument_parser import decorate_argument_parser # noqa: E402 E501 I100 
I202 from colcon_core.argument_parser import SuppressUsageOutput # noqa: E402 from colcon_core.extension_point import load_extension_points # noqa: E402 from colcon_core.feature_flags import check_implemented_flags # noqa: E402 from colcon_core.location import create_log_path # noqa: E402 from colcon_core.location import get_log_path # noqa: E402 from colcon_core.location import set_default_config_path # noqa: E402 from colcon_core.location import set_default_log_path # noqa: E402 from colcon_core.logging import add_file_handler # noqa: E402 from colcon_core.logging import colcon_logger # noqa: E402 from colcon_core.logging import get_numeric_log_level # noqa: E402 from colcon_core.logging import set_logger_level_from_env # noqa: E402 from colcon_core.plugin_system import get_first_line_doc # noqa: E402 from colcon_core.verb import get_verb_extensions # noqa: E402 """Environment variable to set the log level""" LOG_LEVEL_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_LOG_LEVEL', 'Set the log level (debug|10, info|20, warn|30, error|40, critical|50, or ' 'any other positive numeric value)') """Environment variable to set the configuration directory""" HOME_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_HOME', 'Set the configuration directory (default: ~/.colcon)') _command_exit_handlers = [] def register_command_exit_handler(handler): """ Register a callable to be invoked after the command finished. Repeated registrations of the same handler are ignored. :param handler: The callable """ if handler not in _command_exit_handlers: _command_exit_handlers.append(handler) def main( *, command_name='colcon', argv=None, verb_group_name=None, environment_variable_group_name=None, default_verb=None, ): """ Execute the main logic of the command. The overview of the process: * Configure logging level based on an environment variable * Configure the configuration path * Create the argument parser * Document all environment variables * Decorate the parsers with additional functionality * Add the available verbs and their arguments * Configure logging level based on an arguments * Create an invocation specific log directory * Invoke the logic of the selected verb (if applicable) * Invoke registered exit handlers in reverse order :param str command_name: The name of the command invoked :param list argv: The list of arguments :param str verb_group_name: The extension point group name for verbs :param str environment_variable_group_name: The extension point group name for environment variables :param Type default_verb: The verb class type to invoke if no explicit verb was provided on the command line :returns: The return code """ try: return _main( command_name=command_name, argv=argv, verb_group_name=verb_group_name, environment_variable_group_name=environment_variable_group_name, default_verb=default_verb) except KeyboardInterrupt: return signal.SIGINT finally: # invoke all exit handlers while _command_exit_handlers: handler = _command_exit_handlers.pop() handler() def _main( *, command_name, argv, verb_group_name, environment_variable_group_name, default_verb ): # default log level, for searchability: COLCON_LOG_LEVEL colcon_logger.setLevel(logging.WARNING) set_logger_level_from_env( colcon_logger, f'{command_name}_LOG_LEVEL'.upper()) colcon_logger.debug( 'Command line arguments: {argv}' .format(argv=argv if argv is not None else sys.argv)) # warn about any specified feature flags that are already implemented check_implemented_flags() # set default locations for config files, for searchability: 
COLCON_HOME set_default_config_path( path=(Path('~') / f'.{command_name}').expanduser(), env_var=f'{command_name}_HOME'.upper()) parser = create_parser(environment_variable_group_name) if default_verb is not None: default_verb_instance = default_verb() parser.set_defaults( verb_parser=parser, verb_extension=default_verb_instance, main=default_verb_instance.main) add_parser_arguments(parser, default_verb_instance) verb_extensions = get_verb_extensions(group_name=verb_group_name) # add subparsers for all verb extensions but without arguments for now subparser = create_subparser( parser, command_name, verb_extensions, attribute='verb_name') verb_parsers = add_parsers_without_arguments( parser, subparser, verb_extensions, attribute='verb_name') with SuppressUsageOutput([parser] + list(verb_parsers.values())): known_args, _ = parser.parse_known_args(args=argv) # add the arguments for the requested verb if known_args.verb_name is not None: add_parser_arguments(known_args.verb_parser, known_args.verb_extension) args = parser.parse_args(args=argv) context = CommandContext(command_name=command_name, args=args) if args.log_level: # the value might be provided externally and needs to be checked again colcon_logger.setLevel(get_numeric_log_level(args.log_level)) colcon_logger.debug(f'Parsed command line arguments: {args}') # verify that one of the verbs set the 'main' attribute to be invoked later if getattr(args, 'main', None) is None: print(parser.format_usage()) return 'Error: No verb provided' # set default locations for log files, for searchability: COLCON_LOG_PATH now = datetime.datetime.now() now_str = str(now)[:-7].replace(' ', '_').replace(':', '-') if args.verb_name is None: subdirectory = now_str else: subdirectory = f'{args.verb_name}_{now_str}' set_default_log_path( base_path=args.log_base, env_var=f'{command_name}_LOG_PATH'.upper(), subdirectory=subdirectory) # add a file handler writing all levels if logging isn't disabled log_path = get_log_path() if log_path is not None: create_log_path(args.verb_name) handler = add_file_handler( colcon_logger, log_path / 'logger_all.log') # write previous log messages to the file handler log_record = colcon_logger.makeRecord( colcon_logger.name, logging.DEBUG, __file__, 0, 'Command line arguments: {argv}' .format(argv=argv if argv is not None else sys.argv), None, None) handler.handle(log_record) log_record = colcon_logger.makeRecord( colcon_logger.name, logging.DEBUG, __file__, 0, f'Parsed command line arguments: {args}', None, None) handler.handle(log_record) # set an environment variable named after the command (if not already set) # which allows subprocesses to identify they are invoked by this command if command_name.upper() not in os.environ: os.environ[command_name.upper()] = '1' # invoke verb return verb_main(context, colcon_logger) def create_parser(environment_variables_group_name=None): """ Create the argument parser. It uses a custom raw description help formatter to maintain newlines. It uses the available argument parser extensions to decorate the parsers. It enumerates the registered environment variables in the epilog of the help message. 
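A minimal illustrative call; the group name shown below is the default used when no
name is passed::

    parser = create_parser('colcon_core.environment_variable')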
:param str environment_variables_group_name: The entry point group name for the environment variable extensions :returns: The argument parser """ # workaround a limitation in argparse to accept arguments to options # which begin with a dash but are not options themselves # https://bugs.python.org/issue9334 class CustomArgumentParser(argparse.ArgumentParser): def _parse_optional(self, arg_string): result = super()._parse_optional(arg_string) # Up until https://github.com/python/cpython/pull/114180 , # _parse_optional() returned a 3-tuple when it couldn't classify # the option. As of that PR (which is in Python 3.13, and # backported to Python 3.12), it returns a 4-tuple. Check for # either here. if result in ( (None, arg_string, None), (None, arg_string, None, None), ): # in the case there the arg is classified as an unknown 'O' # override that and classify it as an 'A' return None return result epilog = get_environment_variables_epilog(environment_variables_group_name) if epilog: epilog += '\n\n' epilog += READTHEDOCS_MESSAGE # top level parser parser = CustomArgumentParser( prog=get_prog_name(), formatter_class=CustomFormatter, epilog=epilog) # enable introspecting and intercepting all command line arguments parser = decorate_argument_parser(parser) add_log_level_argument(parser) return parser def get_prog_name(): """Get the prog name used for the argparse parser.""" prog = sys.argv[0] basename = os.path.basename(prog) if basename == '__main__.py': # use the module name in case the script was invoked with python -m ... prog = os.path.basename(os.path.dirname(prog)) else: default_prog = shutil.which(basename) or '' default_ext = os.path.splitext(default_prog)[1] real_prog = prog if ( sys.platform == 'win32' and os.path.splitext(real_prog)[1] != default_ext ): # On Windows, setuptools entry points drop the file extension from # argv[0], but shutil.which does not. If the two don't end in the # same extension, try appending the shutil extension for a better # chance at matching. real_prog += default_ext try: # The os.path.samefile requires that both files exist on disk, but # has the advantage of working around symlinks, UNC-style paths, # DOS 8.3 path atoms, and path normalization. if os.path.samefile(default_prog, real_prog): # use basename only if it is on the PATH prog = basename except (FileNotFoundError, NotADirectoryError): pass return prog class CustomFormatter(argparse.RawDescriptionHelpFormatter): """A custom formatter to maintain newlines.""" def _split_lines(self, text, width): """Maintain newlines when the text starts with 'r|'.""" lines = [] for line in text.splitlines(): if len(line) <= width: lines.append(line) else: lines += super()._split_lines(line, width) return lines def get_environment_variables_epilog(group_name=None): """ Get a message enumerating the registered environment variables. 
:param str group_name: The entry point group name for the environment variable extensions :returns: The message for the argument parser epilog :rtype: str """ if group_name is None: group_name = 'colcon_core.environment_variable' # list environment variables with descriptions entry_points = load_extension_points(group_name) if not entry_points: return '' env_vars = { env_var.name: env_var.description for env_var in entry_points.values()} epilog_lines = [] for name in sorted(env_vars.keys()): epilog_lines += _format_pair(name, env_vars[name], indent=2, align=24) return 'Environment variables:\n' + '\n'.join(epilog_lines) READTHEDOCS_MESSAGE = 'For more help and usage tips, see ' \ 'https://colcon.readthedocs.io' def add_log_level_argument(parser): """ Add the argument for the log level to the parser. :param parser: The argument parser """ parser.add_argument( '--log-base', help='The base path for all log directories (default: ./log, to ' f'disable: {os.devnull})') parser.add_argument( '--log-level', action=LogLevelAction, help='Set log level for the console output, either by numeric or ' 'string value (default: warning)') class LogLevelAction(argparse.Action): """Accept either positive integers or known log level names.""" def __init__(self, option_strings, dest, *, nargs=None, **kwargs): """See :class:`argparse.Action.__init__`.""" if nargs is not None: # pragma: no cover raise ValueError('nargs not allowed') super().__init__(option_strings, dest, **kwargs) def __call__(self, parser, namespace, values, option_string=None): """See :class:`argparse.Action.__call__`.""" try: value = get_numeric_log_level(values) except ValueError as e: # noqa: F841 parser.error(f'{option_string} has unsupported value, {e}') setattr(namespace, self.dest, value) def add_subparsers(parser, cmd_name, verb_extensions, *, attribute): """ Create argparse subparsers for each verb. The `cmd_name` is used for the title and description of the argparse `add_subparsers` function call. For each verb extension a subparser is created. If the extension has an `add_arguments` method it is being called with the subparser being passed as the only argument. :param parser: The argument parser for this command :param str cmd_name: The name of the command to which the verbs are being added :param dict verb_extensions: The verb extensions indexed by the verb name :param str attribute: The name of the attribute in the parsed args for the selected verb """ subparser = create_subparser( parser, cmd_name, verb_extensions, attribute=attribute) verb_parsers = add_parsers_without_arguments( parser, subparser, verb_extensions, attribute=attribute) for name, verb_parser in verb_parsers.items(): add_parser_arguments( verb_parser, verb_extensions[name]) def create_subparser(parser, cmd_name, verb_extensions, *, attribute): """ Create the special action object to contain subparsers. The `cmd_name` is used for the title and description of the argparse `add_subparsers` function call. 
:param parser: The argument parser for this command :param str cmd_name: The name of the command to which the verbs are being added :param dict verb_extensions: The verb extensions indexed by the verb name :param str attribute: The name of the attribute in the parsed args for the selected verb :returns: The special action object """ # list of available verbs with their descriptions verbs = [] for name, extension in verb_extensions.items(): verbs += _format_pair( name, get_first_line_doc(extension), indent=0, align=22) # add subparser with description of verb extensions subparser = parser.add_subparsers( title=f'{cmd_name} verbs', description='\n'.join(verbs) or None, dest=attribute, help=( f'call `{cmd_name} VERB -h` for specific help' if verbs else argparse.SUPPRESS), ) return subparser def add_parsers_without_arguments( parser, subparser, verb_extensions, *, attribute ): """ Create subparsers for each verb but without any arguments. For each verb extension a subparser is created. :param parser: The argument parser for this command :param subparser: The special action object to add the subparsers to :param dict verb_extensions: The verb extensions indexed by the verb name :param str attribute: The name of the attribute in the extension containing the verb :returns: The subparsers indexed by the verb name :rtype: dict """ verb_parsers = {} # add verb specific group and arguments for name, extension in verb_extensions.items(): verb_parser = subparser.add_parser( getattr(extension, attribute.upper()), description=get_first_line_doc(extension) + '.', formatter_class=parser.formatter_class, ) verb_parser.set_defaults( verb_parser=verb_parser, verb_extension=extension, main=extension.main) verb_parsers[name] = verb_parser return verb_parsers def add_parser_arguments(verb_parser, extension): """ Add the arguments and recursive subparsers to a specific verb parser. If the extension has an `add_arguments` method it is being called with the subparser being passed as the only argument. :param verb_parser: The verb parser :param extension: The verb extension """ if hasattr(extension, 'add_arguments'): retval = extension.add_arguments(parser=verb_parser) if retval is not None: colcon_logger.error( f"Exception in verb extension '{extension.VERB_NAME}': " 'add_arguments() should return None') def _format_pair(key, value, *, indent, align): """ Format a key value pair to align with others printed by argparse. 
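For example (illustrative values)::

    lines = _format_pair('--log-base', 'The base path', indent=2, align=24)
    # lines is a list containing a single aligned string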
:param str key: The key :param str value: The value :param int indent: The indentation level of the key :param int align: The indentation level of the value :returns: The indented and potentially wrapped line(s) :rtype: str """ lines = [] prefix = ' ' * indent + key # wrap between key and value if the gap between them is smaller than this minimum_gap = 2 if len(prefix) + minimum_gap <= align: # key fits in the same line as the value lines.append(prefix + ' ' * (align - len(prefix))) else: # the key is too long, the value needs to start on the next line lines.append(prefix) lines.append(' ' * align) # wrap between key and value if the gap between them is smaller than this maximum_line_length = 80 maximum_value_length = maximum_line_length - align while value: if len(value) > maximum_value_length: try: # look for a space within the desired length i = value.rindex(' ', 0, maximum_value_length) except ValueError: # no space to wrap, just append everything in a single line pass else: # append part to last line lines[-1] += value[0:i] value = value[i + 1:].lstrip() # start a new line with the spaces for the alignment lines.append(' ' * align) continue # either the remaining value is short enough or no space was found lines[-1] += value break return lines class CommandContext: """The context providing the command name and the parsed arguments.""" __slots__ = ('command_name', 'args') def __init__(self, *, command_name: str, args: object): # noqa: D107 self.command_name = command_name self.args = args def verb_main(context, logger): """ Invoke the logic of the selected verb. If the invocation is interrupted the returned error code is `signal.SIGINT`. If the verb raises a `RuntimeException` an error message is logged which contains the message of the exception. For any other exception a traceback is included in the logged error message. :param context: The :class:`CommandContext` :param logger: The logger :returns: The return code """ # call the extension's main method try: # catch exceptions raised in verb extension rc = context.args.main(context=context) except RuntimeError as e: # noqa: F841 # only log the error message for "known" exceptions logger.error(f'{context.command_name} {context.args.verb_name}: {e}') return 1 except Exception as e: # noqa: F841 # log the error message and a traceback for "unexpected" exceptions exc = traceback.format_exc() logger.error( f'{context.command_name} {context.args.verb_name}: {e}\n{exc}') return 1 return rc colcon-core-0.17.1/colcon_core/dependency_descriptor.py000066400000000000000000000026151465053734400232350ustar00rootroot00000000000000# Copyright 2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import copy class DependencyDescriptor(str): """ A descriptor for a package dependency. Beside the name of the dependency the 'metadata' dictionary can store any additional information. A dependency is identified by its name. The 'metadata' dictionary can store any additional information. Currently the class is inheriting from str for backwards compatibility. You should not rely on this but use the `name` property instead. """ @staticmethod def __new__(cls, name, *, metadata=None): # noqa: D102 return str.__new__(cls, name) def __init__(self, name, *, metadata=None): # noqa: D107 self.metadata = metadata if metadata is not None else {} @property def name(self): """ Name of the dependency. The property exists for future compatibility when the base class is removed. 
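For example (the metadata key shown is hypothetical)::

    dep = DependencyDescriptor('foo', metadata={'origin': 'manual'})
    assert dep.name == 'foo'
    assert dep == 'foo'  # still behaves like a plain string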
:rtype: str """ return str(self) def __deepcopy__(self, memo=None): # noqa: D105 # surprisingly this is significantly faster than the default if not self.metadata: # explicitly skipping the deep copy of an empty dict is also faster return DependencyDescriptor(self.name) return DependencyDescriptor( self.name, metadata=copy.deepcopy(self.metadata, memo=memo)) colcon-core-0.17.1/colcon_core/distutils/000077500000000000000000000000001465053734400203275ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/distutils/__init__.py000066400000000000000000000000001465053734400224260ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/distutils/commands/000077500000000000000000000000001465053734400221305ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/distutils/commands/__init__.py000066400000000000000000000000001465053734400242270ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/distutils/commands/symlink_data.py000066400000000000000000000016051465053734400251630ustar00rootroot00000000000000# Copyright 2023 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 from distutils.command.install_data import install_data import os class symlink_data(install_data): # noqa: N801 """Like install_data, but symlink files instead of copying.""" def copy_file(self, src, dst, **kwargs): # noqa: D102 if kwargs.get('link'): return super().copy_file(src, dst, **kwargs) if self.force: # os.symlink fails if the destination exists as a regular file if os.path.isdir(dst): target = os.path.join(dst, os.path.basename(src)) else: target = dst if os.path.exists(dst) and not os.path.islink(dst): os.remove(target) kwargs['link'] = 'sym' src = os.path.abspath(src) return super().copy_file(src, dst, **kwargs) colcon-core-0.17.1/colcon_core/entry_point.py000066400000000000000000000147331465053734400212370ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import defaultdict import os import sys import traceback import warnings from colcon_core.environment_variable import EnvironmentVariable from colcon_core.logging import colcon_logger from pkg_resources import iter_entry_points from pkg_resources import WorkingSet """Environment variable to block extensions""" EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_EXTENSION_BLOCKLIST', 'Block extensions which should not be used') # See colcon/colcon-core#562 warnings.warn( "'colcon_core.entry_point' has been deprecated, " "use 'colcon_core.extension_point' instead", stacklevel=2) if sys.version_info[:2] >= (3, 7): def __getattr__(name): global EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE if name == 'EXTENSION_BLACKLIST_ENVIRONMENT_VARIABLE': warnings.warn( "'colcon_core.entry_point.EXTENSION_BLACKLIST_ENVIRONMENT_" "VARIABLE' has been deprecated, use 'colcon_core.entry_point." "EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE' instead", stacklevel=2) return EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE raise AttributeError( "module '%s' has no attribute '%s'" % (__name__, name)) else: # for backward compatibility but without a deprecation warning on usage EXTENSION_BLACKLIST_ENVIRONMENT_VARIABLE = \ EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE logger = colcon_logger.getChild(__name__) """ The group name for entry points identifying colcon extension points. While all entry points in this package start with `colcon_core.` other distributions might define entry points with a different prefix. Those need to be declared using this group name. 
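For example, a hypothetical distribution could declare its own extension point
in its setup.cfg like this::

    [options.entry_points]
    colcon_core.extension_point =
        colcon_foo.subverb = colcon_foo.subverb:FooSubverbExtensionPoint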
""" EXTENSION_POINT_GROUP_NAME = 'colcon_core.extension_point' def get_all_entry_points(): """ Get all entry points related to `colcon` and any of its extensions. :returns: mapping of entry point names to :class:`pkg_resources.EntryPoint` instances :rtype: dict """ global EXTENSION_POINT_GROUP_NAME colcon_extension_points = get_entry_points(EXTENSION_POINT_GROUP_NAME) entry_points = defaultdict(dict) working_set = WorkingSet() for dist in sorted(working_set): entry_map = dist.get_entry_map() for group_name in entry_map.keys(): # skip groups which are not registered as extension points if group_name not in colcon_extension_points: continue group = entry_map[group_name] for entry_point_name, entry_point in group.items(): entry_point.group_name = group_name if entry_point_name in entry_points[group_name]: previous = entry_points[group_name][entry_point_name] logger.error( f"Entry point '{group_name}.{entry_point_name}' is " f"declared multiple times, '{entry_point}' " f"overwriting '{previous}'") entry_points[group_name][entry_point_name] = \ (dist, entry_point) return entry_points def get_entry_points(group_name): """ Get the entry points for a specific group. :param str group_name: the name of the `entry_point` group :returns: mapping of group names to dictionaries which map entry point names to :class:`pkg_resources.EntryPoint` instances :rtype: dict """ entry_points = {} for entry_point in iter_entry_points(group=group_name): entry_point.group_name = group_name if entry_point.name in entry_points: previous_entry_point = entry_points[entry_point.name] logger.error( f"Entry point '{group_name}.{entry_point.name}' is declared " f"multiple times, '{entry_point}' overwriting " f"'{previous_entry_point}'") entry_points[entry_point.name] = entry_point return entry_points def load_entry_points(group_name, *, exclude_names=None): """ Load the entry points for a specific group. :param str group_name: the name of the `entry_point` group :param iterable exclude_names: the names of the entry points to exclude :returns: mapping of entry point names to loaded entry points :rtype: dict """ extension_types = {} for entry_point in get_entry_points(group_name).values(): if exclude_names and entry_point.name in exclude_names: continue try: extension_type = load_entry_point(entry_point) except RuntimeError: continue except Exception as e: # noqa: F841 # catch exceptions raised when loading entry point exc = traceback.format_exc() logger.error( 'Exception loading extension ' f"'{group_name}.{entry_point.name}': {e}\n{exc}") # skip failing entry point, continue with next one continue extension_types[entry_point.name] = extension_type return extension_types def load_entry_point(entry_point): """ Load the entry point. 
:param entry_point: the :class:`pkg_resources.EntryPoint` instance :returns: the loaded entry point :raises RuntimeError: if either the group name or the entry point name is listed in the environment variable :const:`EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE` """ global EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE blocklist = os.environ.get( EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE.name, None) if blocklist is None: blocklist = os.environ.get('COLCON_EXTENSION_BLACKLIST', None) if blocklist is not None: warnings.warn( "The environment variable 'COLCON_EXTENSION_BLACKLIST' has " "been deprecated, use 'COLCON_EXTENSION_BLOCKLIST' instead") if blocklist: blocklist = blocklist.split(os.pathsep) env_var_name = EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE.name if entry_point.group_name in blocklist: raise RuntimeError( 'The entry point group name is listed in the environment ' f"variable '{env_var_name}'") full_name = f'{entry_point.group_name}.{entry_point.name}' if full_name in blocklist: raise RuntimeError( 'The entry point name is listed in the environment variable ' f"'{env_var_name}'") return entry_point.load() colcon-core-0.17.1/colcon_core/environment/000077500000000000000000000000001465053734400206475ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/environment/__init__.py000066400000000000000000000157201465053734400227650ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections.abc import Iterable import os from pathlib import Path import traceback from colcon_core.location import get_relative_package_index_path from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority from colcon_core.shell import get_shell_extensions logger = colcon_logger.getChild(__name__) class EnvironmentExtensionPoint: """ The interface for environment extensions. An environment extension creates environment hooks for a specific environment variable and uses the shell extensions to generate scripts for each supported shell. For each instance the attribute `ENVIRONMENT_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the environment extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of environment extensions.""" PRIORITY = 100 def create_environment_hooks(self, prefix_path, pkg_name): """ Create the environment hooks for a package. This method must be overridden in a subclass. :param prefix_path: The prefix path of the package :param pkg_name: The package name :returns: iterable of generated hook paths :rtype: Iterable """ raise NotImplementedError() def get_environment_extensions(*, group_name=None): """ Get the available environment extensions. The extensions are ordered by their priority and entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name in list(extensions.keys()): extension = extensions[name] extension.ENVIRONMENT_NAME = name return order_extensions_by_priority(extensions) def create_environment_scripts( pkg, args, *, default_hooks=None, additional_hooks=None ): """ Create the environment scripts for a package. Also create a file with the runtime dependencies of each packages which can be used by the prefix scripts to source all packages in topological order. 
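Build tasks typically call this after installing a package, optionally passing
extra hooks as tuples of a prefix-relative path and arguments (illustrative;
the hook path is an assumption)::

    create_environment_scripts(
        pkg, args, additional_hooks=[('share/pkg_name/hook/extra.sh', [])])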
:param pkg: The package descriptor :param args: The parsed command line arguments :param list default_hooks: If none are parsed explicitly the hooks provided by :function:`create_environment_hooks` are used :param list additional_hooks: Any additional hooks which should be referenced by the generated scripts :returns: iterable of generated file paths :rtype: Iterable """ prefix_path = Path(args.install_base) files = [] files += create_environment_scripts_only( prefix_path, pkg, default_hooks=default_hooks, additional_hooks=additional_hooks) files.append( create_file_with_runtime_dependencies(prefix_path, pkg)) return files def create_environment_scripts_only( prefix_path, pkg, *, default_hooks=None, additional_hooks=None ): """ Create the environment scripts for a package. :param prefix_path: The prefix path :param pkg: The package descriptor :param list default_hooks: If none are parsed explicitly the hooks provided by :function:`create_environment_hooks` are used :param list additional_hooks: Any additional hooks which should be referenced by the generated scripts :returns: iterable of generated script paths :rtype: Iterable """ logger.log(1, 'create_environment_scripts_only(%s)', pkg.name) hooks = [] if default_hooks is None: default_hooks = create_environment_hooks(prefix_path, pkg.name) hooks += default_hooks if additional_hooks: hooks += additional_hooks hooks += pkg.hooks # ensure each hook is presented by a tuple # with the first element being a relative path # and the second element being a list of arguments hook_tuples = [] for hook in hooks: hook_args = [] if isinstance(hook, list) or isinstance(hook, tuple): hook_args = hook[1:] hook = hook[0] if os.path.isabs(str(hook)): hook = os.path.relpath(str(hook), start=str(prefix_path)) hook_tuples.append((hook, hook_args)) all_scripts = [] extensions = get_shell_extensions() for priority in extensions.keys(): extensions_same_prio = extensions[priority] for extension in extensions_same_prio.values(): try: scripts = extension.create_package_script( prefix_path, pkg.name, hook_tuples) # TODO: Disallow 'None' in v3.0 of ShellExtensionPoint if scripts is not None: assert isinstance(scripts, list), \ 'create_package_script() should return a list' all_scripts += scripts except Exception as e: # noqa: F841 # catch exceptions raised in shell extension exc = traceback.format_exc() logger.error( f"Exception in shell extension '{extension.SHELL_NAME}': " f'{e}\n{exc}') # skip failing extension, continue with next one return all_scripts def create_file_with_runtime_dependencies(prefix_path, pkg): """ Create a file with the runtime dependencies of the package. It can be used by the prefix scripts to source all packages in topological order. :param prefix_path: The prefix path :param pkg: The package descriptor :returns: generated file path :rtype: Path """ path = prefix_path / get_relative_package_index_path() / pkg.name logger.log(1, 'create_file_with_runtime_dependencies(%s)', path) path.parent.mkdir(parents=True, exist_ok=True) path.write_text( os.pathsep.join(sorted(pkg.dependencies.get('run', set())))) return path def create_environment_hooks(prefix_path, pkg_name): """ Create the environment hooks for a package. 
:param prefix_path: The prefix path of the package :param pkg_name: The package name :returns: iterable of generated hook paths :rtype: Iterable """ prefix_path = Path(prefix_path) all_hooks = [] extensions = get_environment_extensions() for extension in extensions.values(): try: hooks = extension.create_environment_hooks(prefix_path, pkg_name) assert isinstance(hooks, Iterable), \ 'create_environment_hooks() should return an iterable' except Exception as e: # noqa: F841 # catch exceptions raised in environment extension exc = traceback.format_exc() logger.error( 'Exception in environment extension ' f"'{extension.ENVIRONMENT_NAME}': {e}\n{exc}") # skip failing extension, continue with next one continue all_hooks += hooks return all_hooks colcon-core-0.17.1/colcon_core/environment/path.py000066400000000000000000000036541465053734400221650ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core import shell from colcon_core.environment import EnvironmentExtensionPoint from colcon_core.environment import logger from colcon_core.plugin_system import satisfies_version from colcon_core.python_install_path import get_python_install_path def _has_file(path): logger.log(1, "checking '%s'" % path) if not path.is_dir(): return False for child in path.iterdir(): if child.is_file(): return True return False class PathEnvironment(EnvironmentExtensionPoint): """Extend the `PATH` variable to find executables.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( EnvironmentExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def create_environment_hooks(self, prefix_path, pkg_name): # noqa: D102 subdirectory = 'bin' hooks = [] bin_path = prefix_path / subdirectory if _has_file(bin_path): hooks += shell.create_environment_hook( 'path', prefix_path, pkg_name, 'PATH', subdirectory, mode='prepend') return hooks class PythonScriptsPathEnvironment(EnvironmentExtensionPoint): """Extend the `PATH` variable to find python scripts.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( EnvironmentExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def create_environment_hooks(self, prefix_path, pkg_name): # noqa: D102 hooks = [] bin_path = get_python_install_path('scripts', {'base': prefix_path}) if _has_file(bin_path): rel_bin_path = bin_path.relative_to(prefix_path) hooks += shell.create_environment_hook( 'pythonscriptspath', prefix_path, pkg_name, 'PATH', str(rel_bin_path), mode='prepend') return hooks colcon-core-0.17.1/colcon_core/environment/pythonpath.py000066400000000000000000000022061465053734400234170ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core import shell from colcon_core.environment import EnvironmentExtensionPoint from colcon_core.environment import logger from colcon_core.plugin_system import satisfies_version from colcon_core.python_install_path import get_python_install_path class PythonPathEnvironment(EnvironmentExtensionPoint): """Extend the `PYTHONPATH` variable to find Python modules.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( EnvironmentExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def create_environment_hooks(self, prefix_path, pkg_name): # noqa: D102 hooks = [] python_path = get_python_install_path('purelib', {'base': prefix_path}) logger.log(1, "checking '%s'" % python_path) if python_path.exists(): rel_python_path = python_path.relative_to(prefix_path) hooks += 
shell.create_environment_hook( 'pythonpath', prefix_path, pkg_name, 'PYTHONPATH', str(rel_python_path), mode='prepend') return hooks colcon-core-0.17.1/colcon_core/environment_variable.py000066400000000000000000000003171465053734400230670ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import namedtuple EnvironmentVariable = namedtuple( 'EnvironmentVariable', ('name', 'description')) colcon-core-0.17.1/colcon_core/event/000077500000000000000000000000001465053734400174245ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/event/__init__.py000066400000000000000000000005171465053734400215400ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 """ Event classes have no common base class. They are being pushed to the queue of the :py:class:`colcon_core.event_reactor.EventReactor` and handled by instances of the :py:class:`colcon_core.event_handler.EventHandlerExtensionPoint` interface. """ colcon-core-0.17.1/colcon_core/event/command.py000066400000000000000000000072131465053734400214170ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os import sys from colcon_core.subprocess import escape_shell_argument class Command: """An event containing an invoked command.""" __slots__ = ('cmd', 'cwd', 'env', 'shell') def __init__(self, cmd, *, cwd, env=None, shell=False): """ Construct a Command. :param cmd: the sequence of program arguments :param cwd: the working directory :param env: a dictionary with environment variables :param shell: whether to use the shell as the program to execute """ self.cmd = cmd self.cwd = cwd self.env = env self.shell = shell def to_string(self): """ Get a string describing how to invoke the command. The message includes the working directory, modifications to environment variable and the command including the arguments. """ string = f"Invoking command in '{self.cwd}': " string += self._get_env_string() string += self._get_cmd_string() return string def _get_env_string(self): # determine differences in environment env = {} for var_name, new_value in (self.env or {}).items(): # ignore some environment variables if sys.platform != 'win32': if var_name in ('PWD', ): continue org_value = os.environ.get(var_name, None) # skip environment variables with the same value if new_value == org_value: continue # platform specific variable syntax if sys.platform != 'win32': var = '${' + var_name + '}' else: var = '%' + var_name + '%' if not org_value: # added environment variable value = new_value elif new_value.startswith(org_value): # appended environment variable value = var + new_value[len(org_value):] elif new_value.endswith(org_value): # prepended environment variable value = new_value[:-len(org_value)] + var else: # otherwise modified environment variable value = new_value env[var_name] = value # append variable assignments necessary for custom environment string = '' if env: for name in sorted(env.keys()): value = env[name] string += f'{name}={value} ' return string def _get_cmd_string(self): return ' '.join([ escape_shell_argument(c) if self.shell else c for c in self.cmd]) class CommandEnded(Command): """An event containing a finished command.""" __slots__ = ('returncode', ) def __init__(self, cmd, *, cwd, returncode, env=None, shell=False): """ Construct a CommandEnded. 
:param cmd: the sequence of program arguments :param cwd: the working directory :param returncode: the returncode of the command :param env: a dictionary with environment variables :param shell: whether to use the shell as the program to execute """ super().__init__(cmd, cwd=cwd, env=env, shell=shell) self.returncode = returncode def to_string(self): """Get a string describing the invoked command and its return code.""" string = f"Invoked command in '{self.cwd}' returned " \ f"'{self.returncode}': " string += self._get_env_string() string += self._get_cmd_string() return string colcon-core-0.17.1/colcon_core/event/job.py000066400000000000000000000043131465053734400205510ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 class JobUnselected: """An event containing the identifier of the unselected job.""" __slots__ = ('identifier', ) def __init__(self, identifier): """ Construct a JobUnselected. :param str identifier: The job identifier """ self.identifier = identifier class JobQueued: """An event containing the identifier of the queued job.""" __slots__ = ('identifier', 'dependencies') def __init__(self, identifier, dependencies=None): """ Construct a JobQueued. :param str identifier: The job identifier :param set dependencies: The name of the recursive dependencies """ self.identifier = identifier self.dependencies = dependencies class JobStarted: """An event containing the identifier of the started job.""" __slots__ = ('identifier', ) def __init__(self, identifier): """ Construct a JobStarted. :param str identifier: The job identifier """ self.identifier = identifier class JobProgress: """An event containing the identifier and progress of the job.""" __slots__ = ('identifier', 'progress') def __init__(self, identifier, progress): """ Construct a JobProgress. :param str identifier: The job identifier :param str progress: The message describing the progress of the job """ self.identifier = identifier self.progress = progress class JobEnded: """An event containing the identifier of the ended job and its rc.""" __slots__ = ('identifier', 'rc') def __init__(self, identifier, rc): """ Construct a JobEnded. :param str identifier: The job identifier :param rc: The return code of the job """ self.identifier = identifier self.rc = rc class JobSkipped: """An event containing the identifier of the skipped job.""" __slots__ = ('identifier', ) def __init__(self, identifier): """ Construct a JobSkipped. :param str identifier: The job identifier """ self.identifier = identifier colcon-core-0.17.1/colcon_core/event/output.py000066400000000000000000000013531465053734400213400ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 class StdoutLine: """ An event containing a single line of text intended for `stdout`. The line has a trailing newline. """ __slots__ = ('line', ) def __init__(self, line): """ Construct a StdoutLine. :param bytes|str line: The line of text """ self.line = line class StderrLine: """ An event containing a single line of text intended for `stderr`. The line has a trailing newline. """ __slots__ = ('line', ) def __init__(self, line): """ Construct a StderrLine. 
:param bytes|str line: The line of text """ self.line = line colcon-core-0.17.1/colcon_core/event/test.py000066400000000000000000000006141465053734400207560ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 class TestFailure: """An event containing the identifier of the job with test failures.""" __slots__ = ('identifier', ) def __init__(self, identifier): """ Construct a TestFailure. :param str identifier: The job identifier """ self.identifier = identifier colcon-core-0.17.1/colcon_core/event/timer.py000066400000000000000000000002651465053734400211210ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 class TimerEvent: """An event generated repeatedly based on a timer.""" __slots__ = () colcon-core-0.17.1/colcon_core/event_handler/000077500000000000000000000000001465053734400211215ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/event_handler/__init__.py000066400000000000000000000125371465053734400232420ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core.plugin_system import get_first_line_doc from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority class EventHandlerExtensionPoint: """ The interface for event handler extensions. An event handler extension processes events. For each instance the attribute `EVENT_HANDLER_NAME` is being set to the basename of the entry point registering the extension. Custom event handlers don't need to be subclasses but only be callables accepting a single `event` argument. They are being registered as observers at an :py:class:`colcon_core.EventReactor` instance. The handler should check the type of the event and only act on known types. """ """The version of the event handler extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of event handler extensions.""" PRIORITY = 100 def __init__(self): # noqa: D107 super().__init__() self.context = None self.enabled = True def __call__(self, event): """ Process an event if the event type is known. This method must be overridden in a subclass. :param event: The event """ raise NotImplementedError() def get_event_handler_extensions(*, context, group_name=None): """ Get the available event handler extensions. The extensions are ordered by their priority and entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.EVENT_HANDLER_NAME = name extension.context = context return order_extensions_by_priority(extensions) def add_event_handler_arguments(parser): """ Add the command line arguments for the event handler extensions. 
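The generated option accepts handler names suffixed with '+' to enable or '-'
to disable them, e.g. (illustrative; the available names depend on the
installed extensions)::

    colcon build --event-handlers console_start_end- console_direct+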
:param parser: The argument parser """ group = parser.add_argument_group(title='Event handler arguments') extensions = get_event_handler_extensions(context=None) completions = [] defaults = [] descriptions = '' for key in sorted(extensions.keys()): # only offer completion for non-default choices completions.append(key + ('-' if extensions[key].enabled else '+')) defaults.append(key + ('+' if extensions[key].enabled else '-')) extension = extensions[key] desc = get_first_line_doc(extension) # ignore extensions without a description # since they are already listed in the defaults if desc: # it requires a custom formatter to maintain the newline descriptions += f'\n* {key}: {desc}' argument = group.add_argument( '--event-handlers', nargs='*', choices=completions + defaults, metavar=('name1+', 'name2-'), help='Enable (+) or disable (-) event handlers (default: %s)%s' % (' '.join(defaults), descriptions)) def choices_completer(prefix, **kwargs): nonlocal completions return (c for c in completions if c.startswith(prefix)) argument.completer = choices_completer def apply_event_handler_arguments(extensions, args): """ Enable/disable the event handler extensions based on the passed arguments. :param extensions: The event handler extensions :param args: The parsed command line arguments """ for arg in (args.event_handlers or []): suffix = arg[-1] assert suffix in ('+', '-') key = arg[:-1] extension = extensions[key] extension.enabled = suffix == '+' def format_duration(seconds, *, fixed_decimal_points=None): """ Stringify a duration as hours, minutes, seconds and decimal points. :param float seconds: The duration in seconds :param int fixed_decimal_points: A fixed number of decimal points for the seconds, if None two, one or none will be used depending on the duration size :returns: The string representation of the duration :rtype: str """ if seconds < 0.0: raise ValueError( f"The duration '{seconds}' must be a non-negative number") minutes, seconds = divmod(seconds, 60) hours, minutes = divmod(minutes, 60) # determine number of decimal points for seconds if fixed_decimal_points is not None: decimal_points = fixed_decimal_points elif hours or minutes: decimal_points = 0 # compare rounded number to account for floating point imprecision elif round(seconds, 2) < 9.995: decimal_points = 2 else: decimal_points = 1 # check if rounding of seconds pushes it to a full minute if round(seconds, decimal_points) >= 60.0: # check if the desired decimal points change due to the wrapping if fixed_decimal_points is None and not minutes and not hours: decimal_points = 0 seconds = 0.0 minutes += 1 # check if rounding of minutes pushes it to a full hour if round(minutes, 0) >= 60.0: minutes = 0.0 hours += 1 format_string = f'{seconds:.{decimal_points}f}s' if hours or minutes: format_string = f'{minutes:.0f}min ' + format_string if hours: format_string = f'{hours:.0f}h ' + format_string return format_string colcon-core-0.17.1/colcon_core/event_handler/console_direct.py000066400000000000000000000031441465053734400244710ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import sys from colcon_core.event.output import StderrLine from colcon_core.event.output import StdoutLine from colcon_core.event_handler import EventHandlerExtensionPoint from colcon_core.plugin_system import satisfies_version class ConsoleDirectEventHandler(EventHandlerExtensionPoint): """ Pass output directly to stdout/err. 
The extension handles events of the following types: - :py:class:`colcon_core.event.output.StdoutLine` - :py:class:`colcon_core.event.output.StderrLine` """ # this handler is enabled by default # but other handlers might choose to change that presetting ENABLED_BY_DEFAULT = True def __init__(self): # noqa: D107 super().__init__() satisfies_version( EventHandlerExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') self.enabled = ConsoleDirectEventHandler.ENABLED_BY_DEFAULT self._handlers = { StdoutLine: sys.stdout, StderrLine: sys.stderr, } def __call__(self, event): # noqa: D102 data = event[0] for event_type, writable in self._handlers.items(): if isinstance(data, event_type): try: if isinstance(data.line, bytes): writable.buffer.write(data.line) else: writable.write(data.line) writable.flush() except BrokenPipeError: self._handlers.pop(event_type) raise return colcon-core-0.17.1/colcon_core/event_handler/console_start_end.py000066400000000000000000000043241465053734400252030ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import sys import time from colcon_core.event.job import JobEnded from colcon_core.event.job import JobStarted from colcon_core.event.test import TestFailure from colcon_core.event_handler import EventHandlerExtensionPoint from colcon_core.event_handler import format_duration from colcon_core.plugin_system import satisfies_version from colcon_core.subprocess import SIGINT_RESULT class ConsoleStartEndEventHandler(EventHandlerExtensionPoint): """ Output task name on start/end. The extension handles events of the following types: - :py:class:`colcon_core.event.job.JobStarted` - :py:class:`colcon_core.event.job.JobEnded` - :py:class:`colcon_core.event.test.TestFailure` """ def __init__(self): # noqa: D107 super().__init__() satisfies_version( EventHandlerExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') self._start_times = {} self._with_test_failures = set() def __call__(self, event): # noqa: D102 data = event[0] if isinstance(data, JobStarted): self._start_times[data.identifier] = time.monotonic() print(f'Starting >>> {data.identifier}', flush=True) elif isinstance(data, TestFailure): job = event[1] self._with_test_failures.add(job) elif isinstance(data, JobEnded): duration = \ time.monotonic() - self._start_times[data.identifier] duration_string = format_duration(duration) if not data.rc: msg = f'Finished <<< {data.identifier} [{duration_string}]' job = event[1] if job in self._with_test_failures: msg += '\t[ with test failures ]' writable = sys.stdout elif data.rc == SIGINT_RESULT: msg = f'Aborted <<< {data.identifier} [{duration_string}]' writable = sys.stdout else: msg = f'Failed <<< {data.identifier} ' \ f'[{duration_string}, exited with code {data.rc}]' writable = sys.stderr print(msg, file=writable, flush=True) colcon-core-0.17.1/colcon_core/event_handler/log_command.py000066400000000000000000000017631465053734400237610ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core.event.command import Command from colcon_core.event_handler import EventHandlerExtensionPoint from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version logger = colcon_logger.getChild(__name__) class LogCommandEventHandler(EventHandlerExtensionPoint): """ Log a 'debug' message for each command. In order to see the invoked commands the log level needs to be changed to show messages with the severity 'debug'. 
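For example (illustrative)::

    colcon --log-level debug build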
The extension handles events of the following types: - :py:class:`colcon_core.event.command.Command` """ def __init__(self): # noqa: D107 super().__init__() satisfies_version( EventHandlerExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def __call__(self, event): # noqa: D102 data = event[0] if isinstance(data, Command): logger.debug(data.to_string()) colcon-core-0.17.1/colcon_core/event_reactor.py000066400000000000000000000110431465053734400215140ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from queue import Empty from queue import Queue from threading import Thread import time import traceback from colcon_core.event.timer import TimerEvent from colcon_core.event_handler import apply_event_handler_arguments from colcon_core.event_handler import get_event_handler_extensions from colcon_core.logging import colcon_logger logger = colcon_logger.getChild(__name__) class EventReactor: """Notify registered observers for events posted to the queue.""" TIMER_INTERVAL = 0.1 def __init__(self): # noqa: D107 self._thread = Thread(target=self._run) self._queue = Queue() self._observers = [] self._last_timer_event = 0 def get_queue(self): """Get the event queue.""" return self._queue def register_observer(self, observer): """ Register an observer which gets called for each event. :param callable observer: The callback """ self._observers.append(observer) def _run(self): """ Process events and notify all observers. If no events are being process for :py:attribute:`TIMER_INTERVAL` seconds a :class:`TimerEvent` is being generated and processed. An :class:`EventReactorShutdown` event will stop the loop. """ while True: # send timer events in regular interval now = time.monotonic() time_since_last_timer_event = now - self._last_timer_event if time_since_last_timer_event >= self.TIMER_INTERVAL: self._notify_observers((TimerEvent(), None)) self._last_timer_event = now timeout = self.TIMER_INTERVAL else: timeout = self.TIMER_INTERVAL - time_since_last_timer_event # wait for next event or timeout try: event = self._queue.get(timeout=timeout) except Empty: continue # publish event self._notify_observers(event) self._queue.task_done() # the signal to end the processing thread if len(event) > 1 and isinstance(event[0], EventReactorShutdown): break def _notify_observers(self, event): for observer in self._observers: try: retval = observer(event) assert retval is None, 'event handler should return None' except Exception as e: # noqa: F841 # catch exceptions raised in event handler extension msg = 'Exception in event handler extension ' \ f"'{observer.EVENT_HANDLER_NAME}': {e}" if not isinstance(e, RuntimeError): msg += '\n' + traceback.format_exc() logger.error(msg) # skip failing extension, continue with next one def flush(self): """Wait until the queue is empty.""" while self._thread.is_alive(): if self._queue.empty(): return time.sleep(0.01) def start(self): """Start the event reactor.""" self._thread.start() def stop(self): """ Stop this event reactor and block until done. An :class:`EventReactorShutdown` event is added to the queue to notify all observers that the event reactor is shutting down. """ self._queue.put((EventReactorShutdown(), None)) logger.debug('joining thread') self._thread.join() logger.debug('joined thread') def __enter__(self): """ Start the event reactor. 
:returns: self """ self.start() return self def __exit__(self, exc_type, exc_val, exc_tb): """Stop the event reactor.""" self.stop() class EventReactorShutdown: """An event generated before the event reactor is shut down.""" __slots__ = () def create_event_reactor(context): """ Create an event reactor and add all event handlers as observers. :param context: The context is passed to all event handlers :returns: The event reactor """ event_reactor = EventReactor() event_handlers = get_event_handler_extensions(context=context) apply_event_handler_arguments(event_handlers, context.args) # register enabled event handlers for event_handler in event_handlers.values(): if not event_handler.enabled: continue event_reactor.register_observer(event_handler) return event_reactor colcon-core-0.17.1/colcon_core/executor/000077500000000000000000000000001465053734400201415ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/executor/__init__.py000066400000000000000000000314251465053734400222570ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from asyncio import CancelledError from enum import Enum import inspect import os import traceback import warnings from colcon_core.environment_variable import EnvironmentVariable from colcon_core.event.job import JobEnded from colcon_core.event.job import JobQueued from colcon_core.event.job import JobSkipped from colcon_core.event.job import JobStarted from colcon_core.event.output import StderrLine from colcon_core.event_reactor import create_event_reactor from colcon_core.logging import colcon_logger from colcon_core.plugin_system import get_first_line_doc from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_grouped_by_priority from colcon_core.subprocess import SIGINT_RESULT logger = colcon_logger.getChild(__name__) """Environment variable to override the default executor""" DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_DEFAULT_EXECUTOR', 'Select the default executor extension') class Job: """A job describes a unit of work.""" def __init__(self, *, identifier, dependencies, task, task_context): """ Construct a Job. :param str identifier: The job identifier :param set dependencies: The identifiers of other jobs which this job depends on :param task: The task extension :param task_context: The task context """ self._event_queue = None self.identifier = identifier self.dependencies = dependencies self.task = task self.returncode = None self.task_context = task_context def set_event_queue(self, event_queue): """ Set the event queue. Using the event queue the job can post events. A :class:`JobQueued` event with the package name from the task context is posted to the event queue. :param event_queue: The event queue """ self._event_queue = event_queue self.put_event_into_queue(JobQueued( self.task_context.pkg.name, self.task_context.dependencies)) async def __call__(self, *args, **kwargs): """ Perform the unit of work. 
The overview of the process: * Put a :class:`JobStarted` event into the queue * Pass the task context to the task * Invoke the task * In case the task is canceled return a :attribute:`SIGINT_RESULT` code * In case of an exception within the task put a :class:`StderrLine` event into the queue and re-raise the exception * Put a :class:`JobEnded` event into the queue :returns: The return code of the invoked task :raises Exception: Any exception the invoked task raises """ self.put_event_into_queue(JobStarted(self.task_context.pkg.name)) # replace function to use this job as the event context self.task_context.put_event_into_queue = self.put_event_into_queue self.task.set_context(context=self.task_context) rc = 0 try: rc = await self.task(*args, **kwargs) except CancelledError: rc = SIGINT_RESULT except Exception: # noqa: B902 rc = 1 self.put_event_into_queue( StderrLine(traceback.format_exc().encode())) raise finally: if self.returncode is None: self.returncode = rc or 0 self.put_event_into_queue( JobEnded(self.task_context.pkg.name, self.returncode)) return self.returncode def put_event_into_queue(self, event): """ Post a message event into the event queue. :param event: The event """ self._event_queue.put((event, self)) def __str__(self): """Use the identifier as the string representation of a job.""" return self.identifier class OnError(Enum): """Decision how to proceed when one job fails.""" # ongoing jobs will continue, pending jobs will be executed continue_ = 1 # ongoing jobs will be cancelled, pending jobs won't be executed interrupt = 2 # ongoing jobs will continue, pending jobs won't be executed skip_pending = 3 # ongoing jobs will continue, pending jobs will only be executed if they # don't (recursively) depend on a failed job skip_downstream = 4 class ExecutorExtensionPoint: """ The interface for executor extensions. An executor extension runs a set of jobs. For each instance the attribute `EXECUTOR_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the executor extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of executor extensions.""" PRIORITY = 100 def __init__(self): # noqa: D107 super().__init__() self._event_controller = None def add_arguments(self, *, parser): """ Add command line arguments specific to the executor. The method is intended to be overridden in a subclass. :param parser: The argument parser """ pass def set_event_controller(self, event_controller): """ Set the event controller. Using the event controller the executor can force a flush of all events. """ self._event_controller = event_controller def execute( self, args, jobs, *, on_error: OnError = None, abort_on_error=None ): """ Execute the passed jobs. This method must be overridden in a subclass. Subclass should not include the deprecated keyword argument `abort_on_error` in their signature. :param arguments: The passed arguments The deprecated API accepts the following separate arguments: :param args: The parsed command line arguments :param jobs: The jobs :param on_error: The decision how to proceed when one job fails :param abort_on_error: The flag if pending jobs should be aborted in case of any errors or individual jobs failing (deprecated, use `on_error` instead) """ raise NotImplementedError() def _flush(self): if self._event_controller is None: return self._event_controller.flush() def get_executor_extensions(*, group_name=None): """ Get the available executor extensions. 
The extensions are grouped by their priority and each group is ordered by the entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.EXECUTOR_NAME = name return order_extensions_grouped_by_priority(extensions) def add_executor_arguments(parser): """ Add the command line arguments for the executor extensions. :param parser: The argument parser """ group = parser.add_argument_group(title='Executor arguments') extensions = get_executor_extensions() keys = [] descriptions = '' for priority in extensions.keys(): extensions_same_prio = extensions[priority] assert len(extensions_same_prio) == 1, \ 'Executor extensions must have unique priorities' for key, extension in extensions_same_prio.items(): keys.append(key) desc = get_first_line_doc(extension) if not desc: # show extensions without a description # to mention the available options desc = '' # it requires a custom formatter to maintain the newline descriptions += f'\n* {key}: {desc}' assert keys, 'No executor extensions found' default = os.environ.get(DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE.name) if default not in keys: default = keys[0] group.add_argument( '--executor', type=str, choices=keys, default=default, help=f'The executor to process all packages (default: {default})' f'{descriptions}') # noqa: E131 for priority in extensions.keys(): extensions_same_prio = extensions[priority] for extension in extensions_same_prio.values(): try: retval = extension.add_arguments(parser=group) assert retval is None, 'add_arguments() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in executor extension exc = traceback.format_exc() logger.error( 'Exception in executor extension ' f"'{extension.EXECUTOR_NAME}': {e}\n{exc}") # skip failing extension, continue with next one def execute_jobs( context, jobs, *, on_error: OnError = None, abort_on_error=None, pre_execution_callback=None ): """ Execute jobs. The overview of the process: * One executor extension is being chosen based on the command line arguments. * Create an event controller. * Pass the event controller to the executor extension. * Pass the event queue to all jobs. * Start the event controller. * Invoke the executor extension to execute the jobs. * Join the event controller. 
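A typical invocation passes the verb's command context and an ordered
dictionary of jobs (illustrative sketch; `context` and `jobs` are provided by
the calling verb)::

    rc = execute_jobs(context, jobs, on_error=OnError.skip_downstream)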
:param jobs: The ordered dictionary of jobs :param on_error: The decision how to proceed when one job fails :param abort_on_error: The flag if pending jobs should be aborted in case of individual jobs failing (deprecated, use `on_error` instead) :param pre_execution_callback: An optional callable taking a keyword argument `event_queue` which will be invoked before the executors `execute()` method :returns: The return code """ assert on_error is None or abort_on_error is None, \ 'Only one of the two keyword arguments can be passed' # keep default behavior of deprecated keyword argument if on_error is None and abort_on_error is None: on_error = OnError.interrupt if abort_on_error is not None: # pragma: no cover warnings.warn( "'colcon_core.executor.execute_jobs' was called with the " "deprecated keyword argument 'abort_on_error'", stacklevel=2) on_error = OnError.interrupt if abort_on_error else OnError.continue_ executor = select_executor_extension(context.args) assert executor logger.info("Executing jobs using '%s' executor", executor.EXECUTOR_NAME) # create event reactor with handlers specified by the args with create_event_reactor(context) as event_controller: executor.set_event_controller(event_controller) # allow the caller to post additional events if pre_execution_callback is not None: pre_execution_callback(event_queue=event_controller.get_queue()) # pass queue to jobs to publish events for job in jobs.values(): job.set_event_queue(event_controller.get_queue()) func = executor.execute signature = inspect.signature(func) kwargs = {} if 'on_error' in signature.parameters: kwargs['on_error'] = on_error else: # pragma: no cover # fallback to legacy API assert 'abort_on_error' in signature.parameters warnings.warn( f"The ExecutorExtensionPoint '{executor.EXECUTOR_NAME}' uses " "a deprecated signature for the 'execute' method") kwargs['abort_on_error'] = on_error == OnError.interrupt try: rc = func(context.args, jobs, **kwargs) except Exception as e: # noqa: F841 # catch exceptions raised in executor extension exc = traceback.format_exc() logger.error( f"Exception in executor extension '{executor.EXECUTOR_NAME}': " f'{e}\n{exc}') rc = 1 finally: # generate an event for every skipped job for job in jobs.values(): if job.returncode is not None: continue event_controller.get_queue().put( (JobSkipped(job.identifier), job)) return rc def select_executor_extension(args): """ Get the executor extension. 
:param args: The parsed command line arguments :returns: The executor extension """ executor_extensions = get_executor_extensions() for priority in executor_extensions.keys(): extensions_same_prio = executor_extensions[priority] for key, extension in extensions_same_prio.items(): if key == args.executor: return extension # one executor should always be selected by the default value # in case their are no executor extensions available the add argument # function should have already failed assert False colcon-core-0.17.1/colcon_core/executor/sequential.py000066400000000000000000000102531465053734400226660ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import asyncio import logging import signal import sys import traceback from colcon_core.executor import ExecutorExtensionPoint from colcon_core.executor import OnError from colcon_core.logging import colcon_logger from colcon_core.logging import get_effective_console_level from colcon_core.plugin_system import satisfies_version from colcon_core.subprocess import new_event_loop from colcon_core.subprocess import SIGINT_RESULT logger = colcon_logger.getChild(__name__) class SequentialExecutor(ExecutorExtensionPoint): """ Process one package at a time. The sequence follows the topological ordering. """ def __init__(self): # noqa: D107 super().__init__() satisfies_version( ExecutorExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def execute(self, args, jobs, *, on_error=OnError.interrupt): # noqa: D102 # avoid debug message from asyncio when colcon uses debug log level asyncio_logger = logging.getLogger('asyncio') log_level = get_effective_console_level(colcon_logger) asyncio_logger.setLevel(log_level) rc = 0 loop = new_event_loop() asyncio.set_event_loop(loop) jobs = jobs.copy() try: while jobs: name, job = jobs.popitem(last=False) coro = job() future = asyncio.ensure_future(coro, loop=loop) try: logger.debug(f"run_until_complete '{name}'") loop.run_until_complete(future) except KeyboardInterrupt: logger.debug( f"run_until_complete '{name}' was interrupted") # override job rc with special SIGINT value job.returncode = SIGINT_RESULT # ignore further SIGINTs signal.signal(signal.SIGINT, signal.SIG_IGN) # wait for job which has also received a SIGINT if not future.done(): logger.debug(f"run_until_complete '{name}' again") loop.run_until_complete(future) assert future.done() # read potential exception to avoid asyncio error _ = future.exception() # noqa: F841 logger.debug(f"run_until_complete '{name}' finished") return signal.SIGINT except Exception as e: # noqa: F841 exc = traceback.format_exc() logger.error( f"Exception in job execution '{name}': {e}\n{exc}") return 1 result = future.result() logger.debug( f"run_until_complete '{name}' finished with '{result}'") if result: if not rc: rc = result if on_error in (OnError.interrupt, OnError.skip_pending): # skip pending jobs return rc if on_error == OnError.skip_downstream: # skip downstream jobs of failed one for pending_name, pending_job in list(jobs.items()): if job.identifier in pending_job.dependencies: del jobs[pending_name] finally: try: # new in Python 3.7 all_tasks = asyncio.all_tasks except AttributeError: all_tasks = asyncio.Task.all_tasks for task in all_tasks(loop): if not task.done(): logger.error(f"Task '{task}' not done") # HACK on Windows closing the event loop seems to hang after Ctrl-C # even though no futures are pending, but appears fixed in py3.8 if sys.platform != 'win32' or sys.version_info >= (3, 8): 
logger.debug('closing loop') loop.close() logger.debug('loop closed') else: logger.debug('skipping loop closure') return rc colcon-core-0.17.1/colcon_core/extension_point.py000066400000000000000000000203101465053734400220760ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Copyright 2023 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 from collections import defaultdict from itertools import chain import os import sys import traceback import warnings try: from importlib.metadata import distributions from importlib.metadata import EntryPoint from importlib.metadata import entry_points except ImportError: # TODO: Drop this with Python 3.7 support from importlib_metadata import distributions from importlib_metadata import EntryPoint from importlib_metadata import entry_points from colcon_core.environment_variable import EnvironmentVariable from colcon_core.logging import colcon_logger _EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_EXTENSION_BLOCKLIST', 'Block extensions which should not be used') """Environment variable to block extensions""" EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE = \ _EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE logger = colcon_logger.getChild(__name__) """ The group name for entry points identifying colcon extension points. While all entry points in this package start with `colcon_core.` other distributions might define entry points with a different prefix. Those need to be declared using this group name. """ EXTENSION_POINT_GROUP_NAME = 'colcon_core.extension_point' _ENTRY_POINTS_CACHE = [] def _get_unique_distributions(): seen = set() for dist in distributions(): dist_name = dist.metadata['Name'] if dist_name not in seen: seen.add(dist_name) yield dist def _get_entry_points(): for dist in _get_unique_distributions(): for entry_point in dist.entry_points: # Modern EntryPoint instances should already have this set if not hasattr(entry_point, 'dist'): entry_point.dist = dist yield entry_point def _get_cached_entry_points(): if not _ENTRY_POINTS_CACHE: if sys.version_info >= (3, 10): # We prefer using importlib.metadata.entry_points because it # has an internal optimization which allows us to load the entry # points without reading the individual PKG-INFO files, while # still visiting each unique distribution only once. all_entry_points = entry_points() if isinstance(all_entry_points, dict): # Prior to Python 3.12, entry_points returned a (deprecated) # dict. Unfortunately, the "future-proof" recommended # pattern is to add filter parameters, but we actually # want to cache everything so that doesn't work here. with warnings.catch_warnings(): warnings.filterwarnings( 'ignore', 'SelectableGroups dict interface is deprecated', DeprecationWarning, module=__name__) all_entry_points = chain.from_iterable( all_entry_points.values()) _ENTRY_POINTS_CACHE.extend(all_entry_points) else: # If we don't have Python 3.10, we must read each PKG-INFO to # get the name of the distribution so that we can skip the # "shadowed" distributions properly. _ENTRY_POINTS_CACHE.extend(_get_entry_points()) return _ENTRY_POINTS_CACHE def clear_entry_point_cache(): """Purge the entry point cache.""" _ENTRY_POINTS_CACHE.clear() def get_all_extension_points(): """ Get all extension points related to `colcon` and any of its extensions. 
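The returned mapping has the following shape (illustrative values)::

    {
        'colcon_core.verb': {
            'build': (
                'colcon_core.verb.build:BuildVerb', 'colcon-core', '0.17.1'),
            ...
        },
        ...
    }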
:returns: mapping of extension point groups to dictionaries which map extension point names to a tuple of extension point values, dist name, and dist version :rtype: dict """ global EXTENSION_POINT_GROUP_NAME colcon_extension_points = get_extension_points(EXTENSION_POINT_GROUP_NAME) colcon_extension_points.setdefault(EXTENSION_POINT_GROUP_NAME, None) extension_points = defaultdict(dict) for entry_point in _get_cached_entry_points(): if entry_point.group not in colcon_extension_points: continue dist_metadata = entry_point.dist.metadata ep_tuple = ( entry_point.value, dist_metadata['Name'], dist_metadata['Version'], ) if entry_point.name in extension_points[entry_point.group]: previous = extension_points[entry_point.group][entry_point.name] logger.error( f"Entry point '{entry_point.group}.{entry_point.name}' is " f"declared multiple times, '{ep_tuple}' " f"overwriting '{previous}'") extension_points[entry_point.group][entry_point.name] = ep_tuple return extension_points def get_extension_points(group): """ Get the extension points for a specific group. :param str group: the name of the extension point group :returns: mapping of extension point names to extension point values :rtype: dict """ extension_points = {} for entry_point in _get_cached_entry_points(): if entry_point.group != group: continue if entry_point.name in extension_points: previous_entry_point = extension_points[entry_point.name] logger.error( f"Entry point '{group}.{entry_point.name}' is declared " f"multiple times, '{entry_point.value}' overwriting " f"'{previous_entry_point}'") extension_points[entry_point.name] = entry_point.value return extension_points def load_extension_points(group, *, excludes=None): """ Load the extension points for a specific group. :param str group: the name of the extension point group :param iterable excludes: the names of the extension points to exclude :returns: mapping of entry point names to loaded entry points :rtype: dict """ extension_types = {} for name, value in get_extension_points(group).items(): if excludes and name in excludes: continue try: extension_type = load_extension_point(name, value, group) except RuntimeError: continue except Exception as e: # noqa: F841 # catch exceptions raised when loading entry point exc = traceback.format_exc() logger.error( 'Exception loading extension ' f"'{group}.{name}': {e}\n{exc}") # skip failing entry point, continue with next one continue extension_types[name] = extension_type return extension_types def load_extension_point(name, value, group): """ Load the extension point. :param name: the name of the extension entry point. :param value: the value of the extension entry point. :param group: the name of the group the extension entry point is a part of. 
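For example (illustrative; mirrors a declaration from this package)::

    extension_type = load_extension_point(
        'build', 'colcon_core.verb.build:BuildVerb', 'colcon_core.verb')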
:returns: the loaded entry point :raises RuntimeError: if either the group name or the entry point name is listed in the environment variable :const:`EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE` """ global EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE blocklist = os.environ.get( EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE.name, None) if blocklist: blocklist = blocklist.split(os.pathsep) if group in blocklist: raise RuntimeError( 'The entry point group name is listed in the environment ' f"variable '{EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE.name}'") full_name = f'{group}.{name}' if full_name in blocklist: raise RuntimeError( 'The entry point name is listed in the environment variable ' f"'{EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE.name}'") return EntryPoint(name, value, group).load() def override_blocklist_variable(variable): """ Override the blocklist environment variable. :param EnvironmentVariable variable: The new blocklist environment variable, or None to reset to default. """ if variable is None: variable = _EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE global EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE = variable colcon-core-0.17.1/colcon_core/feature_flags.py000066400000000000000000000041631465053734400214700ustar00rootroot00000000000000# Copyright 2024 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 import os from colcon_core.environment_variable import EnvironmentVariable from colcon_core.logging import colcon_logger logger = colcon_logger.getChild(__name__) """Environment variable to enable feature flags""" FEATURE_FLAGS_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_FEATURE_FLAGS', 'Enable pre-production features and behaviors') _REPORTED_USES = set() IMPLEMENTED_FLAGS = set() def check_implemented_flags(): """Check for and warn about flags which have been implemented.""" implemented = IMPLEMENTED_FLAGS.intersection(get_feature_flags()) if implemented: logger.warning( 'The following feature flags have been implemented and should no ' 'longer be specified in ' f'{FEATURE_FLAGS_ENVIRONMENT_VARIABLE.name}: ' f"{','.join(implemented)}") def get_feature_flags(): """ Retrieve all enabled feature flags. :returns: List of enabled flags :rtype: list """ return [ flag for flag in ( os.environ.get(FEATURE_FLAGS_ENVIRONMENT_VARIABLE.name) or '' ).split(os.pathsep) if flag ] def is_feature_flag_set(flag): """ Determine if a specific feature flag is enabled. Feature flags are case-sensitive and separated by the os-specific path separator character. :param str flag: Name of the flag to search for :returns: True if the flag is set :rtype: bool """ if flag in IMPLEMENTED_FLAGS: return True elif flag in get_feature_flags(): if flag not in _REPORTED_USES: if not _REPORTED_USES: logger.warning( 'One or more feature flags have been enabled using the ' f'{FEATURE_FLAGS_ENVIRONMENT_VARIABLE.name} environment ' 'variable. These features may be unstable and may change ' 'API or behavior at any time.') logger.warning(f'Enabling feature: {flag}') _REPORTED_USES.add(flag) return True return False colcon-core-0.17.1/colcon_core/generic_decorator.py000066400000000000000000000031441465053734400223350ustar00rootroot00000000000000# Copyright 2023 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 class GenericDecorator: """A generic class decorator.""" def __init__(self, decoree, **kwargs): """ Create a new decorated class instance. 
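        A minimal usage sketch (``Original`` is a hypothetical class)::

            class Original:
                def greet(self):
                    return 'hello'

            decorated = GenericDecorator(Original(), selected=True)
            decorated.greet()   # forwarded to the decoree, returns 'hello'
            decorated.selected  # keyword argument stored on the decorator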
:param decoree: The instance to decorate :param **kwargs: The keyword arguments are set as attributes on this instance """ assert '_decoree' not in kwargs kwargs['_decoree'] = decoree for k, v in kwargs.items(): self.__dict__[k] = v def __getattr__(self, name): """ Get an attribute from this decorator if it exists or the decoree. :param str name: The name of the attribute :returns: The attribute value :raises AttributeError: if the attribute doesn't exist in either of the two instances """ if '_decoree' not in self.__dict__: raise AttributeError(name) return getattr(self.__dict__['_decoree'], name) def __setattr__(self, name, value): """ Set an attribute value on this decorator if it exists or the decoree. :param str name: The name of the attribute :param value: The attribute value """ assert name != '_decoree' # overwrite existing attribute if name in self.__dict__: self.__dict__[name] = value return if '_decoree' not in self.__dict__: self.__dict__[name] = value return # set attribute on decoree setattr(self.__dict__['_decoree'], name, value) colcon-core-0.17.1/colcon_core/location.py000066400000000000000000000170351465053734400204730ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from contextlib import suppress import os from pathlib import Path import uuid from colcon_core.logging import colcon_logger logger = colcon_logger.getChild(__name__) _config_path = None _config_path_env_var = None def get_config_path(): """ Get the base path for configuration files. :function:`set_default_config_path` must have been called before. :returns: The base path for configuration files :rtype: Path """ global _config_path_env_var if _config_path_env_var is not None: path = os.environ.get(_config_path_env_var) if path: return Path(str(path)) global _config_path assert _config_path is not None return _config_path def set_default_config_path(*, path, env_var=None): """ Set the base path for configuration files. Optionally an environment variable name can be provided which if set will override the configured base path. An info message is logged which states the used path. :param path: The base path :param str env_var: The name of the environment variable """ global _config_path global _config_path_env_var from colcon_core.command import register_command_exit_handler register_command_exit_handler(_reset_config_path_globals) _config_path = Path(str(path)) _config_path_env_var = env_var config_path = get_config_path() logger.info(f"Using config path '{config_path}'") def _reset_config_path_globals(): global _config_path global _config_path_env_var _config_path = None _config_path_env_var = None _log_base_path = None _log_base_path_default = None _log_base_path_env_var = None _log_subdirectory = None def get_log_path(): """ Get the base path for logging. :function:`set_default_log_path` must have been called before. :returns: The base path for logging or None if logging is disabled :rtype: Path or None """ global _log_base_path global _log_base_path_env_var path = None if _log_base_path is not None: path = _log_base_path elif ( _log_base_path_env_var is not None and os.environ.get(_log_base_path_env_var) ): path = os.environ.get(_log_base_path_env_var) else: global _log_base_path_default path = _log_base_path_default if path == os.devnull: return None return Path(str(path)) / _log_subdirectory def set_default_log_path( *, base_path, env_var=None, subdirectory=None, default='log' ): """ Set the base path for logging. 
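    A typical invocation could look like this (the argument values are
    illustrative)::

        set_default_log_path(
            base_path=args.log_base,  # illustrative value
            env_var='COLCON_LOG_PATH',
            subdirectory='build_2024-01-01_12-00-00')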
Optionally an environment variable name can be provided which if set will override the configured base path. An info message is logged which states the used path. :param base_path: The base path :param str env_var: The name of the environment variable :param str subdirectory: The name of the subdirectory, if not provided a random uuid will be used instead :param default: The default base path if the passed base path is None and the environment variable is not set """ global _log_base_path global _log_base_path_default global _log_base_path_env_var global _log_subdirectory from colcon_core.command import register_command_exit_handler register_command_exit_handler(_reset_log_path_globals) _log_base_path = base_path _log_base_path_default = default _log_base_path_env_var = env_var assert subdirectory is None or subdirectory _log_subdirectory = subdirectory \ if subdirectory is not None \ else str(uuid.uuid4()) def _reset_log_path_globals(): global _log_base_path global _log_base_path_default global _log_base_path_env_var global _log_subdirectory _log_base_path = None _log_base_path_default = None _log_base_path_env_var = None _log_subdirectory = None _create_log_path_called = False def create_log_path(verb_name): """ Create a not yet existing logging directory. The logging directory returned by :function:`get_log_path` must not yet exist on the first call of this function. If it does exist the function will append a serial number to the path until the path doesn't exist and can be created. Subsequent invocations of this function are noops. A `COLCON_IGNORE` marker file is being placed in the parent directory of the logging directory to avoid it being crawled for packages. Two symlinks are created as siblings of the log path: * `latest_` linking to the log path * `latest` linking to `latest_` On Windows platforms Administrator privileges are required to create these symlinks. Otherwise they are being skipped. 
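    For a ``build`` invocation the resulting structure could look like this
    (the directory names are illustrative)::

        log/
            COLCON_IGNORE
            latest -> latest_build
            latest_build -> build_2024-01-01_12-00-00
            build_2024-01-01_12-00-00/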
:param str verb_name: The verb name """ global _create_log_path_called if _create_log_path_called: return _create_log_path_called = True from colcon_core.command import register_command_exit_handler register_command_exit_handler(_reset_log_path_creation_global) path = get_log_path() try: # try to create the directory os.makedirs(str(path)) except FileExistsError: # if it already exists try again with serial number suffixes global _log_subdirectory assert path.name == _log_subdirectory suffix = 2 while True: path_with_suffix = path.with_name(path.name + '_' + str(suffix)) try: os.makedirs(str(path_with_suffix)) except FileExistsError: suffix += 1 assert suffix < 1000, 'Prevent infinite loop' continue _log_subdirectory = path_with_suffix.name assert get_log_path() == path_with_suffix path = path_with_suffix break logger.info(f"Using log path '{path}'") # ensure the base log path has an ignore marker file # to avoid recursively crawling through log directories from colcon_core.package_identification.ignore import IGNORE_MARKER ignore_marker = path.parent / IGNORE_MARKER ignore_marker.touch() # create latest symlinks if verb_name is None: _create_symlink(path, path.parent / 'latest') else: _create_symlink(path, path.parent / f'latest_{verb_name}') _create_symlink( path.parent / f'latest_{verb_name}', path.parent / 'latest') def _reset_log_path_creation_global(): global _create_log_path_called _create_log_path_called = False def _create_symlink(src, dst): if dst.exists(): # directory exists or valid symlink if not dst.is_symlink(): # do not change non symlink paths return if dst.resolve() == src.resolve(): # desired symlink already exists return # remove valid symlink to wrong destination (previous if, no return) # or invalid symlink (non-existing else from previous if) if dst.is_symlink(): with suppress(FileNotFoundError): dst.unlink() # use relative path when possible with suppress(ValueError): src = src.relative_to(dst.parent) # Administrator privileges are required on Windows with suppress(FileNotFoundError, OSError): os.symlink(str(src), str(dst)) def get_relative_package_index_path(): """ Get the prefix-relative path to the package index. :returns: The relative path to the package index :rtype: Path """ # the value is also being hard coded in shell/template/prefix_util.py return Path('share', 'colcon-core', 'packages') colcon-core-0.17.1/colcon_core/logging.py000066400000000000000000000113141465053734400203030ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import logging import os logging.basicConfig() colcon_logger = logging.getLogger('colcon') try: import coloredlogs except ImportError: # pragma: no cover pass else: log_format = os.environ.get( 'COLOREDLOGS_LOG_FORMAT', '%(name)s %(levelname)s %(message)s') coloredlogs.install(level=1, logger=colcon_logger, fmt=log_format) def set_logger_level_from_env(logger, env_name): """ Set the log level based on an environment variable. A warning message is logged if the environment variable has an unsupported value. :param logger: The logger :param str env_var: The name of the environment variable """ log_level = os.environ.get(env_name) if log_level: try: numeric_log_level = get_numeric_log_level(log_level) except ValueError as e: # noqa: F841 logger.warning( f"environment variable '{env_name}' has unsupported value " f"'{log_level}', {e}") else: logger.setLevel(numeric_log_level) def get_numeric_log_level(value): """ Convert a log level into a numeric value. 
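    For example::

        >>> get_numeric_log_level('info')
        20
        >>> get_numeric_log_level('10')
        10

    An unknown name such as ``'VERBOSE'`` or a non-positive number raises a
    ``ValueError``.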
:param value: The log level can be either a string (case insensitive) or a positive number :returns: The numeric value :rtype: int :raises ValueError: if the log level string is not one of the valid names (`CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`) or if the numeric value is zero or negative """ try: value = int(value) except ValueError: string_value = value.upper() value = logging.getLevelName(string_value) if value == 'Level ' + string_value: raise ValueError( 'valid names are: CRITICAL, ERROR, WARNING, INFO, DEBUG ' '(case-insensitive)') else: if value < 1: raise ValueError('numeric log levels must be positive') return value def add_file_handler(logger, path): """ Add a file handler to the logger which logs messages of all levels. :param logger: The logger to add the file handler to :param path: The path of the generated log file :returns: The added file handler :rtype: logging.FileHandler """ class Filter(logging.Filter): def __init__(self, ignored_name): super().__init__() self._ignored_name = ignored_name def filter(self, record): # noqa: A003 if ( record.name == self._ignored_name or record.name.startswith(self._ignored_name) ): return 0 return super().filter(record) # get stream handler formatter from root logger to reuse for file handler formatter = None for handler in logging.getLogger().handlers: if isinstance(handler, logging.StreamHandler): formatter = handler.formatter # filter colcon specific log messages from default stream handler handler.addFilter(Filter(logger.name)) # add a stream handler replacing the one filtered on the root logger handler = logging.StreamHandler() if formatter: # use same formatter as for stream handler handler.setFormatter(formatter) handler.setLevel(logger.getEffectiveLevel()) logger.addHandler(handler) # add a file handler writing all log levels handler = logging.FileHandler(str(path)) if formatter: # if the format string doesn't use the time information # prepend the relative time to every message if not formatter.usesTime(): format_message = formatter.formatMessage def format_message_with_relative_time(record): nonlocal format_message return '[%.3fs] ' % (record.created - logging._startTime) + \ format_message(record) formatter.formatMessage = format_message_with_relative_time # use same formatter as for stream handler handler.setFormatter(formatter) handler.setLevel(1) logger.addHandler(handler) # change the logger to handle all levels logger.setLevel(1) return handler def get_effective_console_level(logger): """ Determine the effective log level of to the console. On a typical logger, this is the same as getEffectiveLevel(). After a call to add_file_handler, this will continue to return the same level though getEffectiveLevel() will now always return ``1``. 
:param logger: The logger to inspect :returns: the log level """ for handler in logger.handlers: if isinstance(handler, logging.StreamHandler): return handler.level return logger.getEffectiveLevel() colcon-core-0.17.1/colcon_core/package_augmentation/000077500000000000000000000000001465053734400224515ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/package_augmentation/__init__.py000066400000000000000000000200671465053734400245670ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict import copy import traceback from colcon_core.logging import colcon_logger from colcon_core.package_descriptor import PackageDescriptor from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority logger = colcon_logger.getChild(__name__) class PackageAugmentationExtensionPoint: """ The interface for package augmentation extensions. A package augmentation extension adds additional information to a package descriptor. For each instance the attribute `PACKAGE_AUGMENTATION_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the package augmentation extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of package augmentation extensions.""" PRIORITY = 100 def augment_packages( self, descs, *, additional_argument_names=None ): """ Augment the package descriptors with additional information. The method is intended to be overridden in a subclass. The default implementation invokes `augment_package` for each package descriptor. :param descs: The package descriptors :param additional_argument_names: A list of additional arguments to consider """ for desc in descs: self.augment_package( desc, additional_argument_names=additional_argument_names) def augment_package( self, desc: PackageDescriptor, *, additional_argument_names=None ): """ Augment the package descriptor with additional information. The method is intended to be overridden in a subclass. If the `augment_packages` method is being overridden and never calls this method it doesn't have to be implemented. :param desc: The package descriptor :param additional_argument_names: A list of additional arguments to consider """ raise NotImplementedError() def get_package_augmentation_extensions(*, group_name=None): """ Get the available package augmentation extensions. The extensions are ordered by their priority and entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.PACKAGE_AUGMENTATION_NAME = name return order_extensions_by_priority(extensions) def augment_packages( descs, *, additional_argument_names=None, augmentation_extensions=None ): """ Augment package descriptors with additional information. 
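    A typical call site passes the descriptors returned by the discovery
    step, e.g. (sketch)::

        descriptors = discover_packages(args, identification_extensions)
        augment_packages(descriptors)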
:param descs: the packages :type descs: set of :py:class:`colcon_core.package_descriptor.PackageDescriptor` """ if augmentation_extensions is None: augmentation_extensions = get_package_augmentation_extensions() for extension in augmentation_extensions.values(): try: retval = extension.augment_packages( descs, additional_argument_names=additional_argument_names) assert retval is None, 'augment_packages() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in augmentation extension exc = traceback.format_exc() logger.error( 'Exception in package augmentation extension ' f"'{extension.PACKAGE_AUGMENTATION_NAME}': {e}\n{exc}") # skip failing extension, continue with next one def update_descriptor( desc: PackageDescriptor, data: dict, *, additional_argument_names=None ): """ Update the package descriptor with additional information. For the keys `name` and `type` the values from `data` are set on the descriptor attributes with the same names. For the key `dependencies` the values are being added to the dependencies in each of the following categories: `build`, `run`, and `test`. For the keys `build_dependencies`, `run_dependencies`, and `test_dependencies` the values are being added to the dependencies in the category with the same name. For the key `hooks` the values are being added to the list of hooks. Any key-value pair not explicitly mentioned above is being used to update the metadata if the key is in the list of additional argument names. See :function:`update_metadata` for details how the metadata is updated. If the additional argument names is a list with the single value `*` all keys not explicitly mentioned above are being used to update the metadata. :param desc: the package descriptor :param data: The dictionary with additional information :param additional_argument_names: A dict of option names to destination names or a list of argument names """ dep_types = ('build', 'run', 'test') # transfer generic dependencies to each specific type if 'dependencies' in data: for d in data['dependencies']: for dep_type in dep_types: desc.dependencies[dep_type].add(d) # transfer type specific dependencies for dep_type in dep_types: key = f'{dep_type}-dependencies' if key in data: for d in data[key]: desc.dependencies[dep_type].add(d) # transfer hooks if 'hooks' in data: for d in data['hooks']: desc.hooks.append(d) # transfer any other metadata if additional_argument_names == ['*']: additional_argument_names = [] # skip any of the already explicitly handled names ignored_names = ['name', 'type', 'dependencies', 'hooks'] for dep_type in dep_types: ignored_names.append(f'{dep_type}-dependencies') for name in data.keys(): if name in ignored_names: continue additional_argument_names.append(name) if isinstance(additional_argument_names, list): additional_argument_names = OrderedDict([ (name, name) for name in additional_argument_names]) for option, dest in (additional_argument_names or {}).items(): if option in data: update_metadata(desc, dest, data[option]) def update_metadata(desc, key, value): """ Update the metadata of a package descriptor. If the key doesn't exist in the metadata yet the key-value pair is added. If the key exists and the existing value as well as the passed value are dictionaries the existing value is updated with the passed value. If the key exists and the existing value as well as the passed value are lists the existing value is extended with the passed value. 
If the key exists and the existing value as well as the passed value are sets the existing value is union updated with the passed value. Otherwise the existing value is overwritten with the passed value. If the types were different a warning message is logged. :param desc: the package descriptor :param key: The key :param value: The value """ if key not in desc.metadata: # add value to the metadata # copy value to avoid changes to either of them to affect each other desc.metadata[key] = copy.deepcopy(value) return old_value = desc.metadata[key] if isinstance(old_value, dict) and isinstance(value, dict): # update dictionary old_value.update(value) return if isinstance(old_value, list) and isinstance(value, list): # extend list old_value += value return if isinstance(old_value, set) and isinstance(value, set): # union update set old_value |= value return if type(old_value) is not type(value): logger.warning( f"update package '{desc.name}' metadata '{key}' from value " f"'{old_value}' to '{value}'") # overwrite existing value # copy value to avoid changes to either of them to affect each other desc.metadata[key] = copy.deepcopy(value) colcon-core-0.17.1/colcon_core/package_augmentation/python.py000066400000000000000000000136751465053734400243600ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core.dependency_descriptor import DependencyDescriptor from colcon_core.package_augmentation import logger from colcon_core.package_augmentation \ import PackageAugmentationExtensionPoint from colcon_core.package_identification.python import get_configuration from colcon_core.package_identification.python import is_reading_cfg_sufficient from colcon_core.plugin_system import satisfies_version from distlib.util import parse_requirement from distlib.version import NormalizedVersion class PythonPackageAugmentation(PackageAugmentationExtensionPoint): """ Augment Python packages with information from `setup.cfg` files. Only packages which pass no arguments (or only a ``cmdclass``) to the ``setup()`` function in their ``setup.py`` file are being considered. """ def __init__(self): # noqa: D107 super().__init__() satisfies_version( PackageAugmentationExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def augment_package( # noqa: D102 self, desc, *, additional_argument_names=None ): if desc.type != 'python': return setup_py = desc.path / 'setup.py' if not setup_py.is_file(): return setup_cfg = desc.path / 'setup.cfg' if not setup_cfg.is_file(): return if not is_reading_cfg_sufficient(setup_py): return config = get_configuration(setup_cfg) metadata = config.get('metadata', {}) version = metadata.get('version') desc.metadata['version'] = version options = config.get('options', {}) dependencies = extract_dependencies(options) for k, v in dependencies.items(): desc.dependencies[k] |= v def getter(env): nonlocal options return options desc.metadata['get_python_setup_options'] = getter maintainers = _extract_maintainers_with_emails(metadata) if maintainers: desc.metadata.setdefault('maintainers', []) desc.metadata['maintainers'] += maintainers def extract_dependencies(options): """ Get the dependencies of the package. 
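    For example, options read from a ``setup.cfg`` could be mapped roughly
    like this (the values are illustrative)::

        options = {
            'install_requires': ['foo', 'bar>=1.2'],
            'tests_require': ['pytest'],
        }
        deps = extract_dependencies(options)
        # deps['run'] contains descriptors for 'foo' and 'bar' (the latter
        # with a 'version_gte' constraint), deps['test'] one for 'pytest',
        # and deps['build'] is an empty set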
:param options: The dictionary from the options section of the setup.cfg file :returns: The dependencies :rtype: dict(string, set(DependencyDescriptor)) """ mapping = { 'setup_requires': 'build', 'install_requires': 'run', 'tests_require': 'test', } dependencies = {} _map_dependencies(options, mapping, dependencies) extras_mapping = { 'test': 'test', 'tests': 'test', 'testing': 'test', } _map_dependencies( options.get('extras_require') or {}, extras_mapping, dependencies) return dependencies def _map_dependencies(options, mapping, dependencies): for option_name, dependency_type in mapping.items(): dependencies.setdefault(dependency_type, set()) for dep in options.get(option_name) or []: dependencies[dependency_type].add( create_dependency_descriptor(dep)) def create_dependency_descriptor(requirement_string): """ Create a DependencyDescriptor from a PEP440 compliant string. See https://www.python.org/dev/peps/pep-0440/#version-specifiers :param str requirement_string: a PEP440 compliant requirement string :return: A descriptor with version constraints from the requirement string :rtype: DependencyDescriptor """ symbol_mapping = { '==': 'version_eq', '!=': 'version_neq', '<=': 'version_lte', '>=': 'version_gte', '>': 'version_gt', '<': 'version_lt', } requirement = parse_requirement(requirement_string) metadata = { 'origin': 'python', } for symbol, version in (requirement.constraints or []): if symbol in symbol_mapping: metadata[symbol_mapping[symbol]] = version elif symbol == '~=': metadata['version_gte'] = version metadata['version_lt'] = _next_incompatible_version(version) else: logger.warning( f"Ignoring unknown symbol '{symbol}' in '{requirement}'") return DependencyDescriptor(requirement.name, metadata=metadata) def _next_incompatible_version(version): """ Find the next non-compatible version. This is for use with the ~= compatible syntax. It will provide the first version that this version must be less than in order to be compatible. 
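    For instance, a compatible release requirement like ``foo~=1.4.5``
    (illustrative) is translated by ``create_dependency_descriptor`` above
    into roughly the following constraints::

        version_gte: 1.4.5
        version_lt:  1.5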
:param str version: PEP 440 compliant version number :return: The first version after this version that is not compatible :rtype: str """ normalized = NormalizedVersion(version) parse_tuple = normalized.parse(version) version_tuple = parse_tuple[1] *unchanged, increment, dropped = version_tuple incremented = increment + 1 version = unchanged version.append(incremented) # versions have a minimum length of 2 if len(version) == 1: version.append(0) return '.'.join(map(str, version)) def _extract_maintainers_with_emails(metadata): if 'maintainer' in metadata: maintainer = metadata['maintainer'] maintainer_email = metadata.get('maintainer_email') else: # If no explicit maintainer is given then it is likely that the # original author is maintaining the package following python # recommendations # https://packaging.python.org/en/latest/specifications/core-metadata/#maintainer maintainer = metadata.get('author') maintainer_email = metadata.get('author_email') # We're only interested in entries with emails if maintainer and maintainer_email: maintainers = [ (m[0].strip(), m[1].strip()) for m in zip( maintainer.split(','), maintainer_email.split(','))] return ['{} <{}>'.format(*m) for m in maintainers] colcon-core-0.17.1/colcon_core/package_decorator.py000066400000000000000000000035451465053734400223210ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core.package_descriptor import PackageDescriptor class PackageDecorator: """Decorator of a package descriptor to collect recursive dependencies.""" __slots__ = ( 'descriptor', 'recursive_dependencies', 'selected', ) def __init__(self, descriptor: PackageDescriptor): """ Decorate a package descriptor. The function :function:`add_recursive_dependencies` should be invoked to populate the recursive dependencies. :param descriptor: The package descriptor """ self.descriptor = descriptor self.recursive_dependencies = None self.selected = True def get_decorators(descriptors): """ Get decorators for package descriptors. :param Iterable descriptors: The package descriptors :returns: The package decorators :rtype: list """ return [PackageDecorator(d) for d in descriptors] def add_recursive_dependencies( decorators, direct_categories=None, recursive_categories=None, ): """ Update the recursive dependencies of the decorators. :param set decorators: The known packages to consider :param Iterable[str] direct_categories: The names of the direct categories :param Iterable[str]|Mapping[str, Iterable[str]] recursive_categories: The names of the recursive categories, optionally mapped from the immediate upstream category which included the dependency """ descriptors = [decorator.descriptor for decorator in decorators] for decorator in decorators: decorator.recursive_dependencies = \ decorator.descriptor.get_recursive_dependencies( descriptors, direct_categories=direct_categories, recursive_categories=recursive_categories) colcon-core-0.17.1/colcon_core/package_descriptor.py000066400000000000000000000145651465053734400225210ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import defaultdict from collections.abc import Mapping from copy import deepcopy import os from pathlib import Path from colcon_core.dependency_descriptor import DependencyDescriptor class PackageDescriptor: """ A descriptor for a package. 
A packages is identified by the following triplet: * the 'path' which must be an existing path * the 'type' which must be a non-empty string * the 'name' which must be a non-empty string Packages with the same type and name but different path are considered equal if their realpath is te same. 'dependencies' are grouped by their category as `DependencyDescriptor` or `str`. Each item in 'hooks' must be a relative path in the installation space. The 'metadata' dictionary can store any additional information. """ __slots__ = ( 'path', 'type', 'name', 'dependencies', 'hooks', 'metadata', ) def __init__(self, path): """ Descriptor for a package in a specific path. :param str|Path path: The location of the package """ self.path = Path(str(path)) self.type = None self.name = None self.dependencies = defaultdict(set) # IDEA category specific hooks self.hooks = [] self.metadata = {} def identifies_package(self): """ Check if the package has a path, type and name. :returns: True if the descriptor has a path, type, and name :rtype: bool """ return self.path and self.type and self.name def get_dependencies(self, *, categories=None): """ Get the dependencies for specific categories or for all categories. :param Iterable[str] categories: The names of the specific categories :returns: The dependencies :rtype: set[DependencyDescriptor] :raises AssertionError: if the package name is listed as a dependency """ dependencies = set() categories_by_dependency = defaultdict(list) if categories is None: categories = self.dependencies.keys() for category in sorted(categories): for dependency in self.dependencies[category]: categories_by_dependency[dependency].append(category) for dependency, categories in categories_by_dependency.items(): if isinstance(dependency, DependencyDescriptor): # duplicate the descriptor and metadata dependency = deepcopy(dependency) else: dependency = DependencyDescriptor(dependency) # note that the category list is not merged when a dependency # appears multiple times in a package's tree, and the most shallow # instance prevails. dependency.metadata['categories'] = categories dependencies.add(dependency) assert self.name not in dependencies, \ f"The package '{self.name}' has a dependency with the same name" return dependencies def get_recursive_dependencies( self, descriptors, direct_categories=None, recursive_categories=None, ): """ Get the recursive dependencies. Dependencies which are not in the set of package descriptor names are ignored. 
:param set descriptors: The known packages to consider :param Iterable[str] direct_categories: The names of the direct categories :param Iterable[str]|Mapping[str, Iterable[str]] recursive_categories: The names of the recursive categories, optionally mapped from the immediate upstream category which included the dependency :returns: The dependencies :rtype: set[DependencyDescriptor] :raises AssertionError: if a package lists itself as a dependency """ if not isinstance(recursive_categories, Mapping): recursive_categories = defaultdict(lambda: recursive_categories) # the following variable only exists for faster access within the loop descriptors_by_name = defaultdict(set) for d in descriptors: descriptors_by_name[d.name].add(d) queue = self.get_dependencies(categories=direct_categories) dependencies = set() depth = 0 while queue: # ignore redundant dependencies level_queue = queue - dependencies queue.clear() depth += 1 for dep in level_queue: # ignore circular dependencies if dep == self.name: continue # ignore unknown dependencies # explicitly allow multiple packages with the same name descs = descriptors_by_name[dep] if not descs: continue categories = set() for category in dep.metadata['categories']: cats = recursive_categories.get(category) if cats is None: categories = None break categories.update(cats) # recursing into the same function of the dependency descriptor # queue recursive dependencies for d in descs: queue |= d.get_dependencies(categories=categories) # add the depth dep.metadata['depth'] = depth # add dependency to result set dependencies.add(dep) return dependencies def __hash__(self): # noqa: D105 # the hash doesn't include the path since different paths are # considered equal if their realpath is the same return hash((self.type, self.name)) def __eq__(self, other): # noqa: D105 if type(self) is not type(other): return NotImplemented if (self.type, self.name) != (other.type, other.name): return False if self.path == other.path: return True # check realpath last since it is the most expensive to compute return os.path.realpath(str(self.path)) == \ os.path.realpath(str(other.path)) def __str__(self): # noqa: D105 return '{' + ', '.join( ['%s: %s' % (s, getattr(self, s)) for s in self.__slots__]) + '}' colcon-core-0.17.1/colcon_core/package_discovery/000077500000000000000000000000001465053734400217655ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/package_discovery/__init__.py000066400000000000000000000230301465053734400240740ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict from glob import glob import os import traceback from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority logger = colcon_logger.getChild(__name__) class PackageDiscoveryExtensionPoint: """ The interface for discovery extensions. A discovery extension provides potential locations of packages which are then being check by the identification extensions. For each instance the attribute `PACKAGE_DISCOVERY_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the discovery extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of discovery extensions.""" PRIORITY = 100 def has_default(self): """ Check if the extension has a default parameter is none are provided. 
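        For example, a discovery extension which falls back to the current
        directory could combine this with a default argument value (sketch,
        loosely based on the path discovery extension below)::

            def has_default(self):
                return True

            def add_arguments(self, *, parser, with_default):
                parser.add_argument(
                    '--paths', nargs='*',
                    default=['.'] if with_default else None)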
The method is intended to be overridden in a subclass. :param args: The parsed command line arguments :returns: True if `discover()` should be called even if no parameters are provided, False otherwise :rtype: bool """ return False def add_arguments(self, *, parser, with_default): """ Add command line arguments specific to the package discovery. The method is intended to be overridden in a subclass. :param parser: The argument parser :param bool with_default: if True the extension should add a default value to at least one of the added arguments, otherwise not """ pass def has_parameters(self, *, args): """ Check if parameters have been passed for this extension. This method must be overridden in a subclass. :param args: The parsed command line arguments :returns: True if `discover()` should be called, False otherwise :rtype: bool """ raise NotImplementedError() def discover(self, *, args, identification_extensions): """ Discover packages using the passed identification extensions. The method is intended to be overridden in a subclass. If the `has_parameters` method never returns True this method is never invoked and therefore doesn't have to be implemented. :param args: The parsed command line arguments :param identification_extensions: The identification extensions :returns: set of :py:class:`colcon_core.package_descriptor.PackageDescriptor` :rtype: set """ raise NotImplementedError() def get_package_discovery_extensions(*, group_name=None): """ Get the available package discovery extensions. The extensions are ordered by their priority and entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.PACKAGE_DISCOVERY_NAME = name return order_extensions_by_priority(extensions) def add_package_discovery_arguments(parser, *, extensions=None): """ Add the command line arguments for the package discovery extensions. :param parser: The argument parser :param extensions: The package discovery extensions to use, if `None` is passed use the extensions provided by :function:`get_package_discovery_extensions` """ if extensions is None: extensions = get_package_discovery_extensions() group = parser.add_argument_group(title='Discovery arguments') # find the first extension which has default values first_extension_with_default = None for name, extension in extensions.items(): try: has_default = extension.has_default() except Exception as e: # noqa: F841 # catch exceptions raised in discovery extension exc = traceback.format_exc() logger.error( 'Exception in package discovery extension ' f"'{extension.PACKAGE_DISCOVERY_NAME}': {e}\n{exc}") # skip failing extension, continue with next one else: if has_default: first_extension_with_default = name break # collect arguments from the extensions for name, extension in extensions.items(): with_default = name == first_extension_with_default try: retval = extension.add_arguments( parser=group, with_default=with_default) assert retval is None, 'add_arguments() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in discovery extension exc = traceback.format_exc() logger.error( 'Exception in package discovery extension ' f"'{extension.PACKAGE_DISCOVERY_NAME}': {e}\n{exc}") # skip failing extension, continue with next one def discover_packages( args, identification_extensions, *, discovery_extensions=None ): """ Discover and identify packages. 
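    A minimal call could look like this (sketch)::

        from colcon_core.package_identification \
            import get_package_identification_extensions

        identification_extensions = get_package_identification_extensions()
        descriptors = discover_packages(args, identification_extensions)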
All discovery extensions which report to have parameters are being used to discover packages. If none report to have parameters all discovery extensions are being used but only the one with a default value should discover packages. Each discovery extension uses the passed identification extensions to check each potential location for the existence of a package. :param args: The parsed command line arguments :param identification_extensions: The package identification extensions to pass to each invocation of :function:`PackageDiscoveryExtensionPoint.discover` :param discovery_extensions: The package discovery extensions to use, if `None` is passed use the extensions provided by :function:`get_package_discovery_extensions` :returns: set of :py:class:`colcon_core.package_descriptor.PackageDescriptor` :rtype: set """ if discovery_extensions is None: discovery_extensions = get_package_discovery_extensions() if not discovery_extensions: logger.warning('No package discovery extensions found') return set() # use only the discovery extensions which have parameters if any # otherwise use all discovery_extensions_with_parameters = _get_extensions_with_parameters( args, discovery_extensions) if discovery_extensions_with_parameters: discovery_extensions = discovery_extensions_with_parameters return _discover_packages( args, identification_extensions, discovery_extensions) def expand_dir_wildcards(paths): """ Expand wildcards explicitly to match directories. This function does not match files. Also, unlike shells, it does not keep patterns that yield no matches. This is only necessary on Windows or when the wildcards are not expanded by the shell. :param list paths: The paths to update in place """ i = 0 while i < len(paths): path = paths[i] if '*' not in path: i += 1 continue expanded_paths = [ p for p in sorted(glob(path)) if os.path.isdir(p)] logger.log( 5, "expand_dir_wildcards() expanding '%s' to %s", path, expanded_paths) paths[i:i + 1] = expanded_paths i += len(expanded_paths) def _get_extensions_with_parameters( args, discovery_extensions ): with_parameters = OrderedDict() for extension in discovery_extensions.values(): logger.log( 1, f'discover_packages({extension.PACKAGE_DISCOVERY_NAME}) check ' 'parameters') try: has_parameter = extension.has_parameters(args=args) except Exception as e: # noqa: F841 # catch exceptions raised in discovery extension exc = traceback.format_exc() logger.error( 'Exception in package discovery extension ' f"'{extension.PACKAGE_DISCOVERY_NAME}': {e}\n{exc}") # skip failing extension, continue with next one else: if has_parameter: with_parameters[extension.PACKAGE_DISCOVERY_NAME] = extension return with_parameters def _discover_packages( args, identification_extensions, discovery_extensions ): all_descs = set() # if none had explicit parameters use the first which has defaults for extension in discovery_extensions.values(): logger.log( 1, 'discover_packages(%s) discover', extension.PACKAGE_DISCOVERY_NAME) try: descs = extension.discover( args=args, identification_extensions=identification_extensions) assert isinstance(descs, set), 'discover() should return a set' except NotImplementedError: # skip extension not implementing discovery continue except Exception as e: # noqa: F841 # catch exceptions raised in discovery extension exc = traceback.format_exc() logger.error( 'Exception in package discovery extension ' f"'{extension.PACKAGE_DISCOVERY_NAME}': {e}\n{exc}") # skip failing extension, continue with next one continue else: logger.log( 1, 'discover_packages(%s) 
using defaults', extension.PACKAGE_DISCOVERY_NAME) all_descs |= descs return all_descs colcon-core-0.17.1/colcon_core/package_discovery/path.py000066400000000000000000000047671465053734400233110ustar00rootroot00000000000000# Copyright 2016-2020 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from colcon_core.argument_default import is_default_value from colcon_core.argument_default import wrap_default_value from colcon_core.argument_type import get_cwd_path_resolver from colcon_core.package_discovery import expand_dir_wildcards from colcon_core.package_discovery import logger from colcon_core.package_discovery import PackageDiscoveryExtensionPoint from colcon_core.package_identification import identify from colcon_core.package_identification import IgnoreLocationException from colcon_core.plugin_system import satisfies_version class PathPackageDiscovery(PackageDiscoveryExtensionPoint): """Check specific paths for packages.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( PackageDiscoveryExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def has_default(self): # noqa: D102 return True def add_arguments( # noqa: D102 self, *, parser, with_default, single_path=False ): parser.add_argument( '--paths', nargs='*' if not single_path else '?', metavar='PATH', default=wrap_default_value(['.']) if with_default else None, type=get_cwd_path_resolver(), help='The paths to check for a package. Use shell wildcards ' '(e.g. `src/*`) to select all direct subdirectories' + (' (default: .)' if with_default else '')) def has_parameters(self, *, args): # noqa: D102 return not is_default_value(args.paths) and \ bool(args.paths) def discover(self, *, args, identification_extensions): # noqa: D102 if args.paths is None: return set() # manually check for wildcards and expand them in case # the values were not provided through the shell expand_dir_wildcards(args.paths) logger.log(1, 'PathPackageDiscovery.discover(%s)', args.paths) visited_paths = set() descs = set() for path in args.paths: real_path = os.path.realpath(path) # avoid recrawling same paths if real_path in visited_paths: continue visited_paths.add(real_path) try: result = identify(identification_extensions, path) except IgnoreLocationException: continue if result: descs.add(result) return descs colcon-core-0.17.1/colcon_core/package_identification/000077500000000000000000000000001465053734400227475ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/package_identification/__init__.py000066400000000000000000000145021465053734400250620ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import copy import traceback from typing import Dict from typing import Union from colcon_core.logging import colcon_logger from colcon_core.package_descriptor import PackageDescriptor from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_grouped_by_priority logger = colcon_logger.getChild(__name__) class IgnoreLocationException(Exception): """ Exception to signal that a path should be skipped. This also excludes all recursive subdirectories from being considered. It should be raised in the :function:`PackageIdentificationExtensionPoint.identify` method of package identification extensions. """ pass class PackageIdentificationExtensionPoint: """ The interface for package identification extensions. A package identification extension populates a package descriptor if a given location contains a package. 
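    A minimal extension could look like this (sketch; ``MY_MARKER`` and
    ``my_type`` are hypothetical)::

        class MarkerFileIdentification(PackageIdentificationExtensionPoint):

            def identify(self, desc):
                if desc.type is not None and desc.type != 'my_type':
                    return
                # 'MY_MARKER' is a hypothetical marker file name
                if not (desc.path / 'MY_MARKER').is_file():
                    return
                desc.type = 'my_type'
                if desc.name is None:
                    desc.name = desc.path.name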
For each instance the attribute `PACKAGE_IDENTIFICATION_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the package identification extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of package identification extensions.""" PRIORITY = 100 def identify(self, desc: PackageDescriptor): """ Check if the given path contains a package. If yes, update the package descriptor with additional information. If the descriptor already has a package type only act if the type matches this extension. This method must be overridden in a subclass. :param desc: The package descriptor with the directory to check :type desc: :py:class:`colcon_core.package_descriptor.PackageDescriptor` :raises IgnoreLocationException: Skip the path as well as all recursive subdirectories """ raise NotImplementedError() def get_package_identification_extensions(*, group_name=None): """ Get the available package identification extensions. The extensions are grouped by their priority and each group is ordered by the entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.PACKAGE_IDENTIFICATION_NAME = name return order_extensions_grouped_by_priority(extensions) def identify( extensions: Dict[ # actually an OrderedDict int, # priority Dict[str, PackageIdentificationExtensionPoint], # an OrderedDict ], path: str, ) -> Union[None, bool, PackageDescriptor]: """ Identify the package in the given path. :param extensions: dict of extensions :param path: The path """ desc = PackageDescriptor(path) for extensions_same_prio in extensions.values(): result = _identify(extensions_same_prio, desc) # continue with next priority level if no information was contributed if result is None: continue # skip location since identification is ambiguous if result is False: raise IgnoreLocationException() assert isinstance(result, PackageDescriptor), result if result.identifies_package(): return result # use incrementally populated descriptor for next priority level desc = result if getattr(desc, 'type', None) or getattr(desc, 'name', None): logger.warning( f"package '{desc.path}' has type or name but is incomplete") return None # the following variable only exists to avoid repeatedly copying descriptors _reused_descriptor_instance = None def _identify(extensions_same_prio, desc): global _reused_descriptor_instance logger.log( 1, '_identify(%s) by extensions %s', desc.path, sorted(extensions_same_prio.keys())) # collect the optionally modified descriptors from all extensions results = {desc} for key, extension in extensions_same_prio.items(): # create copy of the descriptor if the reusable instance is different if ( _reused_descriptor_instance is None or not _are_descriptors_equal(desc, _reused_descriptor_instance) ): _reused_descriptor_instance = copy.deepcopy(desc) logger.log(1, "_identify(%s) by extension '%s'", desc.path, key) try: retval = extension.identify(_reused_descriptor_instance) assert retval is None, 'identify() should return None' except IgnoreLocationException: logger.log(1, '_identify(%s) ignored', desc.path) raise except Exception as e: # noqa: F841 # catch exceptions raised in identification extension exc = traceback.format_exc() logger.error( 'Exception in package identification extension ' f"'{extension.PACKAGE_IDENTIFICATION_NAME}' in '{desc.path}': " f'{e}\n{exc}') # skip failing extension, continue with next one continue # 
only add the descriptor if it is different from the existing result # if it is equal it can be attempted to reuse the instance instead if not _are_descriptors_equal(desc, _reused_descriptor_instance): results.add(_reused_descriptor_instance) # a new copy of the descriptor needs to be created next cycle _reused_descriptor_instance = None # multiple extensions populated the descriptor with different values if len(results) > 2: logger.warning( f'_identify({desc.path}) has multiple matches and therefore is ' 'being ignored: ' + ', '.join(sorted(d.type for d in results if d.type is not None))) return False # no extension populated the descriptor with any values if len(results) == 1: return None results.remove(desc) desc = results.pop() logger.debug( f"Package '{desc.path}' with type '{desc.type}' and name " f"'{desc.name}'") return desc def _are_descriptors_equal(desc1, desc2): for s in PackageDescriptor.__slots__: if getattr(desc1, s) != getattr(desc2, s): return False return True colcon-core-0.17.1/colcon_core/package_identification/ignore.py000066400000000000000000000016531465053734400246110ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os.path from colcon_core.package_identification import IgnoreLocationException from colcon_core.package_identification \ import PackageIdentificationExtensionPoint from colcon_core.plugin_system import satisfies_version IGNORE_MARKER = 'COLCON_IGNORE' class IgnorePackageIdentification(PackageIdentificationExtensionPoint): """Ignore paths containing a `COLCON_IGNORE` file.""" # the priority needs to be higher than all other extensions PRIORITY = 1000 def __init__(self): # noqa: D107 super().__init__() satisfies_version( PackageIdentificationExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def identify(self, desc): # noqa: D102 colcon_ignore = desc.path / IGNORE_MARKER if os.path.lexists(str(colcon_ignore)): raise IgnoreLocationException() colcon-core-0.17.1/colcon_core/package_identification/python.py000066400000000000000000000126411465053734400246460ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 import warnings from colcon_core.package_identification import logger from colcon_core.package_identification \ import PackageIdentificationExtensionPoint from colcon_core.plugin_system import satisfies_version class PythonPackageIdentification(PackageIdentificationExtensionPoint): """ Identify Python packages with `setup.cfg` files. Only packages which pass no arguments (or only a ``cmdclass``) to the ``setup()`` function in their ``setup.py`` file are being considered. 
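    For example, a package consisting of the following two files would be
    identified by this extension (``my_pkg`` is an illustrative name)::

        # setup.py
        from setuptools import setup
        setup()

        # setup.cfg
        [metadata]
        name = my_pkg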
""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( PackageIdentificationExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def identify(self, desc): # noqa: D102 if desc.type is not None and desc.type != 'python': return setup_py = desc.path / 'setup.py' if not setup_py.is_file(): return setup_cfg = desc.path / 'setup.cfg' if not setup_cfg.is_file(): return if not is_reading_cfg_sufficient(setup_py): logger.debug( f"Python package in '{desc.path}' passes arguments to the " 'setup() function which requires a different identification ' f"extension than '{self.PACKAGE_IDENTIFICATION_NAME}'") return config = get_configuration(setup_cfg) name = config.get('metadata', {}).get('name') if not name: return desc.type = 'python' if desc.name is not None and desc.name != name: msg = 'Package name already set to different value' logger.error(msg) raise RuntimeError(msg) desc.name = name def is_reading_cfg_sufficient(setup_py): """ Check the content of the setup.py file. If the ``setup()`` function is called with no arguments or only a ``cmdclass`` it is sufficient to only read the content of the ``setup.cfg`` file. :param setup_py: The path of the setup.py file :returns: The flag if reading the setup.cfg file is sufficient :rtype: bool """ setup_py_content = setup_py.read_text() # the setup function must be called with no arguments # or only a ``cmdclass``to be considered by this extension otherwise # only reading the content of the setup.cfg file isn't sufficient return 'setup()' in setup_py_content or \ 'setup(cmdclass=cmdclass)' in setup_py_content def get_configuration(setup_cfg): """ Read the setup.cfg file. :param setup_cfg: The path of the setup.cfg file :returns: The configuration data :rtype: dict """ try: # import locally to allow other functions in this module to be usable try: from setuptools.config.setupcfg import read_configuration except ImportError: from setuptools.config import read_configuration except ImportError as e: try: from importlib.metadata import distribution except ImportError: from importlib_metadata import distribution from packaging.version import Version try: setuptools_version = distribution('setuptools').version except ModuleNotFoundError: setuptools_version = '0' minimum_version = '30.3.0' if Version(setuptools_version) < Version(minimum_version): e.msg += ', ' \ "'setuptools' needs to be at least version " \ f'{minimum_version}, if a newer version is not available ' \ "from the package manager use 'pip3 install -U setuptools' " \ 'to update to the latest version' raise return read_configuration(str(setup_cfg)) def extract_dependencies(options): """ Get the dependencies of the package. This function has been depreated, use ``colcon_core.package_augmentation.python.extract_dependencies()`` instead. :param options: The dictionary from the options section of the setup.cfg file :returns: The dependencies :rtype: dict(string, set(DependencyDescriptor)) """ warnings.warn( "'colcon_core.package_identification.python.extract_dependencies()' " 'has been deprecated, use ' "'colcon_core.package_augmentation.python.extract_dependencies()' " 'instead', stacklevel=2) from colcon_core.package_augmentation.python import \ extract_dependencies as function return function(options) def create_dependency_descriptor(requirement_string): """ Create a DependencyDescriptor from a PEP440 compliant string. 
See https://www.python.org/dev/peps/pep-0440/#version-specifiers This function has been depreated, use ``colcon_core.package_augmentation.python.create_dependency_descriptor()`` instead. :param str requirement_string: a PEP440 compliant requirement string :return: A descriptor with version constraints from the requirement string :rtype: DependencyDescriptor """ warnings.warn( "'colcon_core.package_identification.python." "create_dependency_descriptor()' has been deprecated, use " "'colcon_core.package_augmentation.python." "create_dependency_descriptor()' instead", stacklevel=2) from colcon_core.package_augmentation.python import \ create_dependency_descriptor as function return function(requirement_string) colcon-core-0.17.1/colcon_core/package_selection/000077500000000000000000000000001465053734400217435ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/package_selection/__init__.py000066400000000000000000000206611465053734400240610ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import defaultdict import traceback from colcon_core.logging import colcon_logger from colcon_core.package_augmentation import augment_packages from colcon_core.package_discovery import add_package_discovery_arguments from colcon_core.package_discovery import discover_packages from colcon_core.package_identification \ import get_package_identification_extensions from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority from colcon_core.topological_order import topological_order_packages logger = colcon_logger.getChild(__name__) class PackageSelectionExtensionPoint: """ The interface for package selection extensions. A package selection extension determines the subset of packages to be processed. For each instance the attribute `PACKAGE_SELECTION_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the package selection extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of package selection extensions.""" PRIORITY = 100 def add_arguments(self, *, parser): """ Add command line arguments specific to the package selection. The method is intended to be overridden in a subclass. :param parser: The argument parser """ pass def check_parameters(self, *, args, pkg_names): """ Check is the passed arguments have valid values. The method is intended to be overridden in a subclass. It should either warn about invalid values and gracefully continue or raise a `SystemExit` exception. :param args: The parsed command line arguments :param pkg_names: The set of package names """ pass def select_packages(self, *, args, decorators): """ Identify the packages which should be skipped. By default all package decorators are marked as "selected". This method must be overridden in a subclass. :param args: The parsed command line arguments :param list decorators: The package decorators in topological order """ raise NotImplementedError() def add_arguments(parser): """ Add the command line arguments for the package selection extensions. The function will call :function:`add_package_discovery_arguments` to add the package discovery arguments. :param parser: The argument parser """ add_package_discovery_arguments(parser) _add_package_selection_arguments(parser) def get_package_selection_extensions(*, group_name=None): """ Get the available package selection extensions. 
The extensions are ordered by their entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.PACKAGE_SELECTION_NAME = name return order_extensions_by_priority(extensions) def _add_package_selection_arguments(parser): """ Add the command line arguments for the package selection extensions. :param parser: The argument parser """ package_selection_extensions = get_package_selection_extensions() group = parser.add_argument_group(title='Package selection arguments') for extension in package_selection_extensions.values(): try: retval = extension.add_arguments(parser=group) assert retval is None, 'add_arguments() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in package selection extension exc = traceback.format_exc() logger.error( 'Exception in package selection extension ' f"'{extension.PACKAGE_SELECTION_NAME}': {e}\n{exc}") # skip failing extension, continue with next one def get_packages( args, *, additional_argument_names=None, direct_categories=None, recursive_categories=None ): """ Get the selected package decorators in topological order. The overview of the process: * Get the package descriptors * Order them topologically * Select the packages based on the command line arguments :param additional_argument_names: A list of additional arguments to consider :param Iterable[str] direct_categories: The names of the direct categories :param Iterable[str]|Mapping[str, Iterable[str]] recursive_categories: The names of the recursive categories, optionally mapped from the immediate upstream category which included the dependency :rtype: list :raises RuntimeError: if the returned set of packages contains duplicate package names """ descriptors = get_package_descriptors( args, additional_argument_names=additional_argument_names) decorators = topological_order_packages( descriptors, direct_categories=direct_categories, recursive_categories=recursive_categories) select_package_decorators(args, decorators) # check for duplicate package names pkgs = [m.descriptor for m in decorators if m.selected] if len({d.name for d in pkgs}) < len(pkgs): pkg_paths = defaultdict(list) for d in pkgs: pkg_paths[d.name].append(f' - {d.path}') raise RuntimeError( 'Duplicate package names not supported:\n' + '\n'.join( ('- ' + name + ':\n' + '\n'.join(sorted(pkg_paths[name]))) for name in sorted(pkg_paths.keys()) if len(pkg_paths[name]) > 1)) return decorators def get_package_descriptors(args, *, additional_argument_names=None): """ Get the package descriptors.
The overview of the process: * Discover the package descriptors using the package discovery and identification extensions * Check is the passed package selection arguments have valid values * Augment the package descriptors :param additional_argument_names: A list of additional arguments to consider :returns: set of :py:class:`colcon_core.package_descriptor.PackageDescriptor` :rtype: set """ extensions = get_package_identification_extensions() descriptors = discover_packages(args, extensions) pkg_names = {d.name for d in descriptors} _check_package_selection_parameters(args, pkg_names) augment_packages( descriptors, additional_argument_names=additional_argument_names) return descriptors def _check_package_selection_parameters(args, pkg_names): package_selection_extensions = get_package_selection_extensions() for extension in package_selection_extensions.values(): try: retval = extension.check_parameters(args=args, pkg_names=pkg_names) assert retval is None, 'check_parameters() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in package selection extension exc = traceback.format_exc() logger.error( 'Exception in package selection extension ' f"'{extension.PACKAGE_SELECTION_NAME}': {e}\n{exc}") # skip failing extension, continue with next one def select_package_decorators(args, decorators): """ Select the package decorators based on the command line arguments. The `selected` attribute of each decorator is updated by this function. :param args: The parsed command line arguments :param list decorators: The package decorators in topological order """ # filtering must happen after the topological ordering since otherwise # packages in the middle of the dependency graph might be missing package_selection_extensions = get_package_selection_extensions() for extension in package_selection_extensions.values(): try: retval = extension.select_packages( args=args, decorators=decorators) assert retval is None, 'select_packages() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in package selection extension exc = traceback.format_exc() logger.error( 'Exception in package selection extension ' f"'{extension.PACKAGE_SELECTION_NAME}': {e}\n{exc}") # skip failing extension, continue with next one colcon-core-0.17.1/colcon_core/plugin_system.py000066400000000000000000000145671465053734400215740ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict import traceback from colcon_core.extension_point import load_extension_points from colcon_core.logging import colcon_logger from packaging.version import Version logger = colcon_logger.getChild(__name__) class SkipExtensionException(Exception): """ Exception to signal that an extension should be skipped. It should be raised in the constructor of an extension. """ pass def instantiate_extensions( group_name, *, exclude_names=None, unique_instance=False ): """ Instantiate all extensions within a group. 
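# --- Illustrative sketch: loading and ordering the extensions of a group.
# The group name below is an example; any entry point group used by colcon
# is handled the same way.
from colcon_core.plugin_system import instantiate_extensions
from colcon_core.plugin_system import order_extensions_by_priority

extensions = instantiate_extensions('colcon_core.package_selection')
for name, extension in order_extensions_by_priority(extensions).items():
    print(name, extension.PRIORITY)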
:param str group_name: the name of the `entry_point` group :param exclude_names: a list of entry point names to exclude :param bool unique_instance: The flag if each extensions should be instantiated even when it has been created and cached before :returns: dict of extensions """ extension_types = load_extension_points( group_name, excludes=exclude_names) extension_instances = {} for extension_name, extension_class in extension_types.items(): extension_instance = _instantiate_extension( group_name, extension_name, extension_class, unique_instance=unique_instance) if extension_instance is None: continue extension_instances[extension_name] = extension_instance return extension_instances _extension_instances = {} def _instantiate_extension( group_name, extension_name, extension_class, *, unique_instance=False ): global _extension_instances if not unique_instance and extension_class in _extension_instances: return _extension_instances[extension_class] try: extension_instance = extension_class() assert isinstance(extension_instance, object), \ 'invocation should return an object' except SkipExtensionException as e: # noqa: F841 logger.info( f"Skipping extension '{group_name}.{extension_name}': {e}") extension_instance = None except Exception as e: # noqa: F841 # catch exceptions raised in extension constructor exc = traceback.format_exc() logger.error( 'Exception instantiating extension ' f"'{group_name}.{extension_name}': {e}\n{exc}") extension_instance = None if not unique_instance: _extension_instances[extension_class] = extension_instance return extension_instance def order_extensions_by_name(extensions): """ Order the extensions based on the entry point name. :param extensions: a dict mapping the entry point name to the extension instance :returns: ordered dict mapping the entry point name to the extension instance """ return OrderedDict( sorted(extensions.items(), key=lambda pair: pair[0])) def order_extensions_by_priority(extensions): """ Order the extensions based on their `PRIORITY` attribute. :param extensions: a dict mapping the entry point name to the extension instance :returns: ordered dict mapping the entry point name to the extension instance """ # use negative priority and inverted reverse flag # to get highest priorities first and # extensions withing the same priority group in alphabetical order return OrderedDict( sorted( extensions.items(), key=lambda pair: (-pair[1].PRIORITY, pair[0]))) def order_extensions_grouped_by_priority(extensions): """ Group the extensions based on their `PRIORITY` attribute. Each priority group is ordered by the entry point name. :param extensions: a dict mapping the entry point name to the extension instance :returns: ordered dict mapping the priority to an ordered dict which maps the entry point name to the extension instance """ # use negative priority and inverted reverse flag # to get highest priorities first and # extensions withing the same priority group in alphabetical order ordered_extensions = OrderedDict( sorted( extensions.items(), key=lambda pair: (-pair[1].PRIORITY, pair[0]))) grouped_extensions = OrderedDict() for name, extension in ordered_extensions.items(): priority = extension.PRIORITY if priority not in grouped_extensions: grouped_extensions[priority] = OrderedDict() grouped_extensions[priority][name] = extension return grouped_extensions def get_first_line_doc(any_type): """ Get the first line of the docstring of an object. Leading spaces are ignored and a trailing dot is being removed. 
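# --- Illustrative sketch: how the priority based ordering behaves, using
# two dummy stand-ins for extensions (higher PRIORITY groups come first,
# names are sorted alphabetically within the same priority group).
from types import SimpleNamespace

from colcon_core.plugin_system import order_extensions_grouped_by_priority

dummies = {
    'beta': SimpleNamespace(PRIORITY=100),
    'alpha': SimpleNamespace(PRIORITY=200),
}
grouped = order_extensions_grouped_by_priority(dummies)
assert list(grouped.keys()) == [200, 100]
assert list(grouped[200].keys()) == ['alpha']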
:param any_type: the object :returns: the first line of the `__doc__` attribute of the object, otherwise an empty string :rtype: str """ if not any_type.__doc__: return '' lines = list(filter( lambda line: line.lstrip(), any_type.__doc__.splitlines())) if not lines: return '' line = lines[0].lstrip() if line.endswith('.'): line = line[:-1] return line def satisfies_version(version, caret_range): """ Check if a version is within a caret range. :param str version: the version number to check (e.g. `1.2.3`) :param str caret_range: the caret range (e.g. `^1.2`) :raises RuntimeError: if the version doesn't match the caret range """ assert caret_range.startswith('^'), 'Only supports caret ranges' extension_point_version = Version(version) extension_version = Version(caret_range[1:]) next_extension_version = _get_upper_bound_caret_version( extension_version) if extension_point_version < extension_version: raise RuntimeError( 'Extension point is too old (%s), the extension requires ' "'%s'" % (extension_point_version, extension_version)) if extension_point_version >= next_extension_version: raise RuntimeError( 'Extension point is newer (%s), than what the extension ' "supports '%s'" % (extension_point_version, extension_version)) def _get_upper_bound_caret_version(version): parts = version.base_version.split('.') if len(parts) < 2: parts += [0] * (2 - len(parts)) major, minor = [int(p) for p in parts[:2]] if major > 0: major += 1 minor = 0 else: minor += 1 return Version('%d.%d.0' % (major, minor)) colcon-core-0.17.1/colcon_core/prefix_path/000077500000000000000000000000001465053734400206145ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/prefix_path/__init__.py000066400000000000000000000060011465053734400227220ustar00rootroot00000000000000# Copyright 2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 import traceback from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_grouped_by_priority logger = colcon_logger.getChild(__name__) class PrefixPathExtensionPoint: """ The interface for prefix path extensions. A prefix path extension populates a list of chained prefix paths. For each instance the attribute `PREFIX_PATH_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the prefix path extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of prefix path extensions.""" PRIORITY = 100 def extend_prefix_path(self, paths): """ Extend the given list of prefix paths. The items are ordered from higher to lower priority paths. This method must be overridden in a subclass. :param paths: The list of prefix paths to be extended :type paths: list """ raise NotImplementedError() def get_prefix_path_extensions(*, group_name=None): """ Get the available prefix path extensions. The extensions are grouped by their priority and each group is ordered by the entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.PREFIX_PATH_NAME = name return order_extensions_grouped_by_priority(extensions) def get_chained_prefix_path(*, skip=None): """ Get the chained prefix paths. The items are ordered from higher to lower priority paths. Repeated paths are skipped. 
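# --- Illustrative sketch: caret range semantics of satisfies_version().
# '^1.2' accepts versions >= 1.2 and < 2.0, while for 0.x versions the
# upper bound is the next minor version ('^0.3' accepts >= 0.3 and < 0.4).
from colcon_core.plugin_system import satisfies_version

satisfies_version('1.4.0', '^1.2')  # within range, no exception
try:
    satisfies_version('0.4.1', '^0.3')
except RuntimeError as e:
    print(e)  # the extension point is newer than the extension supports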
:param skip: The current prefix path to be skipped and not be included in the return value :returns: The list of prefix paths :rtype: list """ chained_prefix_path = [] extensions = get_prefix_path_extensions() for priority in extensions.keys(): extensions_same_prio = extensions[priority] for extension in extensions_same_prio.values(): try: retval = extension.extend_prefix_path(chained_prefix_path) assert retval is None, \ 'extend_prefix_path() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in prefix path extension exc = traceback.format_exc() logger.error( 'Exception in prefix path extension ' f"'{extension.PREFIX_PATH_NAME}': {e}\n{exc}") # skip failing extension, continue with next one unique_prefix_path = [] for p in chained_prefix_path: if skip is not None and str(p) == str(skip): continue if p in unique_prefix_path: continue unique_prefix_path.append(p) return unique_prefix_path colcon-core-0.17.1/colcon_core/prefix_path/colcon.py000066400000000000000000000024321465053734400224440ustar00rootroot00000000000000# Copyright 2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.prefix_path import PrefixPathExtensionPoint logger = colcon_logger.getChild(__name__) _get_colcon_prefix_path_warnings = set() class ColconPrefixPath(PrefixPathExtensionPoint): """Prefix path defined in the `COLCON_PREFIX_PATH` environment variable.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( PrefixPathExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def extend_prefix_path(self, paths): # noqa: D102 global _get_colcon_prefix_path_warnings colcon_prefix_path = os.environ.get('COLCON_PREFIX_PATH', '') for path in colcon_prefix_path.split(os.pathsep): if not path: continue if not os.path.exists(path): if path not in _get_colcon_prefix_path_warnings: logger.warning( f"The path '{path}' in the environment variable " "COLCON_PREFIX_PATH doesn't exist") _get_colcon_prefix_path_warnings.add(path) continue paths.append(path) colcon-core-0.17.1/colcon_core/pytest/000077500000000000000000000000001465053734400176335ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/pytest/__init__.py000066400000000000000000000000001465053734400217320ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/pytest/hooks.py000066400000000000000000000014051465053734400213300ustar00rootroot00000000000000# Copyright 2020 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os import sys import types import pytest @pytest.hookimpl(hookwrapper=True, tryfirst=True) def pytest_terminal_summary(terminalreporter, exitstatus=None, config=None): """Redirect the summary warnings to stderr when run within colcon.""" summary_warnings = terminalreporter.summary_warnings def redirect_to_stderr(self): tw = self._tw import _pytest.config self._tw = _pytest.config.create_terminal_writer( self.config, sys.stderr) summary_warnings() self._tw = tw if 'COLCON' in os.environ: terminalreporter.summary_warnings = types.MethodType( redirect_to_stderr, terminalreporter) yield colcon-core-0.17.1/colcon_core/python_install_path.py000066400000000000000000000023201465053734400227350ustar00rootroot00000000000000# Copyright 2022 Open Source Robotics Foundation, Inc. 
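# --- Illustrative sketch (hypothetical, not part of colcon-core): a prefix
# path extension reading a made-up EXTRA_PREFIX_PATH environment variable,
# mirroring the interface used by the extensions above.
import os

from colcon_core.plugin_system import satisfies_version
from colcon_core.prefix_path import PrefixPathExtensionPoint


class ExtraPrefixPath(PrefixPathExtensionPoint):
    """Prefix paths from the hypothetical EXTRA_PREFIX_PATH variable."""

    def __init__(self):  # noqa: D107
        super().__init__()
        satisfies_version(
            PrefixPathExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')

    def extend_prefix_path(self, paths):  # noqa: D102
        for path in os.environ.get('EXTRA_PREFIX_PATH', '').split(os.pathsep):
            if path and os.path.exists(path):
                paths.append(path)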
# Licensed under the Apache License, Version 2.0 from pathlib import Path import sysconfig def get_python_install_path(name, vars_=()): """ Get Python install paths matching Colcon's preferred scheme. See sysconfig.get_path for more info about the arguments. :param name: Name of the path type :param vars_: A dictionary of variables updating the values of sysconfig.get_config_vars() :rtype: Pathlib.Path """ kwargs = {} kwargs['vars'] = dict(vars_) # Avoid deb_system because it means using --install-layout deb # which ignores --prefix and hardcodes it to /usr if 'deb_system' in sysconfig.get_scheme_names() or \ 'osx_framework_library' in sysconfig.get_scheme_names(): kwargs['scheme'] = 'posix_prefix' # The presence of the rpm_prefix scheme indicates that posix_prefix # has been patched to inject `local` into the installation locations. # The rpm_prefix scheme is a backup of what posix_prefix was before it was # patched. elif 'rpm_prefix' in sysconfig.get_scheme_names(): kwargs['scheme'] = 'rpm_prefix' return Path(sysconfig.get_path(name, **kwargs)) colcon-core-0.17.1/colcon_core/shell/000077500000000000000000000000001465053734400174125ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/shell/__init__.py000066400000000000000000000600761465053734400215340ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 from asyncio import CancelledError from collections import OrderedDict import locale import os from pathlib import Path import re import sys import traceback import warnings from colcon_core.environment_variable import EnvironmentVariable from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_grouped_by_priority from colcon_core.plugin_system import SkipExtensionException from colcon_core.prefix_path import get_chained_prefix_path from colcon_core.subprocess import check_output logger = colcon_logger.getChild(__name__) """Environment variable to enable all shell extensions.""" ALL_SHELLS_ENVIRONMENT_VARIABLE = EnvironmentVariable( 'COLCON_ALL_SHELLS', 'Flag to enable all shell extensions') use_all_shell_extensions = os.environ.get( ALL_SHELLS_ENVIRONMENT_VARIABLE.name, False) class ShellExtensionPoint: """ The interface for shell extensions. An shell extension generates the scripts for a specific shell to setup the environment. For each instance the attribute `SHELL_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the shell extension interface.""" EXTENSION_POINT_VERSION = '2.2' """ The default priority of shell extensions. A shell extension must use a higher priority than the default if and only if it is a "primary" shell. A "primary" shell does not depend on another shell to setup the environment, e.g. `sh`. An example for a "non-primary" shell would be `bash` which relies on the `sh` shell extension to setup environment variables and only contributes additional information like completion. All "non-primary" shell extensions must use a priority equal to or lower than the default. """ PRIORITY = 100 """ The format string for a comment line. It must have the placeholder {comment}. This attribute must be defined in a subclass. """ FORMAT_STR_COMMENT_LINE = None """ The format string to set an environment variable. It must have the placeholder {name} and {value}. This attribute must be defined in a subclass. 
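# --- Illustrative sketch: resolving an install location with the helper
# above. The '/tmp/prefix' value is only an example; on most POSIX systems
# the result is something like /tmp/prefix/lib/python3.X/site-packages.
from colcon_core.python_install_path import get_python_install_path

purelib = get_python_install_path(
    'purelib', {'base': '/tmp/prefix', 'platbase': '/tmp/prefix'})
print(purelib)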
""" FORMAT_STR_SET_ENV_VAR = None """ The format string to use an environment variable. It must have the placeholder {name}. This attribute must be defined in a subclass. """ FORMAT_STR_USE_ENV_VAR = None """ The format string to invoke a script file. It must have the placeholder {prefix} and {script_path}. This attribute must be defined in a subclass. """ FORMAT_STR_INVOKE_SCRIPT = None """ The format string to remove a leading separator. When appending to an environment variable, a leading separator will be left behind if the variable was not set previously. This command is used to cleanup the leading separarator. It must have the placeholder {name} for the environment variable name. This attribute is optionally defined in subclasses. """ FORMAT_STR_REMOVE_LEADING_SEPARATOR = None """ The format string to remove a trailing separator. When prepending to an environment variable, a trailing separator will be left behind if the variable was not set previously. This command is used to cleanup the trailing separarator. It must have the placeholder {name} for the environment variable name. This attribute is optionally defined in subclasses. """ FORMAT_STR_REMOVE_TRAILING_SEPARATOR = None def get_file_extensions(self): """ Get the file extensions provided by this extension. By default the extension name will be returned. The method is intended to be overridden in a subclass. :returns: the file extensions :rtype: tuple """ return (self.SHELL_NAME, ) def create_prefix_script(self, prefix_path, merge_install): """ Create a script in the install prefix path. The script should call each package specific script in order. This method must be overridden in a subclass. :param Path prefix_path: The path of the install prefix :param bool merge_install: The flag if all packages share the same install prefix :returns: The relative paths to the created scripts :rtype: list """ raise NotImplementedError() def _get_prefix_util_path(self): """ Get the absolute path of the `prefix_util.py` module. :returns: The path of the module file :rtype: Path """ warnings.warn( 'colcon_core.shell._get_prefix_util_path() will be removed in the ' 'future', DeprecationWarning, stacklevel=2) return Path(__file__).parent / 'template' / 'prefix_util.py' def _get_prefix_util_template_path(self): """ Get the absolute path of the `prefix_util.py.em` module template. :returns: The path of the module file :rtype: Path """ return Path(__file__).parent / 'template' / 'prefix_util.py.em' def create_package_script(self, prefix_path, pkg_name, hooks): """ Create a script for a specific package. The script should call each hook script in order. This method must be overridden in a subclass. :param Path prefix_path: The package specific install prefix :param str pkg_name: The package name :param list hooks: The relative paths to the hook scripts :returns: The relative paths to the created scripts :rtype: list """ raise NotImplementedError() def create_hook_set_value( self, env_hook_name, prefix_path, pkg_name, name, value, ): """ Create a hook script to set an environment variable value. This method must be overridden in a subclass. :param str env_hook_name: The name of the hook script :param Path prefix_path: The path of the install prefix :param str pkg_name: The package name :param str name: The name of the environment variable :param str value: The value to be set. If an empty string is passed the environment variable should be set to the prefix path at the time the hook is sourced (from COLCON_CURRENT_PREFIX). 
Note that the install-space may have been relocated, and the final value may differ from the value of argument prefix_path, where the hook was originally installed to. :returns: The relative path to the created hook script :rtype: Path """ raise NotImplementedError() def create_hook_append_value( self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): """ Create a hook script to append a value to an environment variable. This method must be overridden in a subclass. :param str env_hook_name: The name of the hook script :param Path prefix_path: The path of the install prefix :param str pkg_name: The package name :param str name: The name of the environment variable :param str subdirectory: The subdirectory of the prefix path :returns: The relative path to the created hook script :rtype: Path """ raise NotImplementedError() def create_hook_prepend_value( self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): """ Create a hook script to prepend a value to an environment variable. This method must be overridden in a subclass. :param str env_hook_name: The name of the hook script :param Path prefix_path: The path of the install prefix :param str pkg_name: The package name :param str name: The name of the environment variable :param str subdirectory: The subdirectory of the prefix path :returns: The relative path to the created hook script :rtype: Path """ raise NotImplementedError() def create_hook_include_file( self, env_hook_name, prefix_path, pkg_name, relative_path, ): """ Create a hook script to include another script. This method must be overridden in a subclass. :param str env_hook_name: The name of the hook script :param Path prefix_path: The path of the install prefix :param str pkg_name: The package name :param str relative_path: The path of the included scripts :returns: The relative path to the created hook script :rtype: Path """ raise NotImplementedError() async def generate_command_environment( self, task_name, build_base, dependencies, ): """ Get the environment variables to invoke commands. The method must be overridden in a subclass if and only if the shell extension represents a "primary" shell (as defined in :attribute:`ShellExtensionPoint.PRIORITY`). :param str task_name: The name of the task :param Path build_base: The base path of the build directory :param set dependencies: The ordered dictionary mapping dependency names to their paths :returns: The environment :rtype: dict :raises SkipExtensionException: if the shell is not usable on the current platform """ raise NotImplementedError() def get_shell_extensions(*, group_name=None): """ Get the available shell extensions. The extensions are grouped by their priority and each group is ordered by the entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.SHELL_NAME = name return order_extensions_grouped_by_priority(extensions) async def get_command_environment(task_name, build_base, dependencies): """ Get the environment variables to invoke commands. 
:param str task_name: The task name identifying a group of task extensions :param str build_base: The path of the build base :param dependencies: The ordered dictionary mapping dependency names to their paths """ extensions = get_shell_extensions() for priority in extensions.keys(): extensions_same_prio = extensions[priority] for extension in extensions_same_prio.values(): try: # use the environment of the first successful shell extension return await extension.generate_command_environment( task_name, Path(build_base), dependencies) except NotImplementedError: # skip extension, continue with next one logger.debug( f"Skip shell extension '{extension.SHELL_NAME}' for " 'command environment') except SkipExtensionException as e: # noqa: F841 # skip extension, continue with next one logger.info( f"Skip shell extension '{extension.SHELL_NAME}' for " f'command environment: {e}') except (CancelledError, RuntimeError): # re-raise same exception to handle it in the executor # without a traceback raise except Exception as e: # noqa: F841 # catch exceptions raised in shell extension exc = traceback.format_exc() logger.error( 'Exception in shell extension ' f"'{extension.SHELL_NAME}': {e}\n{exc}") # skip failing extension, continue with next one raise RuntimeError( 'Could not find a shell extension for the command environment') async def get_environment_variables(cmd, *, cwd=None, shell=True): """ Get the environment variables from the output of the command. :param args: the sequence of program arguments :param cwd: the working directory for the subprocess :param shell: whether to use the shell as the program to execute :rtype: dict """ output = await check_output(cmd, cwd=cwd, shell=shell) env = OrderedDict() for line in output.splitlines(): line = line.rstrip() if not line: continue encoding = locale.getpreferredencoding() try: line = line.decode(encoding) except UnicodeDecodeError: line_replaced = line.decode(encoding=encoding, errors='replace') logger.warning( 'Failed to decode line from the environment using the ' f"encoding '{encoding}': {line_replaced}") continue parts = line.split('=', 1) if sys.platform != 'win32': regex = '^[a-zA-Z_][a-zA-Z0-9_]*$' else: regex = '^[a-zA-Z0-9%_' + ''.join( '\\' + c for c in r'(){}[]$*+-\/"#\',;.@!?' ) + ']+$' if len(parts) == 2 and re.match(regex, parts[0]): # add new environment variable env[parts[0]] = parts[1] else: # assume a line without an equal sign or with a "key" which is not # a valid name is a continuation of the previous line if env: env[list(env.keys())[-1]] += '\n' + line assert len(env) > 0, "The environment shouldn't be empty" return env def create_environment_hook( env_hook_name, prefix_path, pkg_name, name, subdirectory, *, mode='prepend' ): """ Create a hook script for each primary shell. 
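# --- Illustrative sketch: capturing an environment with the coroutine
# above. The command is only an example and assumes a POSIX `env` binary.
import asyncio

from colcon_core.shell import get_environment_variables


async def sketch_dump_environment():
    env = await get_environment_variables(['env'], shell=True)
    return dict(env)

# e.g.: print(asyncio.run(sketch_dump_environment()))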
:param str env_hook_name: The name of the hook script :param Path prefix_path: The path of the install prefix :param str pkg_name: The package name :param str name: The name of the environment variable :param str subdirectory: The value to be appended :param str mode: The mode how the new value should be combined with an existing value, currently only the values `append` and `prepend` are supported :returns: The relative paths to the created hook scripts :rtype: list """ logger.log( 1, "create_environment_hook('%s', '%s')" % (pkg_name, env_hook_name)) hooks = [] extensions = get_shell_extensions() for priority in extensions.keys(): # only consider primary shell extensions if priority <= ShellExtensionPoint.PRIORITY: break extensions_same_prio = extensions[priority] for extension in extensions_same_prio.values(): if mode == 'append': try: hook = extension.create_hook_append_value( env_hook_name, prefix_path, pkg_name, name, subdirectory) assert isinstance(hook, Path), \ 'create_hook_append_value() should return a Path ' \ 'object' except Exception as e: # noqa: F841 # catch exceptions raised in shell extension exc = traceback.format_exc() logger.error( 'Exception in shell extension ' f"'{extension.SHELL_NAME}': {e}\n{exc}") # skip failing extension, continue with next one continue hooks.append(hook) elif mode == 'prepend': try: hook = extension.create_hook_prepend_value( env_hook_name, prefix_path, pkg_name, name, subdirectory) assert isinstance(hook, Path), \ 'create_hook_prepend_value() should return a Path ' \ 'object' except Exception as e: # noqa: F841 # catch exceptions raised in shell extension exc = traceback.format_exc() logger.error( 'Exception in shell extension ' f"'{extension.SHELL_NAME}': {e}\n{exc}") # skip failing extension, continue with next one continue hooks.append(hook) else: raise NotImplementedError() if not hooks: raise RuntimeError( 'Could not find a primary shell extension for creating an ' 'environment hook') return hooks _get_colcon_prefix_path_warnings = set() def get_colcon_prefix_path(*, skip=None): """ Get the paths from the COLCON_PREFIX_PATH environment variable. For not existing paths a warning is being printed and the path is being skipped. Even for repeated invocation a warning is only being printed once for each non existing path. :param skip: The current prefix path to be skipped and not be included in the return value :returns: The list of prefix paths :rtype: list """ warnings.warn( "'colcon_core.shell.get_colcon_prefix_path()' has been deprecated, " "use 'colcon_core.prefix_path.get_chained_prefix_path()' instead", stacklevel=2) global _get_colcon_prefix_path_warnings prefix_path = [] colcon_prefix_path = os.environ.get('COLCON_PREFIX_PATH', '') for path in colcon_prefix_path.split(os.pathsep): if not path: continue if skip is not None and path == str(skip): continue if not os.path.exists(path): if path not in _get_colcon_prefix_path_warnings: logger.warning( f"The path '{path}' in the environment variable " "COLCON_PREFIX_PATH doesn't exist") _get_colcon_prefix_path_warnings.add(path) continue prefix_path.append(path) return prefix_path def check_dependency_availability(dependencies, *, script_filename): """ Check if all dependencies are available. First the install base of the workspace is being checked. Second all prefix paths set in the environment are considered. In the second case a warning is logged to notify the user. :param dependencies: The ordered dictionary mapping dependency names to their paths. 
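# --- Illustrative sketch (hypothetical values): create hooks which prepend
# the 'lib' subdirectory of a package install prefix to LD_LIBRARY_PATH.
# At least one primary shell extension must be loaded, otherwise the call
# raises RuntimeError.
from pathlib import Path

from colcon_core.shell import create_environment_hook

hooks = create_environment_hook(
    'ld_library_path', Path('install/my_pkg'), 'my_pkg',
    'LD_LIBRARY_PATH', 'lib', mode='prepend')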
Packages which have been found in the environment are being removed from the dictionary. :param str script_filename: The filename of the package specific script to check for :raises RuntimeError: if any package isn't found in either of the locations """ missing = OrderedDict() # check if the dependency exists in the install base of this workspace for pkg_name, pkg_install_base in dependencies.items(): pkg_script = Path( pkg_install_base) / 'share' / pkg_name / script_filename if not pkg_script.exists(): missing[pkg_name] = pkg_script # check if the dependency exists in any other prefix path packages_in_env = find_installed_packages_in_environment() env_packages = OrderedDict() for pkg_name, pkg_install_base in list(missing.items()): if pkg_name in packages_in_env: env_packages[pkg_name] = packages_in_env[pkg_name] # no need to source any script for this package del dependencies[pkg_name] del missing[pkg_name] # warn about using packages from the environment if env_packages: logger.warning( "The following packages are in the workspace but haven't been " 'built:' + ''.join('\n- %s' % name for name in env_packages.keys()) + '\nThey are being used from the following locations instead:' + ''.join('\n- %s' % path for path in env_packages.values()) + '\nTo suppress this warning ignore these packages in the ' + 'workspace:\n--packages-ignore ' + ' '.join(env_packages.keys())) # raise error in case any dependencies are not matched if missing: raise RuntimeError( 'Failed to find the following files:' + ''.join('\n- %s' % path for path in missing.values()) + '\nCheck that the following packages have been built:' + ''.join('\n- %s' % name for name in missing.keys())) def find_installed_packages_in_environment(): """ Find packages under the COLCON_PREFIX_PATH. For each prefix path the package index is being read and the first time a package is being found its install prefix is being added to the result. :returns: The mapping from a package name to the prefix path :rtype: OrderedDict """ packages = OrderedDict() for prefix_path in get_chained_prefix_path(): prefix_path = Path(prefix_path) pkgs = find_installed_packages(prefix_path) if pkgs is None: logger.debug(f"Ignoring prefix path '{prefix_path}'") continue for pkg_name in sorted(pkgs.keys()): # ignore packages with the same name in "lower" prefix path if pkg_name in packages: continue packages[pkg_name] = pkgs[pkg_name] return packages class FindInstalledPackagesExtensionPoint: """ The interface for extensions to find installed packages. This type of extension locates installed packages inside a prefix path. """ """The version of this extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of an extension.""" PRIORITY = 100 def find_installed_packages(self, install_base: Path): """ Find installed packages in an install path. This method must be overridden in a subclass. :param Path prefix_path: The path of the install prefix :returns: The mapping from a package name to the prefix path, or None if the path is not an install layout supported by this extension. :rtype: Dict or None """ raise NotImplementedError() def get_find_installed_packages_extensions(*, group_name=None): """ Get the available package identification extensions. The extensions are grouped by their priority and each group is ordered by the entry point name. 
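# --- Illustrative sketch (hypothetical, not part of colcon-core): a finder
# extension which treats a made-up '.my_index' directory with one file per
# package as its install layout, returning None for any other layout.
from pathlib import Path

from colcon_core.shell import FindInstalledPackagesExtensionPoint


class MarkerDirInstalledPackageFinder(FindInstalledPackagesExtensionPoint):
    """Find packages listed in a hypothetical '.my_index' directory."""

    def find_installed_packages(self, install_base: Path):  # noqa: D102
        index = install_base / '.my_index'
        if not index.is_dir():
            return None
        return {
            p.name: install_base for p in index.iterdir() if p.is_file()}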
:rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions( group_name + '.find_installed_packages') for name, extension in extensions.items(): extension.PACKAGE_IDENTIFICATION_NAME = name return order_extensions_grouped_by_priority(extensions) def find_installed_packages(install_base: Path): """ Find install packages under the install base path. Based on the install layout the packages may be discovered in different locations. :param Path install_base: The base path to find installed packages in :returns: The mapping from a package name to the prefix path, None if the path is not a supported install layout or it doesn't exist :rtype: Dict or None """ # priority means getting invoked first, but maybe that doesn't matter extensions = [] prioritized_extensions = get_find_installed_packages_extensions() for ext_list in prioritized_extensions.values(): extensions.extend(ext_list.values()) # Combine packages found by all extensions packages = {} valid_prefix = False for ext in extensions: ext_packages = ext.find_installed_packages(install_base) if ext_packages is None: continue valid_prefix = True for pkg, path in ext_packages.items(): if not path.exists(): logger.warning( f"Ignoring '{pkg}' found at '{path}' because the path" ' does not exist.') continue if pkg in packages and not path.samefile(packages[pkg]): # Same package found at different paths in the same prefix first_path = packages[pkg] logger.warning( f"The package '{pkg}' previously found at " f"'{first_path}' was found again at '{path}'." f" Ignoring '{path}'") else: packages[pkg] = path if not valid_prefix: # No extension said this was a valid prefix return None return packages colcon-core-0.17.1/colcon_core/shell/bat.py000066400000000000000000000142331465053734400205350ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path import sys from colcon_core import shell from colcon_core.plugin_system import satisfies_version from colcon_core.plugin_system import SkipExtensionException from colcon_core.prefix_path import get_chained_prefix_path from colcon_core.shell import check_dependency_availability from colcon_core.shell import get_environment_variables from colcon_core.shell import logger from colcon_core.shell import ShellExtensionPoint from colcon_core.shell.template import expand_template class BatShell(ShellExtensionPoint): """Generate `.bat` scripts to extend the environment.""" # the priority needs to be higher than the default for primary shells PRIORITY = 200 FORMAT_STR_COMMENT_LINE = ':: {comment}' FORMAT_STR_SET_ENV_VAR = 'set "{name}={value}"' FORMAT_STR_USE_ENV_VAR = '%{name}%' FORMAT_STR_INVOKE_SCRIPT = 'call:_colcon_prefix_bat_call_script ' \ '"{script_path}"' # can't use `if` here since each line is being `call`-ed FORMAT_STR_REMOVE_LEADING_SEPARATOR = \ 'call:_colcon_prefix_bat_strip_leading_semicolon "{name}"' FORMAT_STR_REMOVE_TRAILING_SEPARATOR = \ 'call:_colcon_prefix_bat_strip_trailing_semicolon "{name}"' def __init__(self): # noqa: D107 super().__init__() satisfies_version(ShellExtensionPoint.EXTENSION_POINT_VERSION, '^2.2') if sys.platform != 'win32' and not shell.use_all_shell_extensions: raise SkipExtensionException('Not used on non-Windows systems') def create_prefix_script(self, prefix_path, merge_install): # noqa: D102 prefix_env_path = prefix_path / 'local_setup.bat' logger.info("Creating prefix script '%s'" % prefix_env_path) expand_template( Path(__file__).parent / 'template' 
/ 'prefix.bat.em', prefix_env_path, { 'python_executable': sys.executable, 'merge_install': merge_install, 'package_script_no_ext': 'package', }) prefix_util_path = prefix_path / '_local_setup_util_bat.py' logger.info("Creating prefix util module '%s'" % prefix_util_path) expand_template( self._get_prefix_util_template_path(), prefix_util_path, {'shell_extension': self}) prefix_chain_env_path = prefix_path / 'setup.bat' logger.info( "Creating prefix chain script '%s'" % prefix_chain_env_path) expand_template( Path(__file__).parent / 'template' / 'prefix_chain.bat.em', prefix_chain_env_path, { 'chained_prefix_path': get_chained_prefix_path( skip=prefix_path), 'prefix_script_no_ext': 'local_setup', }) return [ prefix_env_path, prefix_util_path, prefix_chain_env_path, ] def create_package_script( # noqa: D102 self, prefix_path, pkg_name, hooks ): pkg_env_path = prefix_path / 'share' / pkg_name / 'package.bat' logger.info("Creating package script '%s'" % pkg_env_path) expand_template( Path(__file__).parent / 'template' / 'package.bat.em', pkg_env_path, { 'hooks': list(filter( lambda hook: str(hook[0]).endswith('.bat'), hooks)), }) return [pkg_env_path] def create_hook_set_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, value, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.bat' % env_hook_name) logger.info("Creating environment hook '%s'" % hook_path) if value == '': value = '%COLCON_CURRENT_PREFIX%' expand_template( Path(__file__).parent / 'template' / 'hook_set_value.bat.em', hook_path, {'name': name, 'value': value}) return hook_path def create_hook_append_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.bat' % env_hook_name) logger.info("Creating environment hook '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_append_value.bat.em', hook_path, { 'name': name, 'subdirectory': subdirectory, }) return hook_path def create_hook_prepend_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.bat' % env_hook_name) logger.info("Creating environment hook '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_prepend_value.bat.em', hook_path, { 'name': name, 'subdirectory': subdirectory, }) return hook_path async def generate_command_environment( # noqa: D102 self, task_name, build_base, dependencies, ): if sys.platform != 'win32': raise SkipExtensionException('Not usable on non-Windows systems') # check if all dependencies are available # removes dependencies available in the environment from the parameter check_dependency_availability( dependencies, script_filename='package.bat') hook_path = build_base / ('colcon_command_prefix_%s.bat' % task_name) expand_template( Path(__file__).parent / 'template' / 'command_prefix.bat.em', hook_path, {'dependencies': dependencies}) cmd = [str(hook_path), '&&', 'set'] env = await get_environment_variables(cmd, cwd=str(build_base)) # write environment variables to file for debugging env_path = build_base / ( 'colcon_command_prefix_%s.bat.env' % task_name) with env_path.open('w') as h: for key in sorted(env.keys()): value = env[key] h.write(f'{key}={value}\n') return env colcon-core-0.17.1/colcon_core/shell/dsv.py000066400000000000000000000055761465053734400205750ustar00rootroot00000000000000# Copyright 2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 from 
pathlib import Path from colcon_core.plugin_system import satisfies_version from colcon_core.shell import logger from colcon_core.shell import ShellExtensionPoint from colcon_core.shell.template import expand_template class DsvShell(ShellExtensionPoint): """Generate `.dsv` files describing the intended environment change.""" # the priority needs to be higher than the default for primary shells PRIORITY = 200 def __init__(self): # noqa: D107 super().__init__() satisfies_version(ShellExtensionPoint.EXTENSION_POINT_VERSION, '^2.2') def create_prefix_script(self, prefix_path, merge_install): # noqa: D102 return [] def create_package_script( # noqa: D102 self, prefix_path, pkg_name, hooks ): pkg_env_path = prefix_path / 'share' / pkg_name / 'package.dsv' logger.info("Creating package descriptor '%s'" % pkg_env_path) expand_template( Path(__file__).parent / 'template' / 'package.dsv.em', pkg_env_path, { 'hooks': hooks, }) return [pkg_env_path] def create_hook_set_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, value, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.dsv' % env_hook_name) logger.info("Creating environment descriptor '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_set_value.dsv.em', hook_path, { 'name': name, 'value': value, }) return hook_path def create_hook_append_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.dsv' % env_hook_name) logger.info("Creating environment descriptor '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_append_value.dsv.em', hook_path, { 'type_': 'append-non-duplicate', 'name': name, 'value': subdirectory, }) return hook_path def create_hook_prepend_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.dsv' % env_hook_name) logger.info("Creating environment descriptor '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_prepend_value.dsv.em', hook_path, { 'type_': 'prepend-non-duplicate', 'name': name, 'value': subdirectory, }) return hook_path colcon-core-0.17.1/colcon_core/shell/installed_packages.py000066400000000000000000000041741465053734400236070ustar00rootroot00000000000000# Copyright 2016-2021 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path from colcon_core.location import get_relative_package_index_path from colcon_core.shell import FindInstalledPackagesExtensionPoint class IsolatedInstalledPackageFinder(FindInstalledPackagesExtensionPoint): """Find installed packages in colcon isolated install spaces.""" def find_installed_packages(self, install_base: Path): """Find installed packages in colcon isolated install spaces.""" marker_file = install_base / '.colcon_install_layout' if not marker_file.is_file(): return None install_layout = marker_file.read_text().rstrip() if install_layout != 'isolated': return None packages = {} # for each subdirectory look for the package specific file for p in install_base.iterdir(): if not p.is_dir(): continue if p.name.startswith('.'): continue marker = p / get_relative_package_index_path() / p.name if marker.is_file(): packages[p.name] = p return packages class MergedInstalledPackageFinder(FindInstalledPackagesExtensionPoint): """Find installed packages in colcon merged install spaces.""" def find_installed_packages(self, install_base: Path): """Find installed 
packages in colcon merged install spaces.""" marker_file = install_base / '.colcon_install_layout' if not marker_file.is_file(): return None install_layout = marker_file.read_text().rstrip() if install_layout != 'merged': return None packages = {} # find all files in the subdirectory if (install_base / get_relative_package_index_path()).is_dir(): package_index = install_base / get_relative_package_index_path() for p in package_index.iterdir(): if not p.is_file(): continue if p.name.startswith('.'): continue packages[p.name] = install_base return packages colcon-core-0.17.1/colcon_core/shell/sh.py000066400000000000000000000144301465053734400204000ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path import sys from colcon_core import shell from colcon_core.plugin_system import satisfies_version from colcon_core.plugin_system import SkipExtensionException from colcon_core.prefix_path import get_chained_prefix_path from colcon_core.shell import check_dependency_availability from colcon_core.shell import get_environment_variables from colcon_core.shell import logger from colcon_core.shell import ShellExtensionPoint from colcon_core.shell.template import expand_template class ShShell(ShellExtensionPoint): """Generate `.sh` scripts to extend the environment.""" # the priority needs to be higher than the default for primary shells PRIORITY = 200 FORMAT_STR_COMMENT_LINE = '# {comment}' FORMAT_STR_SET_ENV_VAR = 'export {name}="{value}"' FORMAT_STR_USE_ENV_VAR = '${name}' FORMAT_STR_INVOKE_SCRIPT = 'COLCON_CURRENT_PREFIX="{prefix}" ' \ '_colcon_prefix_sh_source_script "{script_path}"' FORMAT_STR_REMOVE_LEADING_SEPARATOR = 'if [ "$(echo -n ${name} | ' \ 'head -c 1)" = ":" ]; then export {name}=${{{name}#?}} ; fi' FORMAT_STR_REMOVE_TRAILING_SEPARATOR = 'if [ "$(echo -n ${name} | ' \ 'tail -c 1)" = ":" ]; then export {name}=${{{name}%?}} ; fi' def __init__(self): # noqa: D107 super().__init__() satisfies_version(ShellExtensionPoint.EXTENSION_POINT_VERSION, '^2.2') if sys.platform == 'win32' and not shell.use_all_shell_extensions: raise SkipExtensionException('Not used on Windows systems') def create_prefix_script(self, prefix_path, merge_install): # noqa: D102 prefix_env_path = prefix_path / 'local_setup.sh' logger.info("Creating prefix script '%s'" % prefix_env_path) expand_template( Path(__file__).parent / 'template' / 'prefix.sh.em', prefix_env_path, { 'prefix_path': prefix_path, 'python_executable': sys.executable, 'merge_install': merge_install, 'package_script_no_ext': 'package', }) prefix_util_path = prefix_path / '_local_setup_util_sh.py' logger.info("Creating prefix util module '%s'" % prefix_util_path) expand_template( self._get_prefix_util_template_path(), prefix_util_path, {'shell_extension': self}) prefix_chain_env_path = prefix_path / 'setup.sh' logger.info( "Creating prefix chain script '%s'" % prefix_chain_env_path) expand_template( Path(__file__).parent / 'template' / 'prefix_chain.sh.em', prefix_chain_env_path, { 'prefix_path': prefix_path, 'chained_prefix_path': get_chained_prefix_path( skip=prefix_path), 'prefix_script_no_ext': 'local_setup', }) return [ prefix_env_path, prefix_util_path, prefix_chain_env_path, ] def create_package_script( # noqa: D102 self, prefix_path, pkg_name, hooks ): pkg_env_path = prefix_path / 'share' / pkg_name / 'package.sh' logger.info("Creating package script '%s'" % pkg_env_path) expand_template( Path(__file__).parent / 'template' / 'package.sh.em', pkg_env_path, {
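# --- Illustrative sketch: exercising the isolated install layout finder
# above against a minimal, temporary install space ('demo_pkg' is made up).
from pathlib import Path
import tempfile

from colcon_core.location import get_relative_package_index_path
from colcon_core.shell.installed_packages import IsolatedInstalledPackageFinder

with tempfile.TemporaryDirectory() as base:
    install_base = Path(base)
    (install_base / '.colcon_install_layout').write_text('isolated\n')
    marker = (
        install_base / 'demo_pkg' / get_relative_package_index_path() /
        'demo_pkg')
    marker.parent.mkdir(parents=True)
    marker.write_text('')
    found = IsolatedInstalledPackageFinder().find_installed_packages(
        install_base)
    assert list(found) == ['demo_pkg']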
'prefix_path': prefix_path, 'hooks': list(filter( lambda hook: str(hook[0]).endswith('.sh'), hooks)), }) return [pkg_env_path] def create_hook_set_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, value, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.sh' % env_hook_name) logger.info("Creating environment hook '%s'" % hook_path) if value == '': value = '$COLCON_CURRENT_PREFIX' expand_template( Path(__file__).parent / 'template' / 'hook_set_value.sh.em', hook_path, {'name': name, 'value': value}) return hook_path def create_hook_append_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.sh' % env_hook_name) logger.info("Creating environment hook '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_append_value.sh.em', hook_path, { 'name': name, 'subdirectory': subdirectory, }) return hook_path def create_hook_prepend_value( # noqa: D102 self, env_hook_name, prefix_path, pkg_name, name, subdirectory, ): hook_path = prefix_path / 'share' / pkg_name / 'hook' / \ ('%s.sh' % env_hook_name) logger.info("Creating environment hook '%s'" % hook_path) expand_template( Path(__file__).parent / 'template' / 'hook_prepend_value.sh.em', hook_path, { 'name': name, 'subdirectory': subdirectory, }) return hook_path async def generate_command_environment( # noqa: D102 self, task_name, build_base, dependencies, ): if sys.platform == 'win32': raise SkipExtensionException('Not usable on Windows systems') # check if all dependencies are available # removes dependencies available in the environment from the parameter check_dependency_availability( dependencies, script_filename='package.sh') hook_path = build_base / ('colcon_command_prefix_%s.sh' % task_name) expand_template( Path(__file__).parent / 'template' / 'command_prefix.sh.em', hook_path, {'dependencies': dependencies}) cmd = ['.', str(hook_path), '&&', 'env'] env = await get_environment_variables(cmd, cwd=str(build_base)) # write environment variables to file for debugging env_path = build_base / ('colcon_command_prefix_%s.sh.env' % task_name) with env_path.open('w') as h: for key in sorted(env.keys()): value = env[key] h.write(f'{key}={value}\n') return env colcon-core-0.17.1/colcon_core/shell/template/000077500000000000000000000000001465053734400212255ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/shell/template/__init__.py000066400000000000000000000060051465053734400233370ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from io import StringIO import os from colcon_core.logging import colcon_logger try: from em import Interpreter from em import OVERRIDE_OPT except ImportError as e: try: import em # noqa: F401 except ImportError: e.msg += " The Python package 'empy' must be installed" raise e from None e.msg += " The Python package 'empy' must be installed and 'em' must " \ 'not be installed since both packages share the same namespace' raise e from None logger = colcon_logger.getChild(__name__) def expand_template(template_path, destination_path, data): """ Expand an EmPy template. The directory of the destination path is created if necessary. 
:param template_path: The patch of the template file :param destination_path: The path of the generated expanded file :param dict data: The data used for expanding the template :raises: Any exception which `em.Interpreter.string` might raise """ output = StringIO() try: # disable OVERRIDE_OPT to avoid saving / restoring stdout interpreter = CachingInterpreter( output=output, options={OVERRIDE_OPT: False}) with template_path.open('r') as h: content = h.read() interpreter.string(content, str(template_path), locals=data) output = output.getvalue() except Exception as e: # noqa: F841 logger.error( f"{e.__class__.__name__} processing template '{template_path}'") raise else: os.makedirs(str(destination_path.parent), exist_ok=True) # if the destination_path is a symlink remove the symlink # to avoid writing to the symlink destination if destination_path.is_symlink(): destination_path.unlink() with destination_path.open('w') as h: h.write(output) finally: interpreter.shutdown() class BypassStdoutInterpreter(Interpreter): """Interpreter for EmPy which keeps `stdout` unchanged.""" def installProxy(self): # noqa: D102 N802 # avoid replacing stdout with ProxyFile pass cached_tokens = {} class CachingInterpreter(BypassStdoutInterpreter): """Interpreter for EmPy which which caches parsed tokens.""" def parse(self, scanner, locals=None): # noqa: A002 D102 global cached_tokens data = scanner.buffer # try to use cached tokens tokens = cached_tokens.get(data) if tokens is None: # collect tokens and cache them tokens = [] while True: token = scanner.one() if token is None: break tokens.append(token) cached_tokens[data] = tokens # reimplement the parse method using the (cached) tokens self.invoke('atParse', scanner=scanner, locals=locals) for token in tokens: self.invoke('atToken', token=token) token.run(self, locals) colcon-core-0.17.1/colcon_core/shell/template/command_prefix.bat.em000066400000000000000000000004101465053734400253030ustar00rootroot00000000000000:: generated from colcon_core/shell/template/command_prefix.bat.em @@echo off @[for pkg_name, pkg_install_base in dependencies.items()]@ @{ import os pkg_script = os.path.join(pkg_install_base, 'share', pkg_name, 'package.bat') }@ call "@(pkg_script)" @[end for]@ colcon-core-0.17.1/colcon_core/shell/template/command_prefix.sh.em000066400000000000000000000003671465053734400251620ustar00rootroot00000000000000# generated from colcon_core/shell/template/command_prefix.sh.em @[for pkg_name, pkg_install_base in dependencies.items()]@ @{ import os pkg_script = os.path.join(pkg_install_base, 'share', pkg_name, 'package.sh') }@ . "@(pkg_script)" @[end for]@ colcon-core-0.17.1/colcon_core/shell/template/hook_append_value.bat.em000066400000000000000000000030601465053734400257770ustar00rootroot00000000000000:: generated from colcon_core/shell/template/hook_append_value.bat.em @@echo off @{ import os if os.path.isabs(subdirectory): value = subdirectory else: value = '%COLCON_CURRENT_PREFIX%' if subdirectory: value += '\\' + subdirectory }@ call:colcon_append_unique_value @(name) "@(value)" goto :eof :: function to append a value to a variable :: which uses semicolons as separators :: duplicates as well as trailing separators are avoided :: first argument: the name of the result variable :: second argument: the value to be appended :colcon_append_unique_value setlocal enabledelayedexpansion :: arguments set "listname=%~1" set "value=%~2" :: get values from variable set "values=!%listname%!" 
set "is_duplicate=" :: skip loop if values is empty if "%values%" NEQ "" ( :: iterate over existing values in the variable for %%v in ("%values:;=";"%") do ( :: ignore empty strings if "%%~v" NEQ "" ( :: ignore value if already present if "%%~v" EQU "%value%" ( set "is_duplicate=1" ) if "!all_values!" NEQ "" ( set "all_values=!all_values!;%%~v" ) else ( set "all_values=%%~v" ) ) ) ) :: if it is not a duplicate append it if "%is_duplicate%" == "" ( :: if not empty, append a semi-colon if "!all_values!" NEQ "" ( set "all_values=!all_values!;" ) :: append the value set "all_values=!all_values!%value%" ) :: set result variable in parent scope endlocal & ( set "%~1=%all_values%" ) goto:eof colcon-core-0.17.1/colcon_core/shell/template/hook_append_value.dsv.em000066400000000000000000000000321465053734400260210ustar00rootroot00000000000000@(type_);@(name);@(value) colcon-core-0.17.1/colcon_core/shell/template/hook_append_value.sh.em000066400000000000000000000035201465053734400256440ustar00rootroot00000000000000# generated from colcon_core/shell/template/hook_append_value.sh.em @{ import os if os.path.isabs(subdirectory): value = subdirectory else: value = '$COLCON_CURRENT_PREFIX' if subdirectory: value += '/' + subdirectory }@ # function to append a value to a variable # which uses colons as separators # duplicates as well as trailing separators are avoided # first argument: the name of the result variable # second argument: the value to be appended _colcon_append_unique_value() { # arguments _listname="$1" _value="$2" # get values from variable eval _values=\"\$$_listname\" _duplicate= # backup the field separator _colcon_append_unique_value_IFS=$IFS IFS=":" # start with no values _all_values= # workaround SH_WORD_SPLIT not being set in zsh if [ "$(command -v colcon_zsh_convert_to_array)" ]; then colcon_zsh_convert_to_array _values fi # iterate over existing values in the variable for _item in $_values; do # ignore empty strings if [ -z "$_item" ]; then continue fi # ignore _value if already present if [ "$_item" = "$_value" ]; then _duplicate=1 fi if [ -z "$_all_values" ]; then _all_values="$_item" else _all_values="$_all_values:$_item" fi done unset _item # append only non-duplicates if [ -z "$_duplicate" ]; then # avoid leading separator if [ -z "$_all_values" ]; then _all_values="$_value" else _all_values="$_all_values:$_value" fi fi # restore the field separator IFS=$_colcon_append_unique_value_IFS unset _colcon_append_unique_value_IFS # export the updated variable eval export $_listname=\"$_all_values\" unset _all_values unset _duplicate unset _values unset _value unset _listname } _colcon_append_unique_value @(name) "@(value)" unset _colcon_append_unique_value colcon-core-0.17.1/colcon_core/shell/template/hook_prepend_value.bat.em000066400000000000000000000023771465053734400261770ustar00rootroot00000000000000:: generated from colcon_core/shell/template/hook_prepend_value.bat.em @@echo off @{ import os if os.path.isabs(subdirectory): value = subdirectory else: value = '%COLCON_CURRENT_PREFIX%' if subdirectory: value += '\\' + subdirectory }@ call:colcon_prepend_unique_value @(name) "@(value)" goto :eof :: function to prepend a value to a variable :: which uses semicolons as separators :: duplicates as well as trailing separators are avoided :: first argument: the name of the result variable :: second argument: the value to be prepended :colcon_prepend_unique_value setlocal enabledelayedexpansion :: arguments set "listname=%~1" set "value=%~2" :: get values from variable set 
"values=!%listname%!" :: start with the new value set "all_values=%value%" :: skip loop if values is empty if "%values%" NEQ "" ( :: iterate over existing values in the variable for %%v in ("%values:;=";"%") do ( :: ignore empty strings if "%%~v" NEQ "" ( :: ignore duplicates of value if "%%~v" NEQ "%value%" ( :: keep non-duplicate values set "all_values=!all_values!;%%~v" ) ) ) ) :: set result variable in parent scope endlocal & ( set "%~1=%all_values%" ) goto:eof colcon-core-0.17.1/colcon_core/shell/template/hook_prepend_value.dsv.em000066400000000000000000000000321465053734400262070ustar00rootroot00000000000000@(type_);@(name);@(value) colcon-core-0.17.1/colcon_core/shell/template/hook_prepend_value.sh.em000066400000000000000000000004431465053734400260330ustar00rootroot00000000000000# generated from colcon_core/shell/template/hook_prepend_value.sh.em @{ import os if os.path.isabs(subdirectory): value = subdirectory else: value = '$COLCON_CURRENT_PREFIX' if subdirectory: value += '/' + subdirectory }@ _colcon_prepend_unique_value @(name) "@(value)" colcon-core-0.17.1/colcon_core/shell/template/hook_set_value.bat.em000066400000000000000000000001461465053734400253250ustar00rootroot00000000000000:: generated from colcon_core/shell/template/hook_set_value.bat.em @@echo off set "@(name)=@(value)" colcon-core-0.17.1/colcon_core/shell/template/hook_set_value.dsv.em000066400000000000000000000000251465053734400253470ustar00rootroot00000000000000set;@(name);@(value) colcon-core-0.17.1/colcon_core/shell/template/hook_set_value.sh.em000066400000000000000000000001341465053734400251660ustar00rootroot00000000000000# generated from colcon_core/shell/template/hook_set_value.sh.em export @(name)="@(value)" colcon-core-0.17.1/colcon_core/shell/template/package.bat.em000066400000000000000000000013651465053734400237150ustar00rootroot00000000000000:: generated from colcon_core/shell/template/package.bat.em @@echo off @[if hooks]@ :: a batch script is able to determine its own path :: the prefix is two levels up from the package specific share directory for %%p in ("%~dp0..\..") do set "COLCON_CURRENT_PREFIX=%%~fp" @[end if]@ @[for hook in hooks]@ call:call_file "%%COLCON_CURRENT_PREFIX%%\@(hook[0])"@ @[ for hook_arg in hook[1]]@ @(hook_arg)@ @[ end for] @[end for]@ @[if hooks]@ set COLCON_CURRENT_PREFIX= @[end if]@ goto :eof :: call the specified batch file and output the name when tracing is requested :: first argument: the batch file :call_file if exist "%~1" ( if "%COLCON_TRACE%" NEQ "" echo call "%~1" call "%~1%" ) else ( echo not found: "%~1" 1>&2 ) goto:eof colcon-core-0.17.1/colcon_core/shell/template/package.dsv.em000066400000000000000000000001041465053734400237310ustar00rootroot00000000000000@{ import os }@ @[for hook in hooks]@ source;@(hook[0]) @[end for]@ colcon-core-0.17.1/colcon_core/shell/template/package.sh.em000066400000000000000000000054211465053734400235560ustar00rootroot00000000000000# generated from colcon_core/shell/template/package.sh.em # This script extends the environment for this package. 
# function to prepend a value to a variable # which uses colons as separators # duplicates as well as trailing separators are avoided # first argument: the name of the result variable # second argument: the value to be prepended _colcon_prepend_unique_value() { # arguments _listname="$1" _value="$2" # get values from variable eval _values=\"\$$_listname\" # backup the field separator _colcon_prepend_unique_value_IFS=$IFS IFS=":" # start with the new value _all_values="$_value" # workaround SH_WORD_SPLIT not being set in zsh if [ "$(command -v colcon_zsh_convert_to_array)" ]; then colcon_zsh_convert_to_array _values fi # iterate over existing values in the variable for _item in $_values; do # ignore empty strings if [ -z "$_item" ]; then continue fi # ignore duplicates of _value if [ "$_item" = "$_value" ]; then continue fi # keep non-duplicate values _all_values="$_all_values:$_item" done unset _item # restore the field separator IFS=$_colcon_prepend_unique_value_IFS unset _colcon_prepend_unique_value_IFS # export the updated variable eval export $_listname=\"$_all_values\" unset _all_values unset _values unset _value unset _listname } @[if hooks]@ # since a plain shell script can't determine its own path when being sourced # either use the provided COLCON_CURRENT_PREFIX # or fall back to the build time prefix (if it exists) _colcon_package_sh_COLCON_CURRENT_PREFIX="@(prefix_path)" if [ -z "$COLCON_CURRENT_PREFIX" ]; then if [ ! -d "$_colcon_package_sh_COLCON_CURRENT_PREFIX" ]; then echo "The build time path \"$_colcon_package_sh_COLCON_CURRENT_PREFIX\" doesn't exist. Either source a script for a different shell or set the environment variable \"COLCON_CURRENT_PREFIX\" explicitly." 1>&2 unset _colcon_package_sh_COLCON_CURRENT_PREFIX return 1 fi COLCON_CURRENT_PREFIX="$_colcon_package_sh_COLCON_CURRENT_PREFIX" fi unset _colcon_package_sh_COLCON_CURRENT_PREFIX # function to source another script with conditional trace output # first argument: the path of the script # additional arguments: arguments to the script _colcon_package_sh_source_script() { if [ -f "$1" ]; then if [ -n "$COLCON_TRACE" ]; then echo "# . \"$1\"" fi . "$@@" else echo "not found: \"$1\"" 1>&2 fi } # source sh hooks @[ for hook in hooks]@ _colcon_package_sh_source_script "$COLCON_CURRENT_PREFIX/@(hook[0])"@ @[ for hook_arg in hook[1]]@ @(hook_arg)@ @[ end for] @[ end for]@ unset _colcon_package_sh_source_script unset COLCON_CURRENT_PREFIX @[end if]@ # do not unset _colcon_prepend_unique_value since it might be used by non-primary shell hooks colcon-core-0.17.1/colcon_core/shell/template/prefix.bat.em000066400000000000000000000074711465053734400236230ustar00rootroot00000000000000:: generated from colcon_core/shell/template/prefix.bat.em @@echo off :: This script extends the environment with all packages contained in this :: prefix path. :: add this prefix to the COLCON_PREFIX_PATH call:_colcon_prefix_bat_prepend_unique_value COLCON_PREFIX_PATH "%%~dp0" :: get and run all commands in topological order call:_colcon_run_ordered_commands "%~dp0" goto:eof :: function to prepend a value to a variable :: which uses semicolons as separators :: duplicates as well as trailing separators are avoided :: first argument: the name of the result variable :: second argument: the value to be prepended :_colcon_prefix_bat_prepend_unique_value setlocal enabledelayedexpansion :: arguments set "listname=%~1" set "value=%~2" :: get values from variable set "values=!%listname%!" 
:: start with the new value set "all_values=%value%" :: skip loop if values is empty if "%values%" NEQ "" ( :: iterate over existing values in the variable for %%v in ("%values:;=";"%") do ( :: ignore empty strings if "%%~v" NEQ "" ( :: ignore duplicates of value if "%%~v" NEQ "%value%" ( :: keep non-duplicate values set "all_values=!all_values!;%%~v" ) ) ) ) :: set result variable in parent scope endlocal & ( set "%~1=%all_values%" ) goto:eof :: Run the commands in topological order :: first argument: the base path to look for packages :_colcon_run_ordered_commands setlocal enabledelayedexpansion :: check environment variable for custom Python executable if "%COLCON_PYTHON_EXECUTABLE%" NEQ "" ( if not exist "%COLCON_PYTHON_EXECUTABLE%" ( echo error: COLCON_PYTHON_EXECUTABLE '%COLCON_PYTHON_EXECUTABLE%' doesn't exist exit /b 1 ) set "_colcon_python_executable=%COLCON_PYTHON_EXECUTABLE%" ) else ( :: use the Python executable known at configure time set "_colcon_python_executable=@(python_executable)" :: if it doesn't exist try a fall back if not exist "!_colcon_python_executable!" ( python --version > NUL 2> NUL if errorlevel 1 ( echo error: unable to find python executable exit /b 1 ) set "_colcon_python_executable=python" ) ) endlocal & ( set "_colcon_python_executable=%_colcon_python_executable%" ) :: escape potential closing parenthesis which would break the for loop set "_colcon_python_executable=%_colcon_python_executable:)=^)%" for /f "delims=" %%c in ('""%_colcon_python_executable%" "%~1_local_setup_util_bat.py" bat@ @[if merge_install]@ --merged-install@ @[end if]@ "') do ( if "%COLCON_TRACE%" NEQ "" ( echo %%c ) :: only invoke non-comment lines echo %%c | findstr /r "^::" >nul 2>&1 if errorlevel 1 ( call %%c ) ) set _colcon_python_executable= goto:eof :: call the specified batch file and output the name when tracing is requested :: first argument: the batch file :_colcon_prefix_bat_call_script if exist "%~1" ( if "%COLCON_TRACE%" NEQ "" echo call "%~1" call "%~1%" ) else ( echo not found: "%~1" 1>&2 ) goto:eof :: strip a leading semicolon from an environment variable if applicable :: first argument: the environment variable name :_colcon_prefix_bat_strip_leading_semicolon setlocal enabledelayedexpansion set "name=%~1" set "value=!%name%!" if "%value:~0,1%"==";" set "value=%value:~1%" :: set result variable in parent scope endlocal & ( set "%~1=%value%" ) goto:eof :: strip a trailing semicolon from an environment variable if applicable :: first argument: the environment variable name :_colcon_prefix_bat_strip_trailing_semicolon setlocal enabledelayedexpansion set "name=%~1" set "value=!%name%!" if "%value:~-1%"==";" set "value=%value:~0,-1%" :: set result variable in parent scope endlocal & ( set "%~1=%value%" ) goto:eof colcon-core-0.17.1/colcon_core/shell/template/prefix.sh.em000066400000000000000000000104311465053734400234550ustar00rootroot00000000000000# generated from colcon_core/shell/template/prefix.sh.em # This script extends the environment with all packages contained in this # prefix path. # since a plain shell script can't determine its own path when being sourced # either use the provided COLCON_CURRENT_PREFIX # or fall back to the build time prefix (if it exists) _colcon_prefix_sh_COLCON_CURRENT_PREFIX="@(prefix_path)" if [ -z "$COLCON_CURRENT_PREFIX" ]; then if [ ! -d "$_colcon_prefix_sh_COLCON_CURRENT_PREFIX" ]; then echo "The build time path \"$_colcon_prefix_sh_COLCON_CURRENT_PREFIX\" doesn't exist. 
Either source a script for a different shell or set the environment variable \"COLCON_CURRENT_PREFIX\" explicitly." 1>&2 unset _colcon_prefix_sh_COLCON_CURRENT_PREFIX return 1 fi else _colcon_prefix_sh_COLCON_CURRENT_PREFIX="$COLCON_CURRENT_PREFIX" fi # function to prepend a value to a variable # which uses colons as separators # duplicates as well as trailing separators are avoided # first argument: the name of the result variable # second argument: the value to be prepended _colcon_prefix_sh_prepend_unique_value() { # arguments _listname="$1" _value="$2" # get values from variable eval _values=\"\$$_listname\" # backup the field separator _colcon_prefix_sh_prepend_unique_value_IFS="$IFS" IFS=":" # start with the new value _all_values="$_value" _contained_value="" # iterate over existing values in the variable for _item in $_values; do # ignore empty strings if [ -z "$_item" ]; then continue fi # ignore duplicates of _value if [ "$_item" = "$_value" ]; then _contained_value=1 continue fi # keep non-duplicate values _all_values="$_all_values:$_item" done unset _item if [ -z "$_contained_value" ]; then if [ -n "$COLCON_TRACE" ]; then if [ "$_all_values" = "$_value" ]; then echo "export $_listname=$_value" else echo "export $_listname=$_value:\$$_listname" fi fi fi unset _contained_value # restore the field separator IFS="$_colcon_prefix_sh_prepend_unique_value_IFS" unset _colcon_prefix_sh_prepend_unique_value_IFS # export the updated variable eval export $_listname=\"$_all_values\" unset _all_values unset _values unset _value unset _listname } # add this prefix to the COLCON_PREFIX_PATH _colcon_prefix_sh_prepend_unique_value COLCON_PREFIX_PATH "$_colcon_prefix_sh_COLCON_CURRENT_PREFIX" unset _colcon_prefix_sh_prepend_unique_value # check environment variable for custom Python executable if [ -n "$COLCON_PYTHON_EXECUTABLE" ]; then if [ ! -f "$COLCON_PYTHON_EXECUTABLE" ]; then echo "error: COLCON_PYTHON_EXECUTABLE '$COLCON_PYTHON_EXECUTABLE' doesn't exist" return 1 fi _colcon_python_executable="$COLCON_PYTHON_EXECUTABLE" else # try the Python executable known at configure time _colcon_python_executable="@(python_executable)" # if it doesn't exist try a fall back if [ ! -f "$_colcon_python_executable" ]; then if ! /usr/bin/env python3 --version > /dev/null 2> /dev/null; then echo "error: unable to find python3 executable" return 1 fi _colcon_python_executable=`/usr/bin/env python3 -c "import sys; print(sys.executable)"` fi fi # function to source another script with conditional trace output # first argument: the path of the script _colcon_prefix_sh_source_script() { if [ -f "$1" ]; then if [ -n "$COLCON_TRACE" ]; then echo "# . \"$1\"" fi . "$1" else echo "not found: \"$1\"" 1>&2 fi } # get all commands in topological order _colcon_ordered_commands="$(@ $_colcon_python_executable "$_colcon_prefix_sh_COLCON_CURRENT_PREFIX/_local_setup_util_sh.py" sh@ @[if merge_install]@ --merged-install@ @[end if]@ )" unset _colcon_python_executable if [ -n "$COLCON_TRACE" ]; then echo "_colcon_prefix_sh_source_script() { if [ -f \"\$1\" ]; then if [ -n \"\$COLCON_TRACE\" ]; then echo \"# . \\\"\$1\\\"\" fi . 
\"\$1\" else echo \"not found: \\\"\$1\\\"\" 1>&2 fi }" echo "# Execute generated script:" echo "# <<<" echo "${_colcon_ordered_commands}" echo "# >>>" echo "unset _colcon_prefix_sh_source_script" fi eval "${_colcon_ordered_commands}" unset _colcon_ordered_commands unset _colcon_prefix_sh_source_script unset _colcon_prefix_sh_COLCON_CURRENT_PREFIX colcon-core-0.17.1/colcon_core/shell/template/prefix_chain.bat.em000066400000000000000000000015551465053734400247620ustar00rootroot00000000000000:: generated from colcon_core/shell/template/prefix_chain.bat.em @@echo off :: This script extends the environment with the environment of other prefix :: paths which were sourced when this file was generated as well as all :: packages contained in this prefix path. @[if chained_prefix_path]@ :: source chained prefixes @[ for prefix in reversed(chained_prefix_path)]@ call:_colcon_prefix_chain_bat_call_script "@(prefix)\@(prefix_script_no_ext).bat" @[ end for]@ @[end if]@ :: source this prefix call:_colcon_prefix_chain_bat_call_script "%%~dp0@(prefix_script_no_ext).bat" goto:eof :: function to source another script with conditional trace output :: first argument: the path of the script :_colcon_prefix_chain_bat_call_script if exist "%~1" ( if "%COLCON_TRACE%" NEQ "" echo call "%~1" call "%~1%" ) else ( echo not found: "%~1" 1>&2 ) goto:eof colcon-core-0.17.1/colcon_core/shell/template/prefix_chain.sh.em000066400000000000000000000037331465053734400246260ustar00rootroot00000000000000# generated from colcon_core/shell/template/prefix_chain.sh.em # This script extends the environment with the environment of other prefix # paths which were sourced when this file was generated as well as all packages # contained in this prefix path. # since a plain shell script can't determine its own path when being sourced # either use the provided COLCON_CURRENT_PREFIX # or fall back to the build time prefix (if it exists) _colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX=@(prefix_path) if [ ! -z "$COLCON_CURRENT_PREFIX" ]; then _colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX="$COLCON_CURRENT_PREFIX" elif [ ! -d "$_colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX" ]; then echo "The build time path \"$_colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX\" doesn't exist. Either source a script for a different shell or set the environment variable \"COLCON_CURRENT_PREFIX\" explicitly." 1>&2 unset _colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX return 1 fi # function to source another script with conditional trace output # first argument: the path of the script _colcon_prefix_chain_sh_source_script() { if [ -f "$1" ]; then if [ -n "$COLCON_TRACE" ]; then echo "# . \"$1\"" fi . 
"$1" else echo "not found: \"$1\"" 1>&2 fi } @[if chained_prefix_path]@ # source chained prefixes @[ for prefix in reversed(chained_prefix_path)]@ # setting COLCON_CURRENT_PREFIX avoids relying on the build time prefix of the sourced script COLCON_CURRENT_PREFIX="@(prefix)" _colcon_prefix_chain_sh_source_script "$COLCON_CURRENT_PREFIX/@(prefix_script_no_ext).sh" @[ end for]@ @[end if]@ # source this prefix # setting COLCON_CURRENT_PREFIX avoids relying on the build time prefix of the sourced script COLCON_CURRENT_PREFIX="$_colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX" _colcon_prefix_chain_sh_source_script "$COLCON_CURRENT_PREFIX/@(prefix_script_no_ext).sh" unset _colcon_prefix_chain_sh_COLCON_CURRENT_PREFIX unset _colcon_prefix_chain_sh_source_script unset COLCON_CURRENT_PREFIX colcon-core-0.17.1/colcon_core/shell/template/prefix_util.py000066400000000000000000000112121465053734400241260ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import argparse import os from pathlib import Path import sys def main(argv=sys.argv[1:]): # noqa: D103 parser = argparse.ArgumentParser( description='Output found packages in topological order') parser.add_argument( '--merged-install', action='store_true', help='All install prefixes are merged into a single location') args = parser.parse_args(argv) packages = get_packages(Path(__file__).parent, args.merged_install) for pkg_name in order_packages(packages): print(pkg_name) def get_packages(prefix_path, merged_install): """ Find packages based on colcon-specific files created during installation. :param Path prefix_path: The install prefix path of all packages :param bool merged_install: The flag if the packages are all installed directly in the prefix or if each package is installed in a subdirectory named after the package :returns: A mapping from the package name to the set of runtime dependencies :rtype: dict """ packages = {} # since importing colcon_core isn't feasible here the following constant # must match colcon_core.location.get_relative_package_index_path() subdirectory = 'share/colcon-core/packages' if merged_install: # return if workspace is empty if not (prefix_path / subdirectory).is_dir(): return packages # find all files in the subdirectory for p in (prefix_path / subdirectory).iterdir(): if not p.is_file(): continue if p.name.startswith('.'): continue add_package_runtime_dependencies(p, packages) else: # for each subdirectory look for the package specific file for p in prefix_path.iterdir(): if not p.is_dir(): continue if p.name.startswith('.'): continue p = p / subdirectory / p.name if p.is_file(): add_package_runtime_dependencies(p, packages) # remove unknown dependencies pkg_names = set(packages.keys()) for k in packages.keys(): packages[k] = {d for d in packages[k] if d in pkg_names} return packages def add_package_runtime_dependencies(path, packages): """ Check the path and if it exists extract the packages runtime dependencies. :param Path path: The resource file containing the runtime dependencies :param dict packages: A mapping from package names to the sets of runtime dependencies to add to """ content = path.read_text() dependencies = set(content.split(os.pathsep) if content else []) packages[path.name] = dependencies def order_packages(packages): """ Order packages topologically. 
:param dict packages: A mapping from package name to the set of runtime dependencies :returns: The package names :rtype: list """ # select packages with no dependencies in alphabetical order to_be_ordered = list(packages.keys()) ordered = [] while to_be_ordered: pkg_names_without_deps = [ name for name in to_be_ordered if not packages[name]] if not pkg_names_without_deps: reduce_cycle_set(packages) raise RuntimeError( 'Circular dependency between: ' + ', '.join(sorted(packages))) pkg_names_without_deps.sort() pkg_name = pkg_names_without_deps[0] to_be_ordered.remove(pkg_name) ordered.append(pkg_name) # remove item from dependency lists for k in list(packages.keys()): if pkg_name in packages[k]: packages[k].remove(pkg_name) return ordered def reduce_cycle_set(packages): """ Reduce the set of packages to the ones part of the circular dependency. :param dict packages: A mapping from package name to the set of runtime dependencies which is modified in place """ last_depended = None while len(packages) > 0: # get all remaining dependencies depended = set() for pkg_name, dependencies in packages.items(): depended = depended.union(dependencies) # remove all packages which are not dependent on for name in list(packages.keys()): if name not in depended: del packages[name] if last_depended: # if remaining packages haven't changed return them if last_depended == depended: return packages.keys() # otherwise reduce again last_depended = depended if __name__ == '__main__': # pragma: no cover main() colcon-core-0.17.1/colcon_core/shell/template/prefix_util.py.em000066400000000000000000000356361465053734400245460ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 import argparse from collections import OrderedDict import os from pathlib import Path import sys @{assert shell_extension.FORMAT_STR_COMMENT_LINE is not None}@ FORMAT_STR_COMMENT_LINE = '@(shell_extension.FORMAT_STR_COMMENT_LINE)' @{assert shell_extension.FORMAT_STR_SET_ENV_VAR is not None}@ FORMAT_STR_SET_ENV_VAR = '@(shell_extension.FORMAT_STR_SET_ENV_VAR)' @{assert shell_extension.FORMAT_STR_USE_ENV_VAR is not None}@ FORMAT_STR_USE_ENV_VAR = '@(shell_extension.FORMAT_STR_USE_ENV_VAR)' @{assert shell_extension.FORMAT_STR_INVOKE_SCRIPT is not None}@ FORMAT_STR_INVOKE_SCRIPT = '@(shell_extension.FORMAT_STR_INVOKE_SCRIPT)' # noqa: E501 FORMAT_STR_REMOVE_LEADING_SEPARATOR = '@(shell_extension.FORMAT_STR_REMOVE_LEADING_SEPARATOR)' # noqa: E501 FORMAT_STR_REMOVE_TRAILING_SEPARATOR = '@(shell_extension.FORMAT_STR_REMOVE_TRAILING_SEPARATOR)' # noqa: E501 DSV_TYPE_APPEND_NON_DUPLICATE = 'append-non-duplicate' DSV_TYPE_PREPEND_NON_DUPLICATE = 'prepend-non-duplicate' DSV_TYPE_PREPEND_NON_DUPLICATE_IF_EXISTS = 'prepend-non-duplicate-if-exists' DSV_TYPE_SET = 'set' DSV_TYPE_SET_IF_UNSET = 'set-if-unset' DSV_TYPE_SOURCE = 'source' def main(argv=sys.argv[1:]): # noqa: D103 parser = argparse.ArgumentParser( description='Output shell commands for the packages in topological ' 'order') parser.add_argument( 'primary_extension', help='The file extension of the primary shell') parser.add_argument( 'additional_extension', nargs='?', help='The additional file extension to be considered') parser.add_argument( '--merged-install', action='store_true', help='All install prefixes are merged into a single location') args = parser.parse_args(argv) packages = get_packages(Path(__file__).parent, args.merged_install) ordered_packages = order_packages(packages) for pkg_name in ordered_packages: if _include_comments(): 
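            # e.g. for the sh shell this typically prints a line like
            # "# Package: my_pkg" (the package name is illustrative)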
print( FORMAT_STR_COMMENT_LINE.format_map( {'comment': 'Package: ' + pkg_name})) prefix = os.path.abspath(os.path.dirname(__file__)) if not args.merged_install: prefix = os.path.join(prefix, pkg_name) for line in get_commands( pkg_name, prefix, args.primary_extension, args.additional_extension ): print(line) for line in _remove_ending_separators(): print(line) def get_packages(prefix_path, merged_install): """ Find packages based on colcon-specific files created during installation. :param Path prefix_path: The install prefix path of all packages :param bool merged_install: The flag if the packages are all installed directly in the prefix or if each package is installed in a subdirectory named after the package :returns: A mapping from the package name to the set of runtime dependencies :rtype: dict """ packages = {} # since importing colcon_core isn't feasible here the following constant # must match colcon_core.location.get_relative_package_index_path() subdirectory = 'share/colcon-core/packages' if merged_install: # return if workspace is empty if not (prefix_path / subdirectory).is_dir(): return packages # find all files in the subdirectory for p in (prefix_path / subdirectory).iterdir(): if not p.is_file(): continue if p.name.startswith('.'): continue add_package_runtime_dependencies(p, packages) else: # for each subdirectory look for the package specific file for p in prefix_path.iterdir(): if not p.is_dir(): continue if p.name.startswith('.'): continue p = p / subdirectory / p.name if p.is_file(): add_package_runtime_dependencies(p, packages) # remove unknown dependencies pkg_names = set(packages.keys()) for k in packages.keys(): packages[k] = {d for d in packages[k] if d in pkg_names} return packages def add_package_runtime_dependencies(path, packages): """ Check the path and if it exists extract the packages runtime dependencies. :param Path path: The resource file containing the runtime dependencies :param dict packages: A mapping from package names to the sets of runtime dependencies to add to """ content = path.read_text() dependencies = set(content.split(os.pathsep) if content else []) packages[path.name] = dependencies def order_packages(packages): """ Order packages topologically. :param dict packages: A mapping from package name to the set of runtime dependencies :returns: The package names :rtype: list """ # select packages with no dependencies in alphabetical order to_be_ordered = list(packages.keys()) ordered = [] while to_be_ordered: pkg_names_without_deps = [ name for name in to_be_ordered if not packages[name]] if not pkg_names_without_deps: reduce_cycle_set(packages) raise RuntimeError( 'Circular dependency between: ' + ', '.join(sorted(packages))) pkg_names_without_deps.sort() pkg_name = pkg_names_without_deps[0] to_be_ordered.remove(pkg_name) ordered.append(pkg_name) # remove item from dependency lists for k in list(packages.keys()): if pkg_name in packages[k]: packages[k].remove(pkg_name) return ordered def reduce_cycle_set(packages): """ Reduce the set of packages to the ones part of the circular dependency. 
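    For example (illustrative input), {'a': {'b'}, 'b': {'a'}, 'c': {'a'}} is
    reduced in place to the two packages 'a' and 'b' which form the cycle.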
:param dict packages: A mapping from package name to the set of runtime dependencies which is modified in place """ last_depended = None while len(packages) > 0: # get all remaining dependencies depended = set() for pkg_name, dependencies in packages.items(): depended = depended.union(dependencies) # remove all packages which are not dependent on for name in list(packages.keys()): if name not in depended: del packages[name] if last_depended: # if remaining packages haven't changed return them if last_depended == depended: return packages.keys() # otherwise reduce again last_depended = depended def _include_comments(): # skipping comment lines when COLCON_TRACE is not set speeds up the # processing especially on Windows return bool(os.environ.get('COLCON_TRACE')) def get_commands(pkg_name, prefix, primary_extension, additional_extension): commands = [] package_dsv_path = os.path.join(prefix, 'share', pkg_name, 'package.dsv') if os.path.exists(package_dsv_path): commands += process_dsv_file( package_dsv_path, prefix, primary_extension, additional_extension) return commands def process_dsv_file( dsv_path, prefix, primary_extension=None, additional_extension=None ): commands = [] if _include_comments(): commands.append(FORMAT_STR_COMMENT_LINE.format_map({'comment': dsv_path})) with open(dsv_path, 'r') as h: content = h.read() lines = content.splitlines() basenames = OrderedDict() for i, line in enumerate(lines): # skip over empty or whitespace-only lines if not line.strip(): continue # skip over comments if line.startswith('#'): continue try: type_, remainder = line.split(';', 1) except ValueError: raise RuntimeError( "Line %d in '%s' doesn't contain a semicolon separating the " 'type from the arguments' % (i + 1, dsv_path)) if type_ != DSV_TYPE_SOURCE: # handle non-source lines try: commands += handle_dsv_types_except_source( type_, remainder, prefix) except RuntimeError as e: raise RuntimeError( "Line %d in '%s' %s" % (i + 1, dsv_path, e)) from e else: # group remaining source lines by basename path_without_ext, ext = os.path.splitext(remainder) if path_without_ext not in basenames: basenames[path_without_ext] = set() assert ext.startswith('.') ext = ext[1:] if ext in (primary_extension, additional_extension): basenames[path_without_ext].add(ext) # add the dsv extension to each basename if the file exists for basename, extensions in basenames.items(): if not os.path.isabs(basename): basename = os.path.join(prefix, basename) if os.path.exists(basename + '.dsv'): extensions.add('dsv') for basename, extensions in basenames.items(): if not os.path.isabs(basename): basename = os.path.join(prefix, basename) if 'dsv' in extensions: # process dsv files recursively commands += process_dsv_file( basename + '.dsv', prefix, primary_extension=primary_extension, additional_extension=additional_extension) elif primary_extension in extensions and len(extensions) == 1: # source primary-only files commands += [ FORMAT_STR_INVOKE_SCRIPT.format_map({ 'prefix': prefix, 'script_path': basename + '.' + primary_extension})] elif additional_extension in extensions: # source non-primary files commands += [ FORMAT_STR_INVOKE_SCRIPT.format_map({ 'prefix': prefix, 'script_path': basename + '.' 
+ additional_extension})] return commands def handle_dsv_types_except_source(type_, remainder, prefix): commands = [] if type_ in (DSV_TYPE_SET, DSV_TYPE_SET_IF_UNSET): try: env_name, value = remainder.split(';', 1) except ValueError: raise RuntimeError( "doesn't contain a semicolon separating the environment name " 'from the value') try_prefixed_value = os.path.join(prefix, value) if value else prefix if os.path.exists(try_prefixed_value): value = try_prefixed_value if type_ == DSV_TYPE_SET: commands += _set(env_name, value) elif type_ == DSV_TYPE_SET_IF_UNSET: commands += _set_if_unset(env_name, value) else: assert False elif type_ in ( DSV_TYPE_APPEND_NON_DUPLICATE, DSV_TYPE_PREPEND_NON_DUPLICATE, DSV_TYPE_PREPEND_NON_DUPLICATE_IF_EXISTS ): try: env_name_and_values = remainder.split(';') except ValueError: raise RuntimeError( "doesn't contain a semicolon separating the environment name " 'from the values') env_name = env_name_and_values[0] values = env_name_and_values[1:] for value in values: if not value: value = prefix elif not os.path.isabs(value): value = os.path.join(prefix, value) if ( type_ == DSV_TYPE_PREPEND_NON_DUPLICATE_IF_EXISTS and not os.path.exists(value) ): comment = f'skip extending {env_name} with not existing ' \ f'path: {value}' if _include_comments(): commands.append( FORMAT_STR_COMMENT_LINE.format_map({'comment': comment})) elif type_ == DSV_TYPE_APPEND_NON_DUPLICATE: commands += _append_unique_value(env_name, value) else: commands += _prepend_unique_value(env_name, value) else: raise RuntimeError( 'contains an unknown environment hook type: ' + type_) return commands env_state = {} def _append_unique_value(name, value): global env_state if name not in env_state: if os.environ.get(name): env_state[name] = set(os.environ[name].split(os.pathsep)) else: env_state[name] = set() # append even if the variable has not been set yet, in case a shell script sets the # same variable without the knowledge of this Python script. # later _remove_ending_separators() will cleanup any unintentional leading separator extend = FORMAT_STR_USE_ENV_VAR.format_map({'name': name}) + os.pathsep line = FORMAT_STR_SET_ENV_VAR.format_map( {'name': name, 'value': extend + value}) if value not in env_state[name]: env_state[name].add(value) else: if not _include_comments(): return [] line = FORMAT_STR_COMMENT_LINE.format_map({'comment': line}) return [line] def _prepend_unique_value(name, value): global env_state if name not in env_state: if os.environ.get(name): env_state[name] = set(os.environ[name].split(os.pathsep)) else: env_state[name] = set() # prepend even if the variable has not been set yet, in case a shell script sets the # same variable without the knowledge of this Python script. 
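    # (illustrative: for name 'PATH' and value '/opt/foo' the sh shell extension
    # would typically emit a line like: export PATH="/opt/foo:$PATH")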
# later _remove_ending_separators() will cleanup any unintentional trailing separator extend = os.pathsep + FORMAT_STR_USE_ENV_VAR.format_map({'name': name}) line = FORMAT_STR_SET_ENV_VAR.format_map( {'name': name, 'value': value + extend}) if value not in env_state[name]: env_state[name].add(value) else: if not _include_comments(): return [] line = FORMAT_STR_COMMENT_LINE.format_map({'comment': line}) return [line] # generate commands for removing prepended underscores def _remove_ending_separators(): # do nothing if the shell extension does not implement the logic if FORMAT_STR_REMOVE_TRAILING_SEPARATOR is None: return [] global env_state commands = [] for name in env_state: # skip variables that already had values before this script started prepending if name in os.environ: continue commands += [ FORMAT_STR_REMOVE_LEADING_SEPARATOR.format_map({'name': name}), FORMAT_STR_REMOVE_TRAILING_SEPARATOR.format_map({'name': name})] return commands def _set(name, value): global env_state env_state[name] = value line = FORMAT_STR_SET_ENV_VAR.format_map( {'name': name, 'value': value}) return [line] def _set_if_unset(name, value): global env_state line = FORMAT_STR_SET_ENV_VAR.format_map( {'name': name, 'value': value}) if env_state.get(name, os.environ.get(name)): line = FORMAT_STR_COMMENT_LINE.format_map({'comment': line}) return [line] if __name__ == '__main__': # pragma: no cover try: rc = main() except RuntimeError as e: print(str(e), file=sys.stderr) rc = 1 sys.exit(rc) colcon-core-0.17.1/colcon_core/subprocess.py000066400000000000000000000241231465053734400210470ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 """ Call a subprocess making the stdout and stderr output available via callbacks. The stdout and stderr pipes are read concurrently using the asyncio event loop to maintain the original order as closely as possible. """ import asyncio from concurrent.futures import ALL_COMPLETED from functools import partial import os import platform import shlex import subprocess import sys from typing import Any from typing import Callable from typing import Mapping from typing import Optional from typing import Sequence from colcon_core.logging import colcon_logger SIGINT_RESULT = 'SIGINT' logger = colcon_logger.getChild(__name__) def new_event_loop(): """ Create a new event loop. On Windows return a ProactorEventLoop. :returns: The created event loop """ # TODO: Drop this along with py3.7 if sys.platform == 'win32' and sys.version_info < (3, 8): return asyncio.ProactorEventLoop() return asyncio.new_event_loop() async def run( args: Sequence[str], stdout_callback: Callable[[bytes], None], stderr_callback: Callable[[bytes], None], *, use_pty: Optional[bool] = None, capture_output: Optional[bool] = None, **other_popen_kwargs: Mapping[str, Any] ) -> subprocess.CompletedProcess: """ Run the command described by args. Invokes the callbacks for every line read from the subprocess pipes. See the documentation of `subprocess.Popen() ` for other parameters. 
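    A minimal usage sketch (illustrative, to be awaited from a coroutine):

        completed = await run(
            ['echo', 'hello'], print, print, capture_output=True)

    Each callback is invoked once per line read from the corresponding pipe and
    the captured output is available as bytes on the returned object.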
:param args: args should be a sequence of program arguments :param stdout_callback: the callable is invoked for every line read from the stdout pipe of the process :param stderr_callback: the callable is invoked for every line read from the stderr pipe of the process :param use_pty: whether to use a pseudo terminal :param capture_output: whether to store stdout and stderr :returns: the result of the completed process :rtype subprocess.CompletedProcess """ assert callable(stdout_callback) or stdout_callback is None assert callable(stderr_callback) or stderr_callback is None stdout_capture = [] def _stdout_callback(line): if stdout_callback: stdout_callback(line) if capture_output: stdout_capture.append(line) stderr_capture = [] def _stderr_callback(line): if stderr_callback: stderr_callback(line) if capture_output: stderr_capture.append(line) # if use_pty is neither True nor False choose based on isatty of stdout if use_pty is None: use_pty = sys.stdout.isatty() # the pty module is only supported on Windows if use_pty and platform.system() != 'Linux': use_pty = False rc, _, _ = await _async_check_call( args, _stdout_callback, _stderr_callback, use_pty=use_pty, **other_popen_kwargs) return subprocess.CompletedProcess( args, rc, stdout=b''.join(stdout_capture), stderr=b''.join(stderr_capture)) async def check_output( args: Sequence[str], **other_popen_kwargs: Mapping[str, Any] ) -> subprocess.CompletedProcess: """ Get the output of an invoked command. See the documentation of `subprocess.Popen() ` for other parameters. :param args: args should be a sequence of program arguments :returns: The `stdout` output of the command :rtype: str """ rc, stdout_data, stderr_data = await _async_check_call( args, subprocess.PIPE, subprocess.PIPE, use_pty=False, **other_popen_kwargs) if rc: stderr_data = stderr_data.decode(errors='replace') assert not rc, f'Expected {args} to pass: {stderr_data}' return stdout_data async def _async_check_call( args, stdout_callback, stderr_callback, *, use_pty=None, **other_popen_kwargs ): """Coroutine running the command and invoking the callbacks.""" # choose function to create subprocess if not other_popen_kwargs.pop('shell', False): create_subprocess = asyncio.create_subprocess_exec else: args = [' '.join([escape_shell_argument(a) for a in args])] create_subprocess = asyncio.create_subprocess_shell # choose stdout and stderr arguments for the subprocess stdout = subprocess.PIPE if stdout_callback else subprocess.DEVNULL stderr = subprocess.PIPE if stderr_callback else subprocess.DEVNULL # open pseudo terminals if use_pty: # only import when requested since it is not available on all platforms import pty if stdout_callback: stdout_descriptor, stdout = pty.openpty() if stderr_callback: stderr_descriptor, stderr = pty.openpty() process = await create_subprocess( *args, stdout=stdout, stderr=stderr, **other_popen_kwargs) # read pipes concurrently callbacks = [] if use_pty: if callable(stdout_callback): callbacks.append(_fd2callback(stdout_descriptor, stdout_callback)) if callable(stderr_callback): callbacks.append(_fd2callback(stderr_descriptor, stderr_callback)) else: if callable(stdout_callback): callbacks.append(_pipe2callback( process.stdout, stdout_callback, process.stderr if callable(stderr_callback) else None)) if callable(stderr_callback): callbacks.append(asyncio.ensure_future(_pipe2callback( process.stderr, stderr_callback, process.stdout if callable(stdout_callback) else None))) output = [None, None] if not stdout_callback and not stderr_callback: # asynchronously 
wait for the subprocess await process.wait() else: # asynchronously communicate with the subprocess callbacks.append(process.wait()) if subprocess.PIPE in (stdout_callback, stderr_callback): callbacks.append(_communicate_and_close_fds( process, # collect output in case the process uses any pipes output, # pseudo terminals need to be closed explicitly stdout if use_pty else None, stderr if use_pty else None)) else: callbacks.append(_wait_and_close_fds( process, # pseudo terminals need to be closed explicitly stdout if use_pty else None, stderr if use_pty else None)) # waiting for coroutines is deprecated as of Python 3.8 # convert coroutines into tasks for i, callback in enumerate(callbacks): if not isinstance(callback, asyncio.Task): try: callbacks[i] = asyncio.create_task(callback) except AttributeError: # fallback for Python < 3.7 callbacks[i] = asyncio.ensure_future(callback) try: done, _ = await asyncio.wait(callbacks, return_when=ALL_COMPLETED) except (asyncio.CancelledError, KeyboardInterrupt): # finish the communication with the subprocess done, _ = await asyncio.wait(callbacks, return_when=ALL_COMPLETED) raise finally: # read potential exceptions to avoid asyncio errors for task in done: _ = task.exception() # noqa: F841 return process.returncode, output[0], output[1] def escape_shell_argument(arg): """ Escape the shell arguments for an invocation through a shell. :param arg: A single command line argument :returns: The escaped command line argument :rtype: str """ # some literals must not be quoted unquoted_values = [';', '|', '&&', '||'] if arg in unquoted_values: return arg # some arguments don't need quoting if arg.startswith('`') and arg.endswith('`'): return arg if arg.startswith('$(') and arg.endswith(')'): return arg quoted = shlex.quote(arg) if sys.platform == 'win32': # Windows doesn't like paths with single quotes if len(quoted) > 1 and quoted.startswith("'") and quoted.endswith("'"): quoted = '"' + quoted[1:-1] + '"' return quoted async def _fd2callback(descriptor, callback): """Coroutine reading from fd and invoking the callback for each line.""" func = partial(_blocking_fd2callback, descriptor, callback) loop = asyncio.get_event_loop() await loop.run_in_executor(None, func) def _blocking_fd2callback(descriptor, callback): """Read all lines from the stream invoke the callback for each line.""" with os.fdopen(descriptor) as stream: while True: try: line = stream.readline() except IOError: # this is how the fd signals the EOF break callback(line.encode()) async def _pipe2callback(stream, callback, other_stream=None): """Coroutine reading from pipe and invoking the callback for each line.""" while True: line = await stream.readline() if not line: # this is how the pipe signals the EOF break callback(line) # HACK on Windows sometimes only one of the two streams gets closed # feeding an EOF explicitly ensures that the other coroutine finishes if sys.platform == 'win32' and other_stream: other_stream.feed_eof() async def _wait_and_close_fds(process, stdout=None, stderr=None): """Coroutine waiting for the process and closing all handles.""" try: await process.wait() finally: # always close handles even when a CancelledError is raised if stdout: os.close(stdout) if stderr: os.close(stderr) async def _communicate_and_close_fds( process, output, stdout=None, stderr=None ): """Coroutine communicating with the process and closing all handles.""" stdout_data, stderr_data = await process.communicate() output[0] = stdout_data output[1] = stderr_data if stdout: os.close(stdout) if 
stderr: os.close(stderr) colcon-core-0.17.1/colcon_core/task/000077500000000000000000000000001465053734400172455ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/__init__.py000066400000000000000000000237301465053734400213630ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os import shutil import sys import traceback import warnings from colcon_core.event.command import Command from colcon_core.event.command import CommandEnded from colcon_core.event.job import JobProgress from colcon_core.event.output import StderrLine from colcon_core.event.output import StdoutLine from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_name from colcon_core.subprocess import run as colcon_core_subprocess_run logger = colcon_logger.getChild(__name__) class TaskContext: """The context provided to tasks.""" def __init__(self, *, pkg, args, dependencies): """ Construct a TaskContext. :param pkg: The package descriptor :param args: The parsed command line arguments :param dependencies: The ordered dictionary mapping dependency names to their paths """ self.event_queue = None self.pkg = pkg self.args = args self.dependencies = dependencies def put_event_into_queue(self, event): """ Post a message event into the event queue. The method is not implemented by default and will be replaced at runtime once the event queue is known. :param event: The event """ raise NotImplementedError() class TaskExtensionPoint: """ The interface for task extensions. A task extension performs a specific logic and/or command sequence. For each instance the attributes `TASK_NAME` and `PACKAGE_TYPE` are being set to the (parent) basename of the entry point registering the extension. """ """The version of the task extension interface.""" EXTENSION_POINT_VERSION = '1.0' def add_arguments(self, *, parser): """ Add command line arguments specific to the task. The method is intended to be overridden in a subclass. :param parser: The argument parser """ pass def set_context(self, *, context): """ Set the context before the task is being `__call__`-ed. :param context: The task context """ self.context = context async def __call__(self, *args, **kwargs): """ Execute the task extension logic. This method relays the call to a method named `self.TASK_NAME` which should be implemented in subclasses. :returns: The return code """ task_method = getattr(self, self.TASK_NAME) return await task_method(*args, **kwargs) def print(self, msg, *, file=None): # noqa: A003 """ Post a message event into the event queue. :param msg: The message :param file: The sink to write the message to. An argument of `None` or `sys.stdout` posts a `StdoutLine` event, `sys.stderr` posts a `StderrLine` event. """ if file is None or file == sys.stdout: data = StdoutLine(msg + '\n') elif file == sys.stderr: data = StderrLine(msg + '\n') else: assert False, 'Unknown file object: ' + str(file) self.context.put_event_into_queue(data) def progress(self, message): """ Post a progress event into the event queue. :param msg: The message """ self.context.put_event_into_queue( JobProgress(self.context.pkg.name, message)) async def check_call( context, cmd, *, cwd=None, env=None, shell=False, use_pty=None ): # pragma: no cover """ Run the command described by cmd. Post a `Command` event to the queue describing the exact invocation in order to allow reproducing it. 
All output to `stdout` and `stderr` is posted as `StdoutLine` and `StderrLine` events to the event queue. This function has been depreated, use ``colcon_core.task.run()`` instead. :param cmd: The command and its arguments :param cwd: the working directory for the subprocess :param env: a dictionary with environment variables :param shell: whether to use the shell as the program to execute :param use_pty: whether to use a pseudo terminal :returns: the result of the completed process :rtype subprocess.CompletedProcess """ warnings.warn( 'colcon_core.task.check_call() has been deprecated, use ' 'colcon_core.task.run() instead', stacklevel=2) return await run( context, cmd, cwd=cwd, env=env, shell=shell, use_pty=use_pty) async def run( context, cmd, *, use_pty=None, capture_output=None, **other_popen_kwargs ): """ Run the command described by cmd. Post a `Command` event to the queue describing the exact invocation in order to allow reproducing it. All output to `stdout` and `stderr` is posted as `StdoutLine` and `StderrLine` events to the event queue. See the documentation of `subprocess.Popen() ` for other parameters. :param cmd: The command and its arguments :param use_pty: whether to use a pseudo terminal :param capture_output: whether to store stdout and stderr :returns: the result of the completed process :rtype subprocess.CompletedProcess """ def stdout_callback(line): context.put_event_into_queue(StdoutLine(line)) def stderr_callback(line): context.put_event_into_queue(StderrLine(line)) cwd = other_popen_kwargs.get('cwd', None) env = other_popen_kwargs.get('env', None) shell = other_popen_kwargs.get('shell', False) context.put_event_into_queue( Command(cmd, cwd=cwd, env=env, shell=shell)) completed = await colcon_core_subprocess_run( cmd, stdout_callback, stderr_callback, use_pty=use_pty, capture_output=capture_output, **other_popen_kwargs) context.put_event_into_queue( CommandEnded( cmd, cwd=cwd, env=env, shell=shell, returncode=completed.returncode)) return completed def get_task_extensions(task_name, *, unique_instance=False): """ Get the available task extensions. The extensions are ordered by their entry point name. :param str task_name: The entry point name identifying a group of task extensions :param bool unique_instance: The flag if the returned instances should be unique or cached instances can be returned instead :rtype: OrderedDict """ extensions = instantiate_extensions( task_name, unique_instance=unique_instance) task_basename = task_name.split('.')[-1] for name in list(extensions.keys()): extension = extensions[name] assert hasattr(extension, task_basename) extension.TASK_NAME = task_basename extension.PACKAGE_TYPE = name return order_extensions_by_name(extensions) def add_task_arguments(parser, task_name): """ Add the command line arguments for the task extensions. :param parser: The argument parser :param str task_name: The entry point name identifying a group of task extensions """ extensions = get_task_extensions(task_name, unique_instance=True) for extension_name, extension in extensions.items(): group = parser.add_argument_group( title=f"Arguments for '{extension_name}' packages") try: retval = extension.add_arguments(parser=group) assert retval is None, 'add_arguments() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in task extension exc = traceback.format_exc() logger.error( f"Exception in task extension '{extension.TASK_NAME}." 
f"{extension.PACKAGE_TYPE}': {e}\n{exc}") # skip failing extension, continue with next one def get_task_extension(task_name, package_type): """ Get a specific task extension. :param str task_name: The entry point name identifying a group of task extensions :param str package_type: The package type identifying a task extension within the group :returns: The task extension """ extensions = instantiate_extensions( task_name, unique_instance=True) if package_type not in extensions: return None extension = extensions[package_type] extension.TASK_NAME = task_name.split('.')[-1] extension.PACKAGE_TYPE = package_type return extension def create_file(args, rel_path, *, content=None): """ Create a file within the install base. Creates the containing directory if necessary. :param args: The parsed command line arguments containing the install base :param str rel_path: The relative path of file :param str content: The content of the created file :returns: The task extension """ dst = os.path.join(args.install_base, rel_path) os.makedirs(os.path.dirname(dst), exist_ok=True) with open(dst, 'w') as h: if content is not None: h.write(content) def install(args, rel_src, rel_dst): """ Install or symlink a file. Creates the containing directory if necessary. :param args: The parsed command line arguments containing the source path as well as the install base :param str rel_src: The source path relative to the path :param str rel_dst: The destination path relative to the install base """ src = os.path.join(args.path, rel_src) dst = os.path.join(args.install_base, rel_dst) os.makedirs(os.path.dirname(dst), exist_ok=True) if not args.symlink_install: if os.path.islink(dst): os.unlink(dst) shutil.copy(src, dst) else: if os.path.islink(dst): if not os.path.exists(dst) or not os.path.samefile(src, dst): os.unlink(dst) elif os.path.isfile(dst): os.remove(dst) elif os.path.isdir(dst): shutil.rmtree(dst) if not os.path.exists(dst): os.symlink(src, dst) colcon-core-0.17.1/colcon_core/task/python/000077500000000000000000000000001465053734400205665ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/__init__.py000066400000000000000000000027771465053734400227140ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os def get_setup_data(pkg, env): """ Get the options from the Python manifest. :param pkg: The package descriptor :param env: The environment :returns: The options :rtype: dict """ # get data from pkg metadata if available key = 'get_python_setup_options' if key not in pkg.metadata: return {} return dict(pkg.metadata[key](env)) def get_data_files_mapping(data_files): """ Transform the data_files structure into a dictionary. 
:param data_files: either a list of source files or a list of tuples where the first element is the destination path and the second element is a list of source files :returns: a dictionary mapping the source file to a destination file :rtype: dict """ mapping = {} for data_file in data_files: if isinstance(data_file, tuple): assert len(data_file) == 2 dest = data_file[0] assert not os.path.isabs(dest) sources = data_file[1] assert isinstance(sources, list) for source in sources: assert not os.path.isabs(source), \ f"'data_files' must be relative, '{source}' is absolute" mapping[source] = os.path.join(dest, os.path.basename(source)) else: assert not os.path.isabs(data_file) mapping[data_file] = os.path.basename(data_file) return mapping colcon-core-0.17.1/colcon_core/task/python/build.py000066400000000000000000000365171465053734400222530ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from configparser import ConfigParser from contextlib import suppress import locale import os from pathlib import Path import shutil import sys from colcon_core.environment import create_environment_hooks from colcon_core.environment import create_environment_scripts from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.python_install_path import get_python_install_path from colcon_core.shell import create_environment_hook from colcon_core.shell import get_command_environment from colcon_core.subprocess import check_output from colcon_core.task import run from colcon_core.task import TaskExtensionPoint from colcon_core.task.python import get_data_files_mapping from colcon_core.task.python import get_setup_data from colcon_core.task.python.template import expand_template logger = colcon_logger.getChild(__name__) _PYTHON_CMD = [ sys.executable, '-W', 'ignore:setup.py install is deprecated', ] def _get_install_scripts(path): setup_cfg_path = os.path.join(path, 'setup.cfg') if not os.path.exists(setup_cfg_path): return parser = ConfigParser() parser.optionxform = str with open(setup_cfg_path, encoding='utf-8') as f: parser.read_file(f) return parser.get('install', 'install-scripts', fallback=None) class PythonBuildTask(TaskExtensionPoint): """Build Python packages.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') async def build(self, *, additional_hooks=None): # noqa: D102 pkg = self.context.pkg args = self.context.args logger.info(f"Building Python package in '{args.path}'") try: env = await get_command_environment( 'setup_py', args.build_base, self.context.dependencies) except RuntimeError as e: logger.error(str(e)) return 1 setup_py_data = get_setup_data(self.context.pkg, env) # override installation locations prefix_override = Path(args.build_base) / 'prefix_override' expand_template( Path(__file__).parent / 'template' / 'sitecustomize.py.em', prefix_override / 'sitecustomize.py', { 'current_prefix': sys.prefix, 'site_prefix': args.install_base, }) # `setup.py develop|install` requires the python lib path to exist python_lib = os.path.join( args.install_base, self._get_python_lib(args)) os.makedirs(python_lib, exist_ok=True) distutils_commands = os.path.join( os.path.dirname(__file__), 'colcon_distutils_commands') # and being in the PYTHONPATH env = dict(env) env['PYTHONPATH'] = str(prefix_override) + os.pathsep + \ distutils_commands + os.pathsep + \ python_lib + os.pathsep + env.get('PYTHONPATH', '') # 
coverage capture interferes with sitecustomize # See also: https://docs.python.org/3/library/site.html#module-site # See also: colcon/colcon-core#579 env.pop('COV_CORE_SOURCE', None) # determine if setuptools specific commands are available available_commands = await self._get_available_commands( args.path, env) if not args.symlink_install or 'develop' not in available_commands: rc = await self._undo_develop(pkg, args, env) if rc: return rc # invoke `setup.py install` step with lots of arguments # to avoid placing any files in the source space cmd = _PYTHON_CMD + ['setup.py'] if 'egg_info' in available_commands: # `setup.py egg_info` requires the --egg-base to exist os.makedirs(args.build_base, exist_ok=True) # symlinks are resolved for the paths when used as cwd below cmd += [ 'egg_info', '--egg-base', os.path.relpath( os.path.realpath(args.build_base), os.path.realpath(args.path))] cmd += [ 'build', '--build-base', os.path.join( args.build_base, 'build'), 'install', '--record', os.path.join(args.build_base, 'install.log')] # Extract and explicitly pass install-scripts to setuptools. # When part of a virtual environment, this option is specifically # ignored in configuration files by setuptools, but not on the # command line. install_scripts = _get_install_scripts(args.path) if install_scripts: cmd += ['--install-scripts', install_scripts] if 'egg_info' in available_commands: # prevent installation of dependencies specified in setup.py cmd.append('--single-version-externally-managed') self._append_install_layout(args, cmd) if setup_py_data.get('data_files'): cmd += ['install_data'] if rc is not None: cmd += ['--force'] completed = await run( self.context, cmd, cwd=args.path, env=env) if completed.returncode: return completed.returncode else: rc = self._undo_install(pkg, args, setup_py_data, python_lib) if rc: return rc temp_symlinks = self._symlinks_in_build(args, setup_py_data) # invoke `setup.py develop` step in build space # to avoid placing any files in the source space try: # --editable causes this to skip creating/editing the # easy-install.pth file cmd = _PYTHON_CMD + [ 'setup.py', 'develop', '--editable', '--build-directory', os.path.join(args.build_base, 'build'), '--no-deps', ] if setup_py_data.get('data_files'): cmd += ['symlink_data'] if rc is not None: cmd += ['--force'] completed = await run( self.context, cmd, cwd=args.build_base, env=env) finally: # Remove symlinks that were only needed during build time for symlink in temp_symlinks: os.unlink(symlink) if completed.returncode: return completed.returncode # explicitly add the build directory to the PYTHONPATH # to maintain the desired order if additional_hooks is None: additional_hooks = [] base_path = args.build_base # if the Python packages are in a subdirectory # that needs to be appended to the build directory package_dir = setup_py_data.get('package_dir') or {} if '' in package_dir: base_path = os.path.join(base_path, package_dir['']) additional_hooks += create_environment_hook( 'pythonpath_develop', Path(base_path), pkg.name, 'PYTHONPATH', base_path, mode='prepend') hooks = create_environment_hooks(args.install_base, pkg.name) create_environment_scripts( pkg, args, default_hooks=hooks, additional_hooks=additional_hooks) async def _get_available_commands(self, path, env): output = await check_output( _PYTHON_CMD + ['setup.py', '--help-commands'], cwd=path, env=env) commands = set() for line in output.splitlines(): if not line.startswith(b' '): continue try: index = line.index(b' ', 2) except ValueError: continue if 
index == 2: continue commands.add( line[2:index].decode(locale.getpreferredencoding(False))) return commands async def _undo_develop(self, pkg, args, env): """ Undo a previously run 'develop' command. :returns: None if develop was not previously detected, otherwise an integer return code where zero indicates success. """ # undo previous develop if .egg-info is found and develop symlinks egg_info = os.path.join( args.build_base, '%s.egg-info' % pkg.name.replace('-', '_')) setup_py_build_space = os.path.join(args.build_base, 'setup.py') if os.path.exists(egg_info) and os.path.islink(setup_py_build_space): cmd = _PYTHON_CMD + [ 'setup.py', 'develop', '--uninstall', '--editable', '--build-directory', os.path.join(args.build_base, 'build') ] completed = await run( self.context, cmd, cwd=args.build_base, env=env) if not completed.returncode: os.remove(setup_py_build_space) return completed.returncode def _undo_install(self, pkg, args, setup_py_data, python_lib): """ Undo a previously run 'install' command. :returns: None if install was not previously detected, otherwise an integer return code where zero indicates success. """ # undo previous install if install.log is found install_log = os.path.join(args.build_base, 'install.log') if not os.path.exists(install_log): return with open(install_log, 'r') as h: lines = [line.rstrip() for line in h.readlines()] packages = setup_py_data.get('packages') or [] for module_name in packages: if module_name in sys.modules: logger.warning( f"Switching to 'develop' for package '{pkg.name}' while " 'it is being used might result in import errors later') break # remove previously installed files directories = set() python_lib = python_lib + os.sep for line in lines: if not os.path.exists(line): continue if not line.startswith(python_lib): logger.debug( 'While undoing a previous installation files outside the ' f'Python library path are being ignored: {line}') continue if not os.path.isdir(line): os.remove(line) # collect all parent directories until install base while True: line = os.path.dirname(line) if not line.startswith(python_lib): break directories.add(line) # remove empty directories for d in sorted(directories, reverse=True): with suppress(OSError): os.rmdir(d) os.remove(install_log) return 0 def _symlinks_in_build(self, args, setup_py_data): items = ['setup.py'] renamed_items = [] # add setup.cfg if available if os.path.exists(os.path.join(args.path, 'setup.cfg')): items.append('setup.cfg') # add all first level packages package_dir = setup_py_data.get('package_dir') or {} packages = setup_py_data.get('packages') or [] for package in packages: if '.' 
in package: continue if package in package_dir: items.append(package_dir[package]) renamed_items.append((package_dir[package], package)) if package_dir[package] in package_dir: package_dir_package = package_dir[package] raise RuntimeError( f"The package_dir contains a mapping from '{package}' " f"to '{package_dir_package}' which is also a key") if package_dir[package] in packages: package_dir_package = package_dir[package] raise RuntimeError( f"The value '{package_dir_package}' in package_dir is " 'also listed in packages') elif '' in package_dir: items.append(os.path.join(package_dir[''], package)) else: items.append(package) # relative python-ish paths are allowed as entries in py_modules, see: # https://docs.python.org/3.6/distutils/setupscript.html#listing-individual-modules py_modules = setup_py_data.get('py_modules') if py_modules: py_modules_list = [ p.replace('.', os.path.sep) + '.py' for p in py_modules] for py_module in py_modules_list: if not os.path.exists(os.path.join(args.path, py_module)): raise RuntimeError( f"Provided py_modules '{py_module}' does not exist") items += py_modules_list data_files = get_data_files_mapping( setup_py_data.get('data_files') or []) for source in data_files.keys(): # work around data files incorrectly defined as not relative if os.path.isabs(source): source = os.path.relpath(source, args.path) items.append(source) for script in setup_py_data.get('scripts') or []: items.append(script) symlinks = [] # symlink files / directories from source space into build space for item in items: symlinks.append(( os.path.join(args.path, item), os.path.join(args.build_base, item))) # provide a symlink within the build space if a module name is # changed by the mapping specified in package_dir temp_symlinks = [] for rel_src, rel_dst in renamed_items: symlinks.append(( os.path.join(args.path, rel_src), os.path.join(args.build_base, rel_dst))) # The other loop added an unrenamed symlink that should be removed # after the setup.py is invoked temp_symlinks.append(os.path.join(args.build_base, rel_src)) for src, dst in symlinks: os.makedirs(os.path.dirname(dst), exist_ok=True) if os.path.islink(dst): if not os.path.exists(dst) or not os.path.samefile(src, dst): os.unlink(dst) elif os.path.isfile(dst) and not os.path.samefile(src, dst): os.remove(dst) elif os.path.isdir(dst) and not os.path.samefile(src, dst): shutil.rmtree(dst) if not os.path.exists(dst): os.symlink(src, dst) return temp_symlinks def _get_python_lib(self, args): path = get_python_install_path('purelib', {'base': args.install_base}) return os.path.relpath(path, start=args.install_base) def _append_install_layout(self, args, cmd): # Debian patches sysconfig to return a path containing dist-packages # instead of site-packages when using the default install scheme. # TODO(sloretz) this is potentially unused now that # get_python_install_path avoids the deb_system scheme. 
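        # For example, on a Debian patched interpreter _get_python_lib() may
        # return something like 'lib/python3/dist-packages'; only in that
        # case is the Debian specific install layout requested below.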
if 'dist-packages' in self._get_python_lib(args): cmd += ['--install-layout', 'deb'] colcon-core-0.17.1/colcon_core/task/python/colcon_distutils_commands/000077500000000000000000000000001465053734400260305ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/colcon_distutils_commands/__init__.py000066400000000000000000000000001465053734400301270ustar00rootroot00000000000000colcon_distutils_commands-0.0.0.dist-info/000077500000000000000000000000001465053734400355375ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/colcon_distutils_commandsMETADATA000066400000000000000000000001051465053734400366360ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/colcon_distutils_commands/colcon_distutils_commands-0.0.0.dist-infoMetadata-Version: 2.1 Name: colcon_distutils_commands Version: 0.0.0 entry_points.txt000066400000000000000000000001351465053734400410340ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/colcon_distutils_commands/colcon_distutils_commands-0.0.0.dist-info[distutils.commands] symlink_data = colcon_core.distutils.commands.symlink_data:symlink_data colcon-core-0.17.1/colcon_core/task/python/template/000077500000000000000000000000001465053734400224015ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/template/__init__.py000066400000000000000000000002561465053734400245150ustar00rootroot00000000000000# Copyright 2022 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 from colcon_core.shell.template import expand_template # noqa: F401 colcon-core-0.17.1/colcon_core/task/python/template/sitecustomize.py.em000066400000000000000000000002121465053734400262550ustar00rootroot00000000000000import sys if sys.prefix == @repr(current_prefix): sys.real_prefix = sys.prefix sys.prefix = sys.exec_prefix = @repr(site_prefix) colcon-core-0.17.1/colcon_core/task/python/test/000077500000000000000000000000001465053734400215455ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/task/python/test/__init__.py000066400000000000000000000202251465053734400236570ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import re import traceback from colcon_core.extension_point import load_extension_points from colcon_core.logging import colcon_logger from colcon_core.package_augmentation.python import extract_dependencies from colcon_core.plugin_system import get_first_line_doc from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_priority from colcon_core.plugin_system import satisfies_version from colcon_core.shell import get_command_environment from colcon_core.task import TaskExtensionPoint from colcon_core.task.python import get_setup_data logger = colcon_logger.getChild(__name__) class PythonTestTask(TaskExtensionPoint): """Test Python packages.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def add_arguments(self, *, parser): # noqa: D102 add_python_testing_step_arguments(parser) async def test(self, *, additional_hooks=None): # noqa: D102 args = self.context.args logger.info(f"Testing Python package in '{args.path}'") try: env = await get_command_environment( 'setup_py', args.build_base, self.context.dependencies) except RuntimeError as e: logger.error(str(e)) return 1 setup_py_data = get_setup_data(self.context.pkg, env) # select the step extension which should perform 
the python testing if args.python_testing: key = args.python_testing extension = get_python_testing_step_extension(key) if extension is None: logger.error( f"Failed to load Python testing step extension '{key}'") return 1 else: extensions = get_python_testing_step_extensions() for key, extension in extensions.items(): logger.log(1, f"test() by extension '{key}'") try: matched = extension.match(self.context, env, setup_py_data) except Exception as e: # noqa: F841 # catch exceptions raised in python testing step extension exc = traceback.format_exc() logger.error( 'Exception in Python testing step extension ' f"'{extension.STEP_TYPE}': {e}\n{exc}") # skip failing extension, continue with next one continue if matched: break else: logger.warning( 'No Python Testing Step extension matched in ' f"'{args.path}'") return logger.log(1, f"test.step() by extension '{key}'") try: if 'PYTHONDONTWRITEBYTECODE' not in env: env = dict(env) env['PYTHONDONTWRITEBYTECODE'] = '1' return await extension.step(self.context, env, setup_py_data) except Exception as e: # noqa: F841 # catch exceptions raised in python testing step extension exc = traceback.format_exc() logger.error( 'Exception in Python testing step extension ' f"'{extension.STEP_TYPE}': {e}\n{exc}") return 1 class PythonTestingStepExtensionPoint: """ The interface for Python testing step extensions. A Python testing step extension performs testing of a Python package. For each instance the attribute `STEP_TYPE` is being set to the basename of the entry point registering the extension. """ """The version of the Python testing step extension interface.""" EXTENSION_POINT_VERSION = '1.0' """The default priority of Python testing step extensions.""" PRIORITY = 100 def add_arguments(self, *, parser): """ Add command line arguments specific to the Python testing step. The method is intended to be overridden in a subclass. :param parser: The argument parser """ pass def match(self, context, env, setup_py_data): """ Determine if this instance claims to process the specific package. This method must be overridden in a subclass. :param context: The task context describing the package :param env: The environment dictionary :param setup_py_data: The data extracted from the setup.py file :returns: True if it claims to process the package, False otherwise :rtype: bool """ raise NotImplementedError() async def step(self): """ Execute the Python testing step logic. This method must be overridden in a subclass. :returns: The return code """ raise NotImplementedError() def get_python_testing_step_extensions(*, group_name=None): """ Get the available Python testing step extensions. The extensions are ordered by their priority and entry point name. :rtype: OrderedDict """ if group_name is None: group_name = 'colcon_core.python_testing' extensions = instantiate_extensions(group_name, unique_instance=False) for name in list(extensions.keys()): extension = extensions[name] extension.STEP_TYPE = name return order_extensions_by_priority(extensions) def add_python_testing_step_arguments(parser): """ Add the command line arguments for the Python testing step extensions. 
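    Each extension may register its own options; for example, the pytest
    step contributes `--pytest-args` and `--pytest-with-coverage`.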
:param parser: The argument parser """ extensions = get_python_testing_step_extensions() descriptions = '' for key, extension in extensions.items(): desc = get_first_line_doc(extension) if not desc: # show extensions without a description # to mention the available options desc = '' # it requires a custom formatter to maintain the newline descriptions += f'\n* {key}: {desc}' parser.add_argument( '--python-testing', type=str, choices=sorted(extensions.keys()), help='The Python testing framework to use (default: determined ' 'based on the packages `tests_require`)' f'{descriptions}') for extension in extensions.values(): try: retval = extension.add_arguments(parser=parser) assert retval is None, 'add_arguments() should return None' except Exception as e: # noqa: F841 # catch exceptions raised in package selection extension exc = traceback.format_exc() logger.error( 'Exception in Python testing step extension ' f"'{extension.STEP_TYPE}': {e}\n{exc}") # skip failing extension, continue with next one def get_python_testing_step_extension(step_name): """ Get a specific Python testing step extension. :param str step_name: The entry point name of the extension :returns: A unique instance of the extension, otherwise None """ group_name = 'colcon_core.python_testing' extension_types = load_extension_points(group_name) extension_names = list(extension_types.keys()) if step_name not in extension_names: return None extension_names.remove(step_name) extensions = instantiate_extensions( group_name, exclude_names=extension_names, unique_instance=True) if step_name not in extensions: return None extension = extensions[step_name] extension.STEP_NAME = step_name return extension def has_test_dependency(setup_py_data, name): """ Check if the package has a specific test dependency. :param dict setup_py_data: The meta information of the package :returns: True if the package has a test dependency on the given name, False otherwise :rtype: bool """ tests_require = extract_dependencies(setup_py_data).get('test') for d in tests_require or []: # the name might be followed by a version # separated by any of the following: ' ', <, >, <=, >=, ==, != parts = re.split(r' |<|=|>|!', d) if parts[0] == name: return True return False colcon-core-0.17.1/colcon_core/task/python/test/pytest.py000066400000000000000000000163111465053734400234510ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from pathlib import PurePosixPath import sys from colcon_core.event.test import TestFailure from colcon_core.plugin_system import satisfies_version from colcon_core.plugin_system import SkipExtensionException from colcon_core.task import run from colcon_core.task.python.test import has_test_dependency from colcon_core.task.python.test import PythonTestingStepExtensionPoint from colcon_core.verb.test import logger from packaging.version import Version class PytestPythonTestingStep(PythonTestingStepExtensionPoint): """Use `pytest` to test Python packages.""" # use a higher priority than the default priority # in order to become the default PRIORITY = 200 def __init__(self): # noqa: D107 super().__init__() satisfies_version( PythonTestingStepExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') try: import pytest # noqa: F401 except ImportError: raise SkipExtensionException("'pytest' not found") def add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--pytest-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to pytests. 
' 'Arguments matching other options must be prefixed by a space,\n' 'e.g. --pytest-args " --help" (stdout might not be shown by ' 'default, e.g. add `--event-handlers console_cohesion+`)') parser.add_argument( '--pytest-with-coverage', action='store_true', help='Generate coverage information') def match(self, context, env, setup_py_data): # noqa: D102 return has_test_dependency(setup_py_data, 'pytest') async def step(self, context, env, setup_py_data): # noqa: D102 cmd = [sys.executable, '-m', 'pytest'] junit_xml_path = Path( context.args.test_result_base if context.args.test_result_base else context.args.build_base) / 'pytest.xml' # avoid using backslashes in the PYTEST_ADDOPTS env var on Windows args = [ '--tb=short', '--junit-xml=' + str(PurePosixPath(*junit_xml_path.parts)), '--junit-prefix=' + context.pkg.name, ] # use -o option only when available # https://github.com/pytest-dev/pytest/blob/3.3.0/CHANGELOG.rst from pytest import __version__ as pytest_version if Version(pytest_version) >= Version('3.3.0'): args += [ '-o', 'cache_dir=' + str(PurePosixPath( *(Path(context.args.build_base).parts)) / '.pytest_cache'), ] env = dict(env) if ( context.args.pytest_with_coverage or has_test_dependency(setup_py_data, 'pytest-cov') ): try: from pytest_cov import __version__ as pytest_cov_version except ImportError: logger.warning( 'Test coverage will not be produced for package ' f"'{context.pkg.name}' since the pytest extension 'cov' " 'was not found') else: args += [ '--cov=' + str(PurePosixPath( *(Path(context.args.path).parts))), '--cov-report=html:' + str(PurePosixPath( *(Path(context.args.build_base).parts)) / 'coverage.html'), '--cov-report=xml:' + str(PurePosixPath( *(Path(context.args.build_base).parts)) / 'coverage.xml'), ] # use --cov-branch option only when available # https://github.com/pytest-dev/pytest-cov/blob/v2.5.0/CHANGELOG.rst if Version(pytest_cov_version) >= Version('2.5.0'): args += [ '--cov-branch', ] else: logger.warning( 'Test coverage will be produced but will not contain ' 'branch coverage information because the pytest ' "extension 'cov' does not support it (need 2.5.0, " f'have {pytest_cov_version})') env['COVERAGE_FILE'] = os.path.join( context.args.build_base, '.coverage') if context.args.retest_until_fail: try: import pytest_repeat # noqa: F401 except ImportError: logger.warning( "Ignored '--retest-until-fail' for package " f"'{context.pkg.name}' since the pytest extension " "'repeat' was not found") else: count = context.args.retest_until_fail + 1 args += [f'--count={count}'] if context.args.retest_until_pass: try: import pytest_rerunfailures # noqa: F401 except ImportError: logger.warning( "Ignored '--retest-until-pass' for package " f"'{context.pkg.name}' since pytest extension " "'rerunfailures' was not found") else: args += [f'--reruns={context.args.retest_until_pass}'] if context.args.pytest_args is not None: args += context.args.pytest_args if args: env['PYTEST_ADDOPTS'] = ' '.join( a if ' ' not in a else f'"{a}"' for a in args) # create dummy result in case the invocation fails early # and doesn't generate a result file at all junit_xml_path.parent.mkdir(parents=True, exist_ok=True) junit_xml_path.write_text(f""" """) # noqa: E501 completed = await run( context, cmd, cwd=context.args.path, env=env) # use local import to avoid a dependency on pytest try: from _pytest.main import ExitCode EXIT_CODE_TESTS_FAILED = ExitCode.TESTS_FAILED # noqa: N806 except ImportError: # support pytest < 5.0 from _pytest.main import EXIT_TESTSFAILED EXIT_CODE_TESTS_FAILED = 
EXIT_TESTSFAILED # noqa: N806 if completed.returncode == EXIT_CODE_TESTS_FAILED: context.put_event_into_queue( TestFailure(context.pkg.name)) try: from _pytest.main import ExitCode EXIT_CODE_NO_TESTS = ExitCode.NO_TESTS_COLLECTED # noqa: N806 except ImportError: # support pytest < 5.0 from _pytest.main import EXIT_NOTESTSCOLLECTED EXIT_CODE_NO_TESTS = EXIT_NOTESTSCOLLECTED # noqa: N806 if completed.returncode not in ( EXIT_CODE_NO_TESTS, EXIT_CODE_TESTS_FAILED ): return completed.returncode colcon-core-0.17.1/colcon_core/task/python/test/setuppy_test.py000066400000000000000000000036321465053734400246730ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from sys import executable from colcon_core.plugin_system import satisfies_version from colcon_core.task import run from colcon_core.task.python.test import PythonTestingStepExtensionPoint from colcon_core.verb.test import logger class SetuppyPythonTestingStep(PythonTestingStepExtensionPoint): """Use `unittest` to test packages.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version( PythonTestingStepExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--unittest-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to Python unittests. ' 'Arguments matching other options must be prefixed by a space,\n' 'e.g. --unittest-args " --help" (stdout might not be shown by ' 'default, e.g. add `--event-handlers console_cohesion+`)') def match(self, context, env, setup_py_data): # noqa: D102 return True async def step(self, context, env, setup_py_data): # noqa: D102 if context.args.retest_until_fail: logger.warning( "Ignored '--retest-until-fail' for package " f"'{context.pkg.name}' since 'unittest' does not support the " 'usage') if context.args.retest_until_pass: logger.warning( "Ignored '--retest-until-pass' for package " f"'{context.pkg.name}' since 'unittest' does not support the " 'usage') cmd = [executable, '-m', 'unittest', '-v'] if context.args.unittest_args is not None: cmd += context.args.unittest_args completed = await run( context, cmd, cwd=context.args.path, env=env) return completed.returncode colcon-core-0.17.1/colcon_core/topological_order.py000066400000000000000000000052551465053734400223730ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core.package_decorator import add_recursive_dependencies from colcon_core.package_decorator import get_decorators def topological_order_packages( descriptors, direct_categories=None, recursive_categories=None, ): """ Order packages topologically. :param descriptors: the package descriptors :type descriptors: set of :py:class:`colcon_core.package_descriptor.PackageDescriptor` :returns: list of package decorators :rtype: list of :py:class:`colcon_core.package_decorator.PackageDecorator` """ decorators = get_decorators(descriptors) add_recursive_dependencies( decorators, direct_categories=direct_categories, recursive_categories=recursive_categories) return topological_order_decorators(decorators) def topological_order_decorators(decorators): """ Order decorated package descriptors topologically. 
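    For example, given packages `a`, `b` (depending on `a`), and `c`
    (depending on `a`), the resulting order is `a`, `b`, `c`: `a` becomes
    ready first, and the equally ready `b` and `c` are then ordered
    alphabetically.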
:param decorators: the decorated package descriptors :type decorators: list of :py:class:`colcon_core.package_decorator.PackageDecorator` :returns: list of package decorators :rtype: list of :py:class:`colcon_core.package_decorator.PackageDecorator` """ # map the set of remaining dependencies for each decorator queued = {} for decorator in decorators: queued[decorator] = { d.name for d in decorator.recursive_dependencies } ordered = [] while len(ordered) < len(decorators): # remove dependencies on already ordered packages ordered_names = {d.descriptor.name for d in ordered} for q in queued.values(): q.difference_update(ordered_names) # find all queued packages without remaining dependencies ready = [decorator for decorator, r in queued.items() if not r] if not ready: lines = [ '%s: %s' % ( decorator.descriptor.name, sorted(r)) for decorator, r in queued.items()] lines.sort() raise RuntimeError( 'Unable to order packages topologically:\n' + '\n'.join(lines)) # order ready jobs alphabetically for a deterministic order ready.sort(key=lambda d: d.descriptor.name) # add all ready jobs to ordered list for r in ready: ordered.append(r) queued.pop(r) # order recursive dependencies for each decorator ordered_name_list = [d.descriptor.name for d in ordered] for decorator in ordered: decorator.recursive_dependencies = sorted( decorator.recursive_dependencies, key=ordered_name_list.index) return ordered colcon-core-0.17.1/colcon_core/verb/000077500000000000000000000000001465053734400172415ustar00rootroot00000000000000colcon-core-0.17.1/colcon_core/verb/__init__.py000066400000000000000000000146341465053734400213620ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import copy import logging import os from pathlib import Path from colcon_core.logging import colcon_logger from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_name logger = colcon_logger.getChild(__name__) class VerbExtensionPoint: """ The interface for verb extensions. A verb extension provides a verb to the command line tool. For each instance the attribute `VERB_NAME` is being set to the basename of the entry point registering the extension. """ """The version of the verb extension interface.""" EXTENSION_POINT_VERSION = '1.0' def add_arguments(self, *, parser): """ Add command line arguments specific to the verb. The method is intended to be overridden in a subclass. :param parser: The argument parser """ pass def main(self, *, context): """ Execute the verb extension logic. This method must be overridden in a subclass. :param context: The context providing the parsed command line arguments :returns: The return code """ raise NotImplementedError() def get_verb_extensions(*, group_name=None): """ Get the available verb extensions. The extensions are ordered by their entry point name. :rtype: OrderedDict """ if group_name is None: group_name = __name__ extensions = instantiate_extensions(group_name) for name, extension in extensions.items(): extension.VERB_NAME = name return order_extensions_by_name(extensions) def check_and_mark_build_tool(build_base, *, this_build_tool='colcon'): """ Check the marker file for the previous build tool, otherwise create it. The marker filename is `.built_by`. 
:param str build_base: The build directory :param str this_build_tool: The name of this build tool :raises RuntimeError: if the marker file contains the name of a different build tool """ marker_path = Path(build_base) / '.built_by' if marker_path.parent.is_dir(): if marker_path.is_file(): previous_build_tool = marker_path.read_text().rstrip() if previous_build_tool == this_build_tool: return raise RuntimeError( f"The build directory '{build_base}' was created by " f"'{previous_build_tool}'. Please remove the build directory " 'or pick a different one.') else: os.makedirs(build_base, exist_ok=True) marker_path.write_text(this_build_tool + '\n') def check_and_mark_install_layout(install_base, *, merge_install): """ Check the marker file for the previous install layout, otherwise create it. The marker filename is `.colcon_install_layout`. :param str install_base: The install directory :param bool merge_install: The flag if all packages share the same prefix :raises RuntimeError: if the marker file contains a different install layout """ this_install_layout = 'merged' if merge_install else 'isolated' marker_path = Path(install_base) / '.colcon_install_layout' if marker_path.parent.is_dir(): if marker_path.is_file(): previous_install_layout = marker_path.read_text().rstrip() if previous_install_layout == this_install_layout: return change_option = 'remove' if merge_install else 'add' raise RuntimeError( f"The install directory '{install_base}' was created with the " f"layout '{previous_install_layout}'. Please remove the " f'install directory, pick a different one or {change_option} ' "the '--merge-install' option.") else: try: os.makedirs(install_base, exist_ok=True) except FileExistsError: raise RuntimeError( f"The install base '{install_base}' is not a directory") marker_path.write_text(this_install_layout + '\n') def update_object( object_, key, value, package_name, argument_type, value_source ): """ Set or update an attribute of an object. If the attribute exists and the passed value as well as the current value of the attribute are dictionaries then the current values are being updated with the passed values. If the attribute exists and the passed value as well as the current value of the attribute are lists then the passed values are being appended to the current values. Otherwise the attribute is being set to the passed value potentially overwriting an existing value. 
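    For example, if the attribute currently holds `{'a': 1}` and the passed
    value is `{'b': 2}`, the attribute becomes `{'a': 1, 'b': 2}`; if it
    holds `[1]` and the passed value is `[2]`, it becomes `[1, 2]`; for any
    other combination the passed value replaces the previous one.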
:param key: The name of the attributes :param value: The value used to set or update the attribute :param str package_name: The package name, only used for log messages :param str argument_type: The argument type, only used in log messages :param str value_source: The source of the value, only used for log messages """ if not hasattr(object_, key): logger.log( 5, f"set package '{package_name}' {argument_type} argument " f"'{key}' from {value_source} to '{value}'") # add value to the object # copy value to avoid changes to either of them to affect each other setattr(object_, key, copy.deepcopy(value)) return old_value = getattr(object_, key) if isinstance(old_value, dict) and isinstance(value, dict): logger.log( 5, f"update package '{package_name}' {argument_type} argument " f"'{key}' from {value_source} with '{value}'") # update dictionary old_value.update(value) return if isinstance(old_value, list) and isinstance(value, list): logger.log( 5, f"extend package '{package_name}' {argument_type} argument " f"'{key}' from {value_source} with '{value}'") # extend list old_value += value return severity = 5 \ if old_value is None or type(old_value) is type(value) \ else logging.WARNING logger.log( severity, f"overwrite package '{package_name}' {argument_type} " f"argument '{key}' from {value_source} with '{value}' (before: " f"'{old_value}')") # overwrite existing value # copy value to avoid changes to either of them to affect each other setattr(object_, key, copy.deepcopy(value)) colcon-core-0.17.1/colcon_core/verb/build.py000066400000000000000000000224221465053734400207140ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict import os import os.path from pathlib import Path import traceback from colcon_core.argument_default import wrap_default_value from colcon_core.argument_parser.destination_collector \ import DestinationCollectorDecorator from colcon_core.argument_type import get_cwd_path_resolver from colcon_core.event.job import JobUnselected from colcon_core.event_handler import add_event_handler_arguments from colcon_core.executor import add_executor_arguments from colcon_core.executor import execute_jobs from colcon_core.executor import Job from colcon_core.executor import OnError from colcon_core.package_identification.ignore import IGNORE_MARKER from colcon_core.package_selection import add_arguments \ as add_packages_arguments from colcon_core.package_selection import get_packages from colcon_core.plugin_system import satisfies_version from colcon_core.shell import get_shell_extensions from colcon_core.task import add_task_arguments from colcon_core.task import get_task_extension from colcon_core.task import TaskContext from colcon_core.verb import check_and_mark_build_tool from colcon_core.verb import check_and_mark_install_layout from colcon_core.verb import logger from colcon_core.verb import update_object from colcon_core.verb import VerbExtensionPoint class BuildPackageArguments: """Arguments to build a specific package.""" def __init__(self, pkg, args, *, additional_destinations=None): """ Construct a BuildPackageArguments. 
:param pkg: The package descriptor :param args: The parsed command line arguments :param list additional_destinations: The destinations of additional arguments """ super().__init__() self.path = os.path.abspath( os.path.join(os.getcwd(), str(pkg.path))) self.build_base = os.path.abspath(os.path.join( os.getcwd(), args.build_base, pkg.name)) self.install_base = os.path.abspath(os.path.join( os.getcwd(), args.install_base)) self.merge_install = args.merge_install if not args.merge_install: self.install_base = os.path.join( self.install_base, pkg.name) self.symlink_install = args.symlink_install self.test_result_base = os.path.abspath(os.path.join( os.getcwd(), args.test_result_base, pkg.name)) \ if args.test_result_base else None # set additional arguments for dest in (additional_destinations or []): # from the command line if hasattr(args, dest): update_object( self, dest, getattr(args, dest), pkg.name, 'build', 'command line') # from the package metadata if dest in pkg.metadata: update_object( self, dest, pkg.metadata[dest], pkg.name, 'build', 'package metadata') class BuildVerb(VerbExtensionPoint): """Build a set of packages.""" def __init__(self): # noqa: D107 super().__init__() satisfies_version(VerbExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--build-base', default=wrap_default_value('build'), type=get_cwd_path_resolver(), help='The base path for all build directories (default: build)') parser.add_argument( '--install-base', default=wrap_default_value('install'), type=get_cwd_path_resolver(), help='The base path for all install prefixes (default: install)') parser.add_argument( '--merge-install', action='store_true', help='Merge all install prefixes into a single location') parser.add_argument( '--symlink-install', action='store_true', help='Use symlinks instead of copying files where possible') parser.add_argument( '--test-result-base', type=get_cwd_path_resolver(), help='The base path for all test results (default: --build-base)') parser.add_argument( '--continue-on-error', action='store_true', help='Continue other packages when a package fails to build ' '(packages recursively depending on the failed package are ' 'skipped)') add_executor_arguments(parser) add_event_handler_arguments(parser) add_packages_arguments(parser) decorated_parser = DestinationCollectorDecorator(parser) add_task_arguments(decorated_parser, 'colcon_core.task.build') self.task_argument_destinations = decorated_parser.get_destinations() def main(self, *, context): # noqa: D102 check_and_mark_build_tool(context.args.build_base) check_and_mark_install_layout( context.args.install_base, merge_install=context.args.merge_install) self._create_paths(context.args) decorators = get_packages( context.args, additional_argument_names=self.task_argument_destinations, recursive_categories=('run', )) install_base = os.path.abspath(os.path.join( os.getcwd(), context.args.install_base)) jobs, unselected_packages = self._get_jobs( context.args, decorators, install_base) on_error = OnError.interrupt \ if not context.args.continue_on_error else OnError.skip_downstream def post_unselected_packages(*, event_queue): nonlocal unselected_packages names = [pkg.name for pkg in unselected_packages] for name in sorted(names): event_queue.put( (JobUnselected(name), None)) rc = execute_jobs( context, jobs, on_error=on_error, pre_execution_callback=post_unselected_packages) self._create_prefix_scripts(install_base, context.args.merge_install) return rc def 
_create_paths(self, args): self._create_path(args.build_base) self._create_path(args.install_base) def _create_path(self, path): path = Path(os.path.abspath(path)) if not path.exists(): path.mkdir(parents=True, exist_ok=True) ignore_marker = path / IGNORE_MARKER if not os.path.lexists(str(ignore_marker)): with ignore_marker.open('w'): pass def _get_jobs(self, args, decorators, install_base): jobs = OrderedDict() unselected_packages = set() for decorator in decorators: pkg = decorator.descriptor if not decorator.selected: unselected_packages.add(pkg) continue extension = get_task_extension('colcon_core.task.build', pkg.type) if not extension: logger.warning( f"No task extension to 'build' a '{pkg.type}' package") continue recursive_dependencies = OrderedDict() for dep_name in decorator.recursive_dependencies: dep_path = install_base if not args.merge_install: dep_path = os.path.join(dep_path, dep_name) recursive_dependencies[dep_name] = dep_path package_args = BuildPackageArguments( pkg, args, additional_destinations=self .task_argument_destinations.values()) ordered_package_args = ', '.join([ ('%s: %s' % (repr(k), repr(package_args.__dict__[k]))) for k in sorted(package_args.__dict__.keys()) ]) logger.debug( f"Building package '{pkg.name}' with the following arguments: " f'{{{ordered_package_args}}}') task_context = TaskContext( pkg=pkg, args=package_args, dependencies=recursive_dependencies) job = Job( identifier=pkg.name, dependencies=set(recursive_dependencies.keys()), task=extension, task_context=task_context) jobs[pkg.name] = job return jobs, unselected_packages def _create_prefix_scripts(self, install_base, merge_install): extensions = get_shell_extensions() for priority in extensions.keys(): extensions_same_prio = extensions[priority] for extension in extensions_same_prio.values(): try: scripts = extension.create_prefix_script( Path(install_base), merge_install) # TODO: Disallow 'None' in v3.0 of ShellExtensionPoint if scripts is not None: assert isinstance(scripts, list), \ 'create_prefix_script() should return a list' except Exception as e: # noqa: F841 # catch exceptions raised in shell extension exc = traceback.format_exc() logger.error( 'Exception in shell extension ' f"'{extension.SHELL_NAME}': {e}\n{exc}") # skip failing extension, continue with next one colcon-core-0.17.1/colcon_core/verb/test.py000066400000000000000000000215211465053734400205730ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict import os import types from colcon_core.argument_default import wrap_default_value from colcon_core.argument_parser.destination_collector \ import DestinationCollectorDecorator from colcon_core.argument_type import get_cwd_path_resolver from colcon_core.event.test import TestFailure from colcon_core.event_handler import add_event_handler_arguments from colcon_core.executor import add_executor_arguments from colcon_core.executor import execute_jobs from colcon_core.executor import Job from colcon_core.executor import OnError from colcon_core.feature_flags import is_feature_flag_set from colcon_core.logging import colcon_logger from colcon_core.package_selection import add_arguments \ as add_packages_arguments from colcon_core.package_selection import get_packages from colcon_core.plugin_system import satisfies_version from colcon_core.task import add_task_arguments from colcon_core.task import get_task_extension from colcon_core.task import TaskContext from colcon_core.verb import 
check_and_mark_build_tool from colcon_core.verb import check_and_mark_install_layout from colcon_core.verb import update_object from colcon_core.verb import VerbExtensionPoint logger = colcon_logger.getChild(__name__) class TestPackageArguments: """Arguments to test a specific package.""" def __init__(self, pkg, args, *, additional_destinations=None): """ Construct a TestPackageArguments. :param pkg: The package descriptor :param args: The parsed command line arguments :param list additional_destinations: The destinations of additional arguments """ super().__init__() self.path = os.path.abspath( os.path.join(os.getcwd(), str(pkg.path))) self.build_base = os.path.abspath(os.path.join( os.getcwd(), args.build_base, pkg.name)) self.install_base = os.path.abspath(os.path.join( os.getcwd(), args.install_base)) if not args.merge_install: self.install_base = os.path.join( self.install_base, pkg.name) self.test_result_base = os.path.abspath(os.path.join( os.getcwd(), args.test_result_base, pkg.name)) \ if args.test_result_base else None # set additional arguments for dest in (additional_destinations or []): # from the command line if hasattr(args, dest): update_object( self, dest, getattr(args, dest), pkg.name, 'test', 'command line') # from the package metadata if dest in pkg.metadata: update_object( self, dest, pkg.metadata[dest], pkg.name, 'test', 'package metadata') class TestVerb(VerbExtensionPoint): """ Test a set of packages. Each test task is expected to post a :py:class:`colcon_core.event.test.TestFailure` event in case of test failures. """ def __init__(self): # noqa: D107 super().__init__() satisfies_version(VerbExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--build-base', default=wrap_default_value('build'), type=get_cwd_path_resolver(), help='The base path for all build directories (default: build)') parser.add_argument( '--install-base', default=wrap_default_value('install'), type=get_cwd_path_resolver(), help='The base path for all install prefixes (default: install)') parser.add_argument( '--merge-install', action='store_true', help='Merge all install prefixes into a single location') parser.add_argument( '--test-result-base', type=get_cwd_path_resolver(), help='The base path for all test results (default: --build-base)') group = parser.add_mutually_exclusive_group() group.add_argument( '--retest-until-fail', type=int, default=0, metavar='N', help='Rerun tests up to N times if they pass') group.add_argument( '--retest-until-pass', type=int, default=0, metavar='N', help='Rerun failing tests up to N times') parser.add_argument( '--abort-on-error', action='store_true', help='Abort after the first package with any errors (failing ' 'tests are not considered errors in this context)') parser.add_argument( '--return-code-on-test-failure', action='store_true', help='Use a non-zero return code to indicate any test failure') add_executor_arguments(parser) add_event_handler_arguments(parser) add_packages_arguments(parser) decorated_parser = DestinationCollectorDecorator(parser) add_task_arguments(decorated_parser, 'colcon_core.task.test') self.task_argument_destinations = decorated_parser.get_destinations() self.task_argument_destinations['retest-until-pass'] = \ 'retest_until_pass' self.task_argument_destinations['retest-until-fail'] = \ 'retest_until_fail' def main(self, *, context): # noqa: D102 check_and_mark_build_tool(context.args.build_base) check_and_mark_install_layout( context.args.install_base, 
merge_install=context.args.merge_install) decorators = get_packages( context.args, additional_argument_names=self.task_argument_destinations, recursive_categories=('run', )) install_base = os.path.abspath(os.path.join( os.getcwd(), context.args.install_base)) jobs = self._get_jobs(context.args, decorators, install_base) if context.args.return_code_on_test_failure: # watch published events on all jobs to detect any test failures any_test_failures = False def check_for_test_failures(put_event_into_queue): nonlocal any_test_failures def put_event_into_queue_(self, event): nonlocal any_test_failures nonlocal put_event_into_queue if isinstance(event, TestFailure): any_test_failures = True return put_event_into_queue(event) return put_event_into_queue_ for job in jobs.values(): job.put_event_into_queue = types.MethodType( check_for_test_failures(job.put_event_into_queue), job) on_error = OnError.continue_ \ if not context.args.abort_on_error else OnError.interrupt rc = execute_jobs(context, jobs, on_error=on_error) if context.args.return_code_on_test_failure: if not rc and any_test_failures: return 1 return rc def _get_jobs(self, args, decorators, install_base): jobs = OrderedDict() drop_test_deps = is_feature_flag_set('drop_test_deps') for decorator in decorators: if not decorator.selected: continue pkg = decorator.descriptor extension = get_task_extension('colcon_core.task.test', pkg.type) if not extension: logger.warning( f"No task extension to 'test' a '{pkg.type}' package") continue recursive_dependencies = OrderedDict() # for testing a package include itself in the environment for dep_name in decorator.recursive_dependencies + [pkg.name]: dep_path = install_base if not args.merge_install: dep_path = os.path.join(dep_path, dep_name) recursive_dependencies[dep_name] = dep_path package_args = TestPackageArguments( pkg, args, additional_destinations=self .task_argument_destinations.values()) ordered_package_args = ', '.join([ ('%s: %s' % (repr(k), repr(package_args.__dict__[k]))) for k in sorted(package_args.__dict__.keys()) ]) logger.debug( f"Testing package '{pkg.name}' with the following arguments: " f'{{{ordered_package_args}}}') task_context = TaskContext( pkg=pkg, args=package_args, dependencies=recursive_dependencies) job = Job( identifier=pkg.name, dependencies=set( () if drop_test_deps else recursive_dependencies.keys() ), task=extension, task_context=task_context) jobs[pkg.name] = job return jobs colcon-core-0.17.1/debian/000077500000000000000000000000001465053734400152405ustar00rootroot00000000000000colcon-core-0.17.1/debian/patches/000077500000000000000000000000001465053734400166675ustar00rootroot00000000000000colcon-core-0.17.1/debian/patches/setup.cfg.patch000066400000000000000000000014001465053734400216010ustar00rootroot00000000000000Description: Remove optional dependencies from Debian packages Otherwise being listed there but not being installed would result in a runtime error by pkg_resources. 
Author: Dirk Thomas --- setup.cfg 2018-05-27 11:22:33.000000000 -0700 +++ setup.cfg.patched 2018-05-27 11:22:33.000000000 -0700 @@ -33,9 +33,12 @@ importlib-metadata; python_version < "3.8" packaging pytest - pytest-cov - pytest-repeat - pytest-rerunfailures + # the following dependencies are optional when installing from Debians + # so listing them here but not installing them in the Debian package + # would result in a runtime error by pkg_resources + # pytest-cov + # pytest-repeat + # pytest-rerunfailures setuptools>=30.3.0 packages = find: zip_safe = false colcon-core-0.17.1/publish-python.yaml000066400000000000000000000005201465053734400176640ustar00rootroot00000000000000artifacts: - type: wheel uploads: - type: pypi - type: stdeb uploads: - type: packagecloud config: repository: dirk-thomas/colcon distributions: - ubuntu:focal - ubuntu:jammy - ubuntu:noble - debian:bookworm - debian:trixie colcon-core-0.17.1/setup.cfg000066400000000000000000000150321465053734400156400ustar00rootroot00000000000000[metadata] name = colcon-core version = attr: colcon_core.__version__ url = https://colcon.readthedocs.io project_urls = Changelog = https://github.com/colcon/colcon-core/milestones?direction=desc&sort=due_date&state=closed GitHub = https://github.com/colcon/colcon-core/ author = Dirk Thomas author_email = web@dirk-thomas.net maintainer = Dirk Thomas maintainer_email = web@dirk-thomas.net classifiers = Development Status :: 3 - Alpha Environment :: Console Intended Audience :: Developers License :: OSI Approved :: Apache Software License Operating System :: MacOS Operating System :: Microsoft :: Windows Operating System :: POSIX Programming Language :: Python Topic :: Software Development :: Build Tools license = Apache License, Version 2.0 description = Command line tool to build sets of software packages. 
long_description = file: README.rst keywords = colcon [options] python_requires = >=3.6 install_requires = coloredlogs; sys_platform == 'win32' distlib EmPy<4 importlib-metadata; python_version < "3.8" packaging # the pytest dependency and its extensions are provided for convenience # even though they are only conditional pytest pytest-cov pytest-repeat pytest-rerunfailures setuptools>=30.3.0 packages = find: zip_safe = false [options.extras_require] test = flake8>=3.6.0 flake8-blind-except flake8-builtins flake8-class-newline flake8-comprehensions flake8-deprecated flake8-docstrings flake8-import-order flake8-quotes pep8-naming pylint pytest pytest-cov scspell3k>=2.2 [options.packages.find] exclude = test test.* [tool:pytest] filterwarnings = error # Suppress deprecation warnings in other packages ignore:Deprecated call to `pkg_resources.declare_namespace\('paste'\)`:: ignore:lib2to3 package is deprecated::scspell ignore:pkg_resources is deprecated as an API:: ignore:SelectableGroups dict interface is deprecated::flake8 ignore:The loop argument is deprecated::asyncio ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated::pydocstyle ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated::pyreadline junit_suite_name = colcon-core markers = flake8 linter python_classes = !TestFailure [options.entry_points] colcon_core.argument_parser = colcon_core.environment = path = colcon_core.environment.path:PathEnvironment pythonpath = colcon_core.environment.pythonpath:PythonPathEnvironment pythonscriptspath = colcon_core.environment.path:PythonScriptsPathEnvironment colcon_core.environment_variable = all_shells = colcon_core.shell:ALL_SHELLS_ENVIRONMENT_VARIABLE default_executor = colcon_core.executor:DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE extension_blocklist = colcon_core.extension_point:EXTENSION_BLOCKLIST_ENVIRONMENT_VARIABLE home = colcon_core.command:HOME_ENVIRONMENT_VARIABLE log_level = colcon_core.command:LOG_LEVEL_ENVIRONMENT_VARIABLE warnings = colcon_core.command:WARNINGS_ENVIRONMENT_VARIABLE colcon_core.event_handler = console_direct = colcon_core.event_handler.console_direct:ConsoleDirectEventHandler console_start_end = colcon_core.event_handler.console_start_end:ConsoleStartEndEventHandler log_command = colcon_core.event_handler.log_command:LogCommandEventHandler colcon_core.executor = sequential = colcon_core.executor.sequential:SequentialExecutor colcon_core.extension_point = colcon_core.argument_parser = colcon_core.argument_parser:ArgumentParserDecoratorExtensionPoint colcon_core.environment = colcon_core.environment:EnvironmentExtensionPoint colcon_core.event_handler = colcon_core.event_handler:EventHandlerExtensionPoint colcon_core.executor = colcon_core.executor:ExecutorExtensionPoint colcon_core.package_augmentation = colcon_core.package_augmentation:PackageAugmentationExtensionPoint colcon_core.package_discovery = colcon_core.package_discovery:PackageDiscoveryExtensionPoint colcon_core.package_identification = colcon_core.package_identification:PackageIdentificationExtensionPoint colcon_core.package_selection = colcon_core.package_selection:PackageSelectionExtensionPoint colcon_core.prefix_path = colcon_core.prefix_path:PrefixPathExtensionPoint colcon_core.python_testing = colcon_core.task.python.test:PythonTestingStepExtensionPoint colcon_core.shell = colcon_core.shell:ShellExtensionPoint colcon_core.shell.find_installed_packages = colcon_core.shell:FindInstalledPackagesExtensionPoint 
colcon_core.task.build = colcon_core.task:TaskExtensionPoint colcon_core.task.test = colcon_core.task:TaskExtensionPoint colcon_core.verb = colcon_core.verb:VerbExtensionPoint colcon_core.package_augmentation = python = colcon_core.package_augmentation.python:PythonPackageAugmentation colcon_core.package_discovery = path = colcon_core.package_discovery.path:PathPackageDiscovery colcon_core.package_identification = ignore = colcon_core.package_identification.ignore:IgnorePackageIdentification python = colcon_core.package_identification.python:PythonPackageIdentification colcon_core.package_selection = colcon_core.prefix_path = colcon = colcon_core.prefix_path.colcon:ColconPrefixPath colcon_core.python_testing = pytest = colcon_core.task.python.test.pytest:PytestPythonTestingStep setuppy_test = colcon_core.task.python.test.setuppy_test:SetuppyPythonTestingStep colcon_core.shell = bat = colcon_core.shell.bat:BatShell dsv = colcon_core.shell.dsv:DsvShell sh = colcon_core.shell.sh:ShShell colcon_core.shell.find_installed_packages = colcon_isolated = colcon_core.shell.installed_packages:IsolatedInstalledPackageFinder colcon_merged = colcon_core.shell.installed_packages:MergedInstalledPackageFinder colcon_core.task.build = python = colcon_core.task.python.build:PythonBuildTask colcon_core.task.test = python = colcon_core.task.python.test:PythonTestTask colcon_core.verb = build = colcon_core.verb.build:BuildVerb test = colcon_core.verb.test:TestVerb console_scripts = colcon = colcon_core.command:main pytest11 = colcon_core_warnings_stderr = colcon_core.pytest.hooks [options.package_data] colcon_core.shell.template = *.em colcon_core.task.python.template = *.em colcon_core.task.python.colcon_distutils_commands = */METADATA */entry_points.txt [flake8] import-order-style = google per-file-ignores = colcon_core/distutils/__init__.py:A005 colcon_core/logging.py:A005 colcon_core/subprocess.py:A005 [coverage:run] source = colcon_core colcon-core-0.17.1/setup.py000066400000000000000000000041171465053734400155330ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os import sys from setuptools import setup minimum_version = (3, 6) if sys.version_info < minimum_version: sys.exit('This package requires at least Python %d.%d' % minimum_version) cmdclass = {} try: from stdeb.command.sdist_dsc import sdist_dsc except ImportError: pass else: class CustomSdistDebCommand(sdist_dsc): """Weird approach to apply the Debian patches during packaging.""" def run(self): # noqa: D102 from stdeb.command import sdist_dsc build_dsc = sdist_dsc.build_dsc def custom_build_dsc(*args, **kwargs): nonlocal build_dsc debinfo = self.get_debinfo() repackaged_dirname = \ debinfo.source + '-' + debinfo.upstream_version dst_directory = os.path.join( self.dist_dir, repackaged_dirname, 'debian', 'patches') os.makedirs(dst_directory, exist_ok=True) # read patch with open('debian/patches/setup.cfg.patch', 'r') as h: lines = h.read().splitlines() print( "writing customized patch '%s'" % os.path.join(dst_directory, 'setup.cfg.patch')) # write patch with modified path with open( os.path.join(dst_directory, 'setup.cfg.patch'), 'w' ) as h: for line in lines: if line.startswith('--- ') or line.startswith('+++ '): line = \ line[0:4] + repackaged_dirname + '/' + line[4:] h.write(line + '\n') with open(os.path.join(dst_directory, 'series'), 'w') as h: h.write('setup.cfg.patch\n') return build_dsc(*args, **kwargs) sdist_dsc.build_dsc = custom_build_dsc super().run() 
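    # The patched build_dsc above prefixes the '--- ' and '+++ ' lines of
    # debian/patches/setup.cfg.patch with the repackaged source directory
    # (e.g. 'colcon-core-0.17.1/setup.cfg') and writes a 'series' file
    # listing the patch before delegating to the original build_dsc.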
cmdclass['sdist_dsc'] = CustomSdistDebCommand setup(cmdclass=cmdclass) colcon-core-0.17.1/stdeb.cfg000066400000000000000000000005411465053734400156000ustar00rootroot00000000000000[colcon-core] No-Python2: Depends3: python3-distlib, python3-empy (<4), python3-packaging, python3-pytest, python3-setuptools, python3 (>= 3.8) | python3-importlib-metadata Recommends3: python3-pytest-cov Suggests3: python3-pytest-repeat, python3-pytest-rerunfailures Replaces3: colcon Suite: focal jammy noble bookworm trixie X-Python3-Version: >= 3.6 colcon-core-0.17.1/test/000077500000000000000000000000001465053734400147755ustar00rootroot00000000000000colcon-core-0.17.1/test/__init__.py000066400000000000000000000000001465053734400170740ustar00rootroot00000000000000colcon-core-0.17.1/test/entry_point_context.py000066400000000000000000000014651465053734400214730ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core import plugin_system class EntryPointContext: def __init__(self, **kwargs): self._kwargs = kwargs self._memento = None def __enter__(self): # reset entry point cache, provide new instances in each scope plugin_system._extension_instances.clear() self._memento = plugin_system.load_entry_points def load_entry_points(_, *, exclude_names=None): nonlocal self return { k: v for k, v in self._kwargs.items() if exclude_names is None or k not in exclude_names} plugin_system.load_entry_points = load_entry_points def __exit__(self, *_): plugin_system.load_entry_points = self._memento colcon-core-0.17.1/test/environment_context.py000066400000000000000000000011301465053734400214520ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os class EnvironmentContext: def __init__(self, **kwargs): self._kwargs = kwargs self._memento = {} def __enter__(self): for k, v in self._kwargs.items(): if k in os.environ: self._memento[k] = os.environ[k] os.environ[k] = v def __exit__(self, *_): for k, v in self._kwargs.items(): if k in self._memento: os.environ[k] = self._memento[k] else: del os.environ[k] colcon-core-0.17.1/test/extension_point_context.py000066400000000000000000000015651465053734400223470ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Copyright 2023 Open Source Robotics Foundation, Inc. 
# Licensed under the Apache License, Version 2.0 from colcon_core import plugin_system class ExtensionPointContext: def __init__(self, **kwargs): self._kwargs = kwargs self._memento = None def __enter__(self): # reset entry point cache, provide new instances in each scope plugin_system._extension_instances.clear() self._memento = plugin_system.load_extension_points def load_extension_points(_, *, excludes=None): nonlocal self return { k: v for k, v in self._kwargs.items() if excludes is None or k not in excludes} plugin_system.load_extension_points = load_extension_points def __exit__(self, *_): plugin_system.load_extension_points = self._memento colcon-core-0.17.1/test/run_until_complete.py000066400000000000000000000005651465053734400212640ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import asyncio from colcon_core.subprocess import new_event_loop def run_until_complete(coroutine): loop = new_event_loop() asyncio.set_event_loop(loop) try: return loop.run_until_complete(coroutine) finally: loop.close() assert loop.is_closed() colcon-core-0.17.1/test/spell_check.words000066400000000000000000000025661465053734400203420ustar00rootroot00000000000000addfinalizer addopts apache argparse asyncio autouse backported basepath bazqux blocklist callables capsys catched changelog classname colcon coloredlogs configparser contextlib coroutine coroutines cpython datetime debian debinfo decodable decoree decos deepcopy defaultdict depreated deps descs distlib docstring executables exitstatus fdopen ffoo filterwarnings foobar fooo fromhex functools getcategory getpid getpreferredencoding getsignal github hardcodes hookimpl hookwrapper https importlib importorskip isatty iterdir itertools junit levelname libexec lineno linter linux lstrip minversion mkdtemp monkeypatch namedtuple nargs noop noops noqa notestscollected openpty optionxform pathlib pkgname pkgs plugin popitem prepend prepended prepending proactor purelib pydocstyle pytest pytests pythondontwritebytecode pythonpath pythonscriptspath pythonwarnings readouterr readthedocs recrawling recursing relpath rerunfailures returncode retval rglob rindex rmtree rstrip rtype samefile scspell sdist searchability separarator setupcfg setuppy setupscript setuptools shlex sigint sitecustomize skipif sloretz stacklevel staticmethod stdeb stringify subparser subparsers subprocesses symlink symlinks sysconfig tempfile terminalreporter testcase testsfailed testsuite thomas tmpdir todo traceback tryfirst tuples uninstall unittest unittests unlinking unrenamed usefixtures wildcards workaround colcon-core-0.17.1/test/test_action_collector.py000066400000000000000000000116141465053734400217340ustar00rootroot00000000000000# Copyright 2022 Open Source Robotics Foundation, Inc. 
# Licensed under the Apache License, Version 2.0 import argparse import sys from colcon_core.argument_parser.action_collector \ import ActionCollectorDecorator from colcon_core.argument_parser.action_collector \ import SuppressRequiredActions from colcon_core.argument_parser.action_collector \ import SuppressTypeConversions import pytest class _RaisingArgumentParser(argparse.ArgumentParser): def error(self, message): raise sys.exc_info()[1] or Exception(message) def test_action_collector_decorator(): parser = argparse.ArgumentParser() decorator = ActionCollectorDecorator(parser) a = decorator.add_argument('positional') assert decorator.get_collected_actions() == {a} b = decorator.add_argument('--option', type=bool) assert decorator.get_collected_actions() == {a, b} def test_suppress_required_actions(): parser = _RaisingArgumentParser() decorator = ActionCollectorDecorator(parser) pos1 = decorator.add_argument('pos1') decorator.add_argument('pos2', nargs='?') args = parser.parse_args(['foo', 'bar']) assert 'foo' == args.pos1 assert 'bar' == args.pos2 with SuppressRequiredActions((decorator,)): parser.parse_args([]) with pytest.raises(Exception): parser.parse_args([]) with pytest.raises(Exception): with SuppressRequiredActions((decorator,), {pos1}): parser.parse_args([]) args = parser.parse_args(['foo', 'bar']) assert 'foo' == args.pos1 assert 'bar' == args.pos2 def test_suppress_type_conversions(): parser = _RaisingArgumentParser() decorator = ActionCollectorDecorator(parser) action_f = decorator.add_argument('-f', type=float) action_i = decorator.add_argument('-i', type=int) decorator.register('action', 'not_implemented', argparse.Action) decorator.register('type', 'hex', float.fromhex) action_x = decorator.add_argument('-x', type='hex', default=None) decorator.add_argument('-s') args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42']) assert 3.14 == args.f assert 1 == args.i assert 0x42 == args.x with SuppressTypeConversions((decorator,)): parser.parse_args(['-f', 'bar', '-i', '1', '-x', '0x42']) with pytest.raises(argparse.ArgumentError): parser.parse_args(['-f', 'bar', '-i', '1', '-x', '0x42']) with pytest.raises(argparse.ArgumentError): with SuppressTypeConversions((decorator,), {action_f}): parser.parse_args(['-f', 'bar', '-i', '1', '-x', '0x42']) with SuppressTypeConversions((decorator,)): parser.parse_args(['-f', '3.14', '-i', 'bar', '-x', '0x42']) with pytest.raises(argparse.ArgumentError): parser.parse_args(['-f', '3.14', '-i', 'bar', '-x', '0x42']) with pytest.raises(argparse.ArgumentError): with SuppressTypeConversions((decorator,), {action_i}): parser.parse_args(['-f', '3.14', '-i', 'bar', '-x', '0x42']) with SuppressTypeConversions((decorator,)): parser.parse_args(['-f', '3.14', '-i', '1', '-x', 'foo']) with pytest.raises(argparse.ArgumentError): parser.parse_args(['-f', '3.14', '-i', '1', '-x', 'foo']) with pytest.raises(argparse.ArgumentError): with SuppressTypeConversions((decorator,), {action_x}): parser.parse_args(['-f', '3.14', '-i', '1', '-x', 'foo']) args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42']) assert 3.14 == args.f assert 1 == args.i assert 0x42 == args.x def test_suppress_required_actions_not_decorated(): parser = _RaisingArgumentParser() parser.add_argument('pos1') parser.add_argument('pos2', nargs='?') args = parser.parse_args(['foo']) assert 'foo' == args.pos1 with pytest.raises(Exception): parser.parse_args([]) with SuppressRequiredActions((parser,)): args = parser.parse_args(['foo']) assert 'foo' == args.pos1 with 
pytest.raises(Exception): with SuppressRequiredActions((parser,)): parser.parse_args([]) args = parser.parse_args(['foo']) assert 'foo' == args.pos1 with pytest.raises(Exception): parser.parse_args([]) def test_suppress_type_conversion_not_decorated(): parser = _RaisingArgumentParser() parser.add_argument('-f', type=float) parser.add_argument('-i', type=int) parser.register('action', 'not_implemented', argparse.Action) parser.register('type', 'hex', float.fromhex) parser.add_argument('-x', type='hex', default=None) args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42']) assert 3.14 == args.f assert 1 == args.i assert 0x42 == args.x with SuppressTypeConversions((parser,)): parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42']) args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42']) assert 3.14 == args.f assert 1 == args.i assert 0x42 == args.x colcon-core-0.17.1/test/test_argument_default.py000066400000000000000000000017601465053734400217400ustar00rootroot00000000000000# Copyright 2020 Dirk Thomas # Licensed under the Apache License, Version 2.0 from colcon_core.argument_default import is_default_value from colcon_core.argument_default import unwrap_default_value from colcon_core.argument_default import wrap_default_value import pytest def test_argument_default(): values = [ True, [1, 2, 3], 'foo', ] for value in values: assert not is_default_value(value) with pytest.raises(ValueError): unwrap_default_value(value) default_value = wrap_default_value(value) assert is_default_value(default_value) assert type(default_value) is not type(value) with pytest.raises(ValueError): wrap_default_value(default_value) unwrapped_value = unwrap_default_value(default_value) assert value == unwrapped_value value = 42 unchanged_value = wrap_default_value(value) assert type(unchanged_value) is type(value) assert unchanged_value == value colcon-core-0.17.1/test/test_argument_parser.py000066400000000000000000000110551465053734400216060ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from argparse import ArgumentParser from unittest.mock import Mock from unittest.mock import patch from colcon_core.argument_parser import ArgumentParserDecorator from colcon_core.argument_parser import ArgumentParserDecoratorExtensionPoint from colcon_core.argument_parser import decorate_argument_parser from colcon_core.argument_parser import get_argument_parser_extensions import pytest from .extension_point_context import ExtensionPointContext class Extension1(ArgumentParserDecoratorExtensionPoint): PRIORITY = 80 class Extension2(ArgumentParserDecoratorExtensionPoint): pass def test_get_argument_parser_extensions(): with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_argument_parser_extensions() assert ['extension2', 'extension1'] == \ list(extensions.keys()) def decorate_argument_parser_mock(*, parser): class Decorator(): def __init__(self, parser): self.parser = parser def add_argument(self, *args, **kwargs): pass # pragma: no cover return Decorator(parser) def test_decorate_argument_parser(): parser = ArgumentParser() with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_argument_parser_extensions() # one invalid return value, one not implemented extensions['extension1'].decorate_argument_parser = Mock( return_value=None) with patch('colcon_core.argument_parser.logger.error') as error: decorated_parser = decorate_argument_parser(parser) assert decorated_parser == parser 
# the raised exceptions are catched and result in error messages assert error.call_count == 2 assert len(error.call_args_list[0][0]) == 1 assert error.call_args_list[0][0][0].startswith( "Exception in argument parser decorator extension 'extension2': " '\n') assert error.call_args_list[0][0][0].endswith( '\nNotImplementedError\n') assert len(error.call_args_list[1][0]) == 1 assert error.call_args_list[1][0][0].startswith( "Exception in argument parser decorator extension 'extension1': " 'decorate_argument_parser() should return a parser like object\n') # one exception, one valid decorator extensions['extension2'].decorate_argument_parser = Mock( side_effect=RuntimeError('custom exception')) extensions['extension1'].decorate_argument_parser = Mock( side_effect=decorate_argument_parser_mock) with patch('colcon_core.argument_parser.logger.error') as error: decorated_parser = decorate_argument_parser(parser) assert decorated_parser.parser == parser # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in argument parser decorator extension 'extension2': " 'custom exception\n') class Decorator(ArgumentParserDecorator): def __init__(self, parser, **kwargs): self.foo = 'foo' super().__init__(parser, **kwargs) def test_argument_parser_decorator(): parser = ArgumentParser() # __getattr__ decorator = ArgumentParserDecorator(parser) assert decorator.format_help == parser.format_help del decorator.__dict__['_decoree'] with pytest.raises(AttributeError): decorator.format_help # __setattr__ decorator = Decorator(parser) decorator.foo = 'bar' assert 'foo' in decorator.__dict__ assert decorator.__dict__['foo'] == 'bar' decorator.add_argument = True assert parser.add_argument is True assert 'bar' not in decorator.__dict__ del decorator.__dict__['_decoree'] decorator.bar = 'baz' assert 'bar' in decorator.__dict__ assert decorator.__dict__['bar'] == 'baz' # nesting parser = ArgumentParser() decorator = Decorator(parser) group = decorator.add_argument_group() group.add_argument('arg1') group = decorator.add_mutually_exclusive_group() group.add_argument('--arg2', action='store_true') group = decorator.add_subparsers(dest='verb') group = group.add_parser('do') group.add_argument('arg3') args = parser.parse_args(['ARG1', '--arg2', 'do', 'ARG3']) assert args.arg1 == 'ARG1' assert args.arg2 is True assert args.verb == 'do' assert args.arg3 == 'ARG3' colcon-core-0.17.1/test/test_build_python.py000066400000000000000000000150741465053734400211150ustar00rootroot00000000000000# Copyright 2019 Rover Robotics # Licensed under the Apache License, Version 2.0 import asyncio from contextlib import suppress from pathlib import Path from tempfile import TemporaryDirectory from types import SimpleNamespace from colcon_core.package_descriptor import PackageDescriptor from colcon_core.plugin_system import SkipExtensionException import colcon_core.shell from colcon_core.shell.bat import BatShell from colcon_core.shell.sh import ShShell from colcon_core.subprocess import new_event_loop from colcon_core.task import TaskContext from colcon_core.task.python.build import PythonBuildTask import pytest @pytest.fixture(autouse=True) def monkey_patch_get_shell_extensions(monkeypatch): a_shell = None for shell_extension_class in [ShShell, BatShell]: with suppress(SkipExtensionException): a_shell = shell_extension_class() break if a_shell is None: pytest.fail('No valid shell extension found.') monkeypatch.setattr( 
colcon_core.shell, 'get_shell_extensions', lambda: { 200: {'mock': a_shell} } ) @pytest.fixture(autouse=True) def monkey_patch_put_event_into_queue(monkeypatch): monkeypatch.setattr( TaskContext, 'put_event_into_queue', lambda *args: None ) def _test_build_package( tmp_path_str, *, symlink_install, setup_cfg, libexec_pattern, data_files ): assert not libexec_pattern or setup_cfg, \ 'The libexec pattern requires use of setup.cfg' if setup_cfg and data_files: pytest.importorskip('setuptools', minversion='40.5.0') event_loop = new_event_loop() asyncio.set_event_loop(event_loop) try: tmp_path = Path(tmp_path_str) python_build_task = PythonBuildTask() package = PackageDescriptor(tmp_path / 'src') package.name = 'test-package' package.type = 'python' package.metadata['get_python_setup_options'] = lambda _: { 'packages': ['my_module'], **( { 'data_files': [ ('share/test_package', ['test-resource']), ] } if data_files else {} ) } context = TaskContext( pkg=package, args=SimpleNamespace( path=str(tmp_path / 'src'), build_base=str(tmp_path / 'build'), install_base=str(tmp_path / 'install'), symlink_install=symlink_install, ), dependencies={} ) python_build_task.set_context(context=context) pkg = python_build_task.context.pkg pkg.path.mkdir(exist_ok=True) if setup_cfg: (pkg.path / 'setup.py').write_text( 'from setuptools import setup\n' 'setup()\n' ) (pkg.path / 'setup.cfg').write_text( '[metadata]\n' 'name = test-package\n' '[options]\n' 'packages = find:\n' '[options.entry_points]\n' 'console_scripts =\n' ' my_command = my_module:main\n' + ( '[develop]\n' 'script-dir=$base/lib/test_package\n' '[install]\n' 'install-scripts=$base/lib/test_package\n' if libexec_pattern else '' ) + ( '[options.data_files]\n' 'share/test_package = test-resource\n' if data_files else '' ) ) else: (pkg.path / 'setup.py').write_text( 'from setuptools import setup\n' 'setup(\n' ' name="test-package",\n' ' packages=["my_module"],\n' ' entry_points={\n' ' "console_scripts": ["my_command = my_module:main"],\n' ' },\n' + ( ' data_files=[\n' ' ("share/test_package", [\n' ' "test-resource",\n' ' ]),\n' ' ],\n' if data_files else '' ) + ')\n' ) (pkg.path / 'my_module').mkdir(exist_ok=True) (pkg.path / 'test-resource').touch() (pkg.path / 'my_module' / '__init__.py').write_text( 'def main():\n' ' print("Hello, World!")\n' ) src_base = Path(python_build_task.context.args.path) source_files_before = set(src_base.rglob('*')) rc = event_loop.run_until_complete(python_build_task.build()) assert not rc source_files_after = set(src_base.rglob('*')) assert source_files_before == source_files_after build_base = Path(python_build_task.context.args.build_base) assert build_base.rglob('my_module/__init__.py') install_base = Path(python_build_task.context.args.install_base) assert symlink_install == any(install_base.rglob( 'test-package.egg-link')) assert symlink_install != any(install_base.rglob( 'PKG-INFO')) assert libexec_pattern == any(install_base.rglob( 'lib/test_package/my_command*')) assert libexec_pattern != ( any(install_base.rglob('bin/my_command*')) or any(install_base.rglob('Scripts/my_command*'))) assert data_files == any(install_base.rglob( 'share/test_package/test-resource')) if not symlink_install: pkg_info, = install_base.rglob('PKG-INFO') assert 'Name: test-package' in pkg_info.read_text().splitlines() finally: event_loop.close() @pytest.mark.parametrize( 'data_files', [False, True]) @pytest.mark.parametrize( 'setup_cfg,libexec_pattern', [(False, False), (True, False), (True, True)]) @pytest.mark.parametrize( 
'symlink_first', [False, True]) def test_build_package(symlink_first, setup_cfg, libexec_pattern, data_files): with TemporaryDirectory(prefix='test_colcon_') as tmp_path_str: _test_build_package( tmp_path_str, symlink_install=symlink_first, setup_cfg=setup_cfg, libexec_pattern=libexec_pattern, data_files=data_files) # Test again with the symlink flag inverted to validate cleanup _test_build_package( tmp_path_str, symlink_install=not symlink_first, setup_cfg=setup_cfg, libexec_pattern=libexec_pattern, data_files=data_files) colcon-core-0.17.1/test/test_command.py000066400000000000000000000212311465053734400200230ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os import shutil import signal import sys from tempfile import mkdtemp from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.command import CommandContext from colcon_core.command import create_parser from colcon_core.command import get_prog_name from colcon_core.command import main from colcon_core.command import verb_main from colcon_core.environment_variable import EnvironmentVariable from colcon_core.verb import VerbExtensionPoint import pytest from .extension_point_context import ExtensionPointContext class Extension1(VerbExtensionPoint): pass class Extension2: """Very long line so that the help text needs to be wrapped.""" def main(self, *, context): pass # pragma: no cover class Extension3(VerbExtensionPoint): def add_arguments(self, *, parser): raise RuntimeError('custom exception') def test_main(): with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3 ): with patch( 'colcon_core.argument_parser.get_argument_parser_extensions', return_value={} ): with pytest.raises(SystemExit) as e: main(argv=['--help']) assert e.value.code == 0 with pytest.raises(SystemExit) as e: main(argv=['--log-level', 'invalid']) assert e.value.code == 2 # avoid creating log directory in the package directory log_base = mkdtemp(prefix='test_colcon_') argv = ['--log-base', log_base] try: main(argv=argv + ['--log-level', 'info']) with patch( 'colcon_core.command.load_extension_points', return_value={ 'key1': EnvironmentVariable('name', 'description'), 'key2': EnvironmentVariable( 'extra_long_name_to_wrap help', 'extra long description text to require a wrap of ' 'the help text not_only_on_spaces_but_also_forced_' 'within_a_very_long_consecutive_word'), } ): main(argv=argv + ['extension1']) finally: # the logging subsystem might still have file handles pending # therefore only try to delete the temporary directory shutil.rmtree(log_base, ignore_errors=True) # catch KeyboardInterrupt and return SIGINT error code with patch('colcon_core.command._main', return_value=0) as _main: _main.side_effect = KeyboardInterrupt() rc = main() assert rc == signal.SIGINT def test_main_no_verbs_or_env(): with ExtensionPointContext(): with patch( 'colcon_core.command.load_extension_points', return_value={}, ): with pytest.raises(SystemExit) as e: main(argv=['--help']) assert e.value.code == 0 def test_main_default_verb(): with ExtensionPointContext(): with patch( 'colcon_core.argument_parser.get_argument_parser_extensions', return_value={} ): with pytest.raises(SystemExit) as e: main(argv=['--help'], default_verb=Extension1) assert e.value.code == 0 with pytest.raises(SystemExit) as e: main( argv=['--log-level', 'invalid'], default_verb=Extension1) assert e.value.code == 2 with patch.object(Extension1, 
'main', return_value=0) as mock_main: assert not main( argv=['--log-base', '/dev/null'], default_verb=Extension1) mock_main.assert_called_once() def test_create_parser(): with ExtensionPointContext(): parser = create_parser('colcon_core.environment_variable') parser.add_argument('--foo', nargs='*', type=str.lstrip) args = parser.parse_args(['--foo', '--bar', '--baz']) assert args.foo == ['--bar', '--baz'] parser.add_argument('--baz', action='store_true') args = parser.parse_args(['--foo', '--bar', '--baz']) assert args.foo == ['--bar'] assert args.baz is True args = parser.parse_args(['--foo', '--bar', ' --baz']) assert args.foo == ['--bar', '--baz'] argv = sys.argv sys.argv = ['/some/path/prog_name/__main__.py'] + sys.argv[1:] with ExtensionPointContext(): parser = create_parser('colcon_core.environment_variable') sys.argv = argv assert parser.prog == 'prog_name' class Object(object): pass def test_verb_main(): args = Object() args.verb_name = 'verb_name' logger = Object() logger.error = Mock() # pass through return code args.main = Mock(return_value=42) context = CommandContext(command_name='command_name', args=args) rc = verb_main(context, logger) assert rc == args.main.return_value logger.error.assert_not_called() # catch RuntimeError and output error message args.main.side_effect = RuntimeError('known error condition') rc = verb_main(context, logger) assert rc logger.error.assert_called_once_with( 'command_name verb_name: known error condition') logger.error.reset_mock() # catch Exception and output error message including traceback args.main.side_effect = Exception('custom error message') rc = verb_main(context, logger) assert rc assert logger.error.call_count == 1 assert len(logger.error.call_args[0]) == 1 assert logger.error.call_args[0][0].startswith( 'command_name verb_name: custom error message\n') assert 'Exception: custom error message' in logger.error.call_args[0][0] def test_prog_name_module(): argv = [os.path.join('foo', 'bar', '__main__.py')] with patch('colcon_core.command.sys.argv', argv): # prog should be the module containing __main__.py assert get_prog_name() == 'bar' def test_prog_name_on_path(): # use __file__ since we know it exists argv = [__file__] with patch('colcon_core.command.sys.argv', argv): with patch( 'colcon_core.command.shutil.which', return_value=__file__ ): # prog should be shortened to the basename assert get_prog_name() == 'test_command.py' def test_prog_name_not_on_path(): # use __file__ since we know it exists argv = [__file__] with patch('colcon_core.command.sys.argv', argv): with patch('colcon_core.command.shutil.which', return_value=None): # prog should remain unchanged assert get_prog_name() == __file__ def test_prog_name_different_on_path(): # use __file__ since we know it exists argv = [__file__] with patch('colcon_core.command.sys.argv', argv): with patch( 'colcon_core.command.shutil.which', return_value=sys.executable ): # prog should remain unchanged assert get_prog_name() == __file__ def test_prog_name_not_a_file(): # pick some file that doesn't actually exist on disk no_such_file = os.path.join(__file__, 'foobar') argv = [no_such_file] with patch('colcon_core.command.sys.argv', argv): with patch( 'colcon_core.command.shutil.which', return_value=no_such_file ): # prog should remain unchanged assert get_prog_name() == no_such_file @pytest.mark.skipif(sys.platform == 'win32', reason='Symlinks not supported.') def test_prog_name_symlink(): # use __file__ since we know it exists with TemporaryDirectory(prefix='test_colcon_') as temp_dir: 
linked_file = os.path.join(temp_dir, 'test_command.py') os.symlink(__file__, linked_file) argv = [linked_file] with patch('colcon_core.command.sys.argv', argv): with patch( 'colcon_core.command.shutil.which', return_value=__file__ ): # prog should be shortened to the basename assert get_prog_name() == 'test_command.py' @pytest.mark.skipif(sys.platform != 'win32', reason='Only valid on Windows.') def test_prog_name_easy_install(): # use __file__ since we know it exists argv = [__file__[:-3]] with patch('colcon_core.command.sys.argv', argv): with patch( 'colcon_core.command.shutil.which', return_value=__file__ ): # prog should be shortened to the basename assert get_prog_name() == 'test_command' colcon-core-0.17.1/test/test_copyright_license.py000066400000000000000000000026241465053734400221240ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path import sys import pytest @pytest.mark.linter def test_copyright_license(): missing = check_files([ Path(__file__).parents[1], Path(__file__).parents[1] / 'bin' / 'colcon', ]) assert not len(missing), \ 'In some files no copyright / license line was found' def check_files(paths): missing = [] for path in paths: if path.is_dir(): for p in sorted(path.iterdir()): if p.name.startswith('.'): continue if p.name.endswith('.py') or p.is_dir(): missing += check_files([p]) if path.is_file(): content = path.read_text() if not content: continue lines = content.splitlines() has_copyright = any(filter( lambda line: line.startswith('# Copyright'), lines)) has_license = \ '# Licensed under the Apache License, Version 2.0' in lines if not has_copyright or not has_license: # pragma: no cover print( 'Could not find copyright / license in:', path, file=sys.stderr) missing .append(path) else: print('Found copyright / license in:', path) return missing colcon-core-0.17.1/test/test_dependency_descriptor.py000066400000000000000000000013411465053734400227610ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import copy from colcon_core.dependency_descriptor import DependencyDescriptor def test_constructor(): d = DependencyDescriptor('foo') assert d == 'foo' assert str(d) == 'foo' assert d.name == 'foo' assert len(d.metadata) == 0 d = DependencyDescriptor('foo', metadata={'bar': 'baz'}) assert d == 'foo' assert str(d) == 'foo' assert d.name == 'foo' assert len(d.metadata) == 1 assert 'bar' in d.metadata assert d.metadata['bar'] == 'baz' d2 = copy.deepcopy(d) assert d.name == d2.name assert d.metadata == d2.metadata d.metadata['bar'] = 'baz baz' assert d.metadata != d2.metadata colcon-core-0.17.1/test/test_destination_collector.py000066400000000000000000000024521465053734400230000ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import argparse from collections import OrderedDict from colcon_core.argument_parser.destination_collector \ import DestinationCollectorDecorator def test_destination_collector_decorator(): parser = argparse.ArgumentParser() decorator = DestinationCollectorDecorator(parser) decorator.add_argument('positional') assert decorator.get_destinations() == {} decorator.add_argument('--option', action='store_true') assert decorator.get_destinations() == OrderedDict([('option', 'option')]) group = decorator.add_mutually_exclusive_group() group.add_argument('--other-option', action='store_true') assert decorator.get_destinations() == OrderedDict([ ('option', 
'option'), ('other-option', 'other_option')]) subparser = decorator.add_subparsers(title='subs', dest='dest') parser = subparser.add_parser('parser') parser.add_argument('--another-option', action='store_true') assert decorator.get_destinations() == OrderedDict([ ('option', 'option'), ('other-option', 'other_option'), ('another-option', 'another_option')]) assert decorator.get_destinations(recursive=False) == OrderedDict([ ('option', 'option'), ('other-option', 'other_option')]) colcon-core-0.17.1/test/test_entry_point.py000066400000000000000000000127511465053734400207660ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from unittest.mock import Mock from unittest.mock import patch import warnings with warnings.catch_warnings(): warnings.filterwarnings( 'ignore', message='.*entry_point.*deprecated.*', category=UserWarning) from colcon_core.entry_point import EXTENSION_POINT_GROUP_NAME from colcon_core.entry_point import get_all_entry_points from colcon_core.entry_point import get_entry_points from colcon_core.entry_point import load_entry_point from colcon_core.entry_point import load_entry_points import pytest from .environment_context import EnvironmentContext class Group1: name = 'group1' class Group2: name = 'group2' class Dist(): def __init__(self, group_name, group): self._group_name = group_name self._group = group def __lt__(self, other): return self._group_name < other._group_name def get_entry_map(self): return self._group class EntryPoint: pass def iter_entry_points(*, group): if group == EXTENSION_POINT_GROUP_NAME: return [Group1, Group2] assert group == Group1.name ep1 = EntryPoint() ep1.name = 'extA' ep2 = EntryPoint() ep2.name = 'extB' return [ep1, ep2] def working_set(): return [ Dist('group1', { 'group1': {ep.name: ep for ep in iter_entry_points(group='group1')} }), Dist('group2', {'group2': {'extC': EntryPoint()}}), Dist('other_group', {'other_group': {'extD': EntryPoint()}}), ] def test_all_entry_points(): with patch( 'colcon_core.entry_point.iter_entry_points', side_effect=iter_entry_points ): with patch( 'colcon_core.entry_point.WorkingSet', side_effect=working_set ): # successfully load a known entry point assert set(get_all_entry_points().keys()) == {'group1', 'group2'} assert set(get_all_entry_points()['group1'].keys()) == \ {'extA', 'extB'} assert len(get_all_entry_points()['group1']['extA']) == 2 assert isinstance( get_all_entry_points()['group1']['extA'][1], EntryPoint) assert get_all_entry_points()['group1']['extA'][1] .group_name == \ 'group1' assert get_all_entry_points()['group1']['extA'][1] .name == 'extA' def test_entry_point_blocklist(): # successful loading of entry point without a blocklist with patch( 'colcon_core.entry_point.iter_entry_points', side_effect=iter_entry_points ): with patch( 'colcon_core.entry_point.WorkingSet', side_effect=working_set ): entry_points = get_entry_points('group1') assert 'extA' in entry_points.keys() entry_point = entry_points['extA'] assert entry_point.group_name == 'group1' assert entry_point.name == 'extA' entry_point.load = Mock() assert isinstance(entry_point, EntryPoint) load_entry_point(entry_point) assert entry_point.load.call_count == 1 # successful loading of entry point not in blocklist entry_point.load.reset_mock() with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST=os.pathsep.join([ 'group1.extB', 'group2.extC']) ): load_entry_point(entry_point) assert entry_point.load.call_count == 1 # entry point in a blocked group can't be loaded 
entry_point.load.reset_mock() with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST='group1'): with pytest.raises(RuntimeError) as e: load_entry_point(entry_point) assert 'The entry point group name is listed in the environment ' \ 'variable' in str(e.value) assert entry_point.load.call_count == 0 # entry point listed in the blocklist can't be loaded with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST=os.pathsep.join([ 'group1.extA', 'group1.extB']) ): with pytest.raises(RuntimeError) as e: load_entry_point(entry_point) assert 'The entry point name is listed in the environment variable' \ in str(e.value) assert entry_point.load.call_count == 0 # mock entry points class EntryPointRaisingException: group_name = 'group' name = 'exception' def load(self): raise Exception('entry point raising exception') class EntryPointRaisingRuntimeError: group_name = 'group' name = 'runtime_error' def load(self): raise RuntimeError('entry point raising runtime error') class EntryPointSuccess: group_name = 'group' name = 'success' def load(self): pass @patch( 'colcon_core.entry_point.get_entry_points', return_value={ EntryPointRaisingException.name: EntryPointRaisingException(), EntryPointRaisingRuntimeError.name: EntryPointRaisingRuntimeError(), EntryPointSuccess.name: EntryPointSuccess(), }) def test_load_entry_points_with_exception(_): with patch('colcon_core.entry_point.logger.error') as error: extensions = load_entry_points('group') # the entry point raising an exception different than a runtime error # results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert "Exception loading extension 'group.exception'" \ in error.call_args[0][0] assert 'entry point raising exception' in error.call_args[0][0] # neither of the entry points was loaded successfully assert extensions == {'success': None} colcon-core-0.17.1/test/test_environment.py000066400000000000000000000105331465053734400207540ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.environment import create_environment_hooks from colcon_core.environment import create_environment_scripts from colcon_core.environment import EnvironmentExtensionPoint from colcon_core.environment import get_environment_extensions from colcon_core.shell import get_shell_extensions from colcon_core.shell import ShellExtensionPoint import pytest from .extension_point_context import ExtensionPointContext def test_extension_interface(): extension = EnvironmentExtensionPoint() with pytest.raises(NotImplementedError): extension.create_environment_hooks(None, None) class Extension1(EnvironmentExtensionPoint): def create_environment_hooks(self, prefix_path, pkg_name): return [ f'{prefix_path}/share/{pkg_name}/hook/one.ext', f'{prefix_path}/share/{pkg_name}/hook/two.ext', ] class Extension2(EnvironmentExtensionPoint): PRIORITY = 110 def test_get_environment_extensions(): with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_environment_extensions() assert list(extensions.keys()) == ['extension2', 'extension1'] class Extension3(ShellExtensionPoint): pass class Extension4(ShellExtensionPoint): pass def test_create_environment_scripts(): with TemporaryDirectory(prefix='test_colcon_') as basepath: pkg = Mock() pkg.name = 'name' pkg.dependencies = {} pkg.hooks = [] args = Mock() args.install_base = 
basepath # no hooks at all with patch( 'colcon_core.environment.create_environment_hooks', return_value=[] ): with patch( 'colcon_core.environment.get_shell_extensions', return_value={} ): create_environment_scripts(pkg, args) pkg.hooks = [os.path.join(basepath, 'subA')] with ExtensionPointContext( extension3=Extension3, extension4=Extension4 ): extensions = get_shell_extensions() # one invalid return value, one check correct hooks argument extensions[100]['extension3'].create_package_script = Mock() extensions[100]['extension4'].create_package_script = Mock( return_value=None) with patch('colcon_core.environment.logger.error') as error: create_environment_scripts( pkg, args, default_hooks=[('subB', )], additional_hooks=[['subC', 'arg1', 'arg2']]) # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in shell extension 'extension3': " 'create_package_script() should return a list\n') # check for correct hooks argument mock = extensions[100]['extension4'].create_package_script assert mock.call_count == 1 assert len(mock.call_args[0]) == 3 assert mock.call_args[0][0] == Path(args.install_base) assert mock.call_args[0][1] == pkg.name hook_tuples = mock.call_args[0][2] assert len(hook_tuples) == 3 assert hook_tuples[0] == ('subB', ()) assert hook_tuples[1] == ('subC', ['arg1', 'arg2']) assert hook_tuples[2] == ('subA', []) def test_create_environment_hooks(): with TemporaryDirectory(prefix='test_colcon_') as basepath: with ExtensionPointContext( extension1=Extension1, extension2=Extension2 ): with patch('colcon_core.environment.logger.error') as error: hooks = create_environment_hooks(basepath, 'pkg_name') assert len(hooks) == 2 assert hooks[0] == f'{basepath}/share/pkg_name/hook/one.ext' assert hooks[1] == f'{basepath}/share/pkg_name/hook/two.ext' # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in environment extension 'extension2': \n") colcon-core-0.17.1/test/test_environment_path.py000066400000000000000000000030501465053734400217640ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import patch from colcon_core.environment.path import PathEnvironment def test_path(): extension = PathEnvironment() with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) with patch( 'colcon_core.shell.create_environment_hook', return_value=['/some/hook', '/other/hook'] ): # bin directory does not exist hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # bin directory exists, but empty (prefix_path / 'bin').mkdir() hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # bin directory exists, but only subdirectories (prefix_path / 'bin' / 'subdir').mkdir() hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # bin directory exists, with file (prefix_path / 'bin' / 'hook').write_text('') hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 2 # bin directory exists, with files (prefix_path / 'bin' / 'hook2').write_text('') hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 2 
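# ---------------------------------------------------------------------------
# Editor's note: illustrative usage sketch, not part of the original test
# suite.  It shows how the EnvironmentContext and ExtensionPointContext
# helpers defined in this test package are typically combined: the former
# temporarily overrides environment variables, the latter swaps in a fixed
# set of extension classes without touching the installed entry points.
# The variable name MY_FAKE_VARIABLE and the test name below are
# hypothetical; PathEnvironment and get_environment_extensions are the real
# colcon-core APIs already exercised by the surrounding tests.  The sketch
# assumes MY_FAKE_VARIABLE is not set in the calling environment.
# ---------------------------------------------------------------------------


def test_helper_usage_sketch():
    import os

    from colcon_core.environment import get_environment_extensions
    from colcon_core.environment.path import PathEnvironment

    from .environment_context import EnvironmentContext
    from .extension_point_context import ExtensionPointContext

    assert 'MY_FAKE_VARIABLE' not in os.environ
    # temporarily define an environment variable for the enclosed block
    with EnvironmentContext(MY_FAKE_VARIABLE='42'):
        assert os.environ['MY_FAKE_VARIABLE'] == '42'
        # register PathEnvironment as the only extension in its group,
        # bypassing whatever extensions are actually installed
        with ExtensionPointContext(path=PathEnvironment):
            extensions = get_environment_extensions()
            assert list(extensions.keys()) == ['path']
    # on exit the variable is restored, i.e. removed again in this case
    assert 'MY_FAKE_VARIABLE' not in os.environ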
colcon-core-0.17.1/test/test_environment_pythonpath.py000066400000000000000000000020751465053734400232340ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import patch from colcon_core.environment.pythonpath import PythonPathEnvironment from colcon_core.python_install_path import get_python_install_path def test_pythonpath(): extension = PythonPathEnvironment() with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) with patch( 'colcon_core.shell.create_environment_hook', return_value=['/some/hook', '/other/hook'] ): # Python path does not exist hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # Python path exists python_path = get_python_install_path( 'purelib', {'base': prefix_path}) python_path.mkdir(parents=True) hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 2 colcon-core-0.17.1/test/test_environment_pythonscriptspath.py000066400000000000000000000033061465053734400246420ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import patch from colcon_core.environment.path import PythonScriptsPathEnvironment from colcon_core.python_install_path import get_python_install_path def test_path(): extension = PythonScriptsPathEnvironment() with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) scripts_path = get_python_install_path( 'scripts', {'base': prefix_path}) with patch( 'colcon_core.shell.create_environment_hook', return_value=['/some/hook', '/other/hook'] ): # bin directory does not exist hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # bin directory exists, but empty scripts_path.mkdir() hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # bin directory exists, but only subdirectories (scripts_path / 'subdir').mkdir() hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 0 # bin directory exists, with file (scripts_path / 'hook').write_text('') hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 2 # bin directory exists, with files (scripts_path / 'hook2').write_text('') hooks = extension.create_environment_hooks(prefix_path, 'pkg_name') assert len(hooks) == 2 colcon-core-0.17.1/test/test_event_command.py000066400000000000000000000036221465053734400212300ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import sys from colcon_core.event.command import Command from .environment_context import EnvironmentContext def test_command_to_string(): cmd = ['executable', 'arg1', 'arg2'] cwd = '/some/path' command = Command(cmd, cwd=cwd) assert command.to_string() == \ "Invoking command in '/some/path': executable arg1 arg2" env = { '_TEST_NEW_KEY': 'new', '_TEST_SAME_VALUE': 'same', '_TEST_DIFFERENT_VALUE': 'different', '_TEST_PREPENDED_VALUE': 'before-base', '_TEST_APPENDED_VALUE': 'base-after', } if sys.platform != 'win32': env['PWD'] = '/other/path' command = Command(cmd, cwd=cwd, env=env) expected = "Invoking command in '/some/path': " \ '_TEST_APPENDED_VALUE=${_TEST_APPENDED_VALUE}-after ' \ '_TEST_DIFFERENT_VALUE=different ' \ 
'_TEST_NEW_KEY=new ' \ '_TEST_PREPENDED_VALUE=before-${_TEST_PREPENDED_VALUE} ' \ 'executable arg1 arg2' if sys.platform == 'win32': expected = expected.replace('${', '%') expected = expected.replace('}', '%') with EnvironmentContext( _TEST_SAME_VALUE='same', _TEST_DIFFERENT_VALUE='same', _TEST_PREPENDED_VALUE='base', _TEST_APPENDED_VALUE='base', ): assert command.to_string() == expected cmd = ['executable', '&&', 'other exec'] command = Command(cmd, cwd=cwd) assert command.to_string() == \ "Invoking command in '/some/path': executable && other exec" command = Command(cmd, cwd=cwd, shell=True) if sys.platform != 'win32': assert command.to_string() == \ "Invoking command in '/some/path': executable && 'other exec'" else: assert command.to_string() == \ "Invoking command in '/some/path': " \ 'executable && "other exec"' colcon-core-0.17.1/test/test_event_handler.py000066400000000000000000000102021465053734400212170ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import argparse from unittest.mock import Mock from colcon_core.event_handler import add_event_handler_arguments from colcon_core.event_handler import apply_event_handler_arguments from colcon_core.event_handler import EventHandlerExtensionPoint from colcon_core.event_handler import format_duration from colcon_core.event_handler import get_event_handler_extensions import pytest from .extension_point_context import ExtensionPointContext class Extension1(EventHandlerExtensionPoint): """Some doc block.""" def __init__(self): super().__init__() self.enabled = False class Extension2(EventHandlerExtensionPoint): """Other doc block.""" PRIORITY = 90 class Extension3(EventHandlerExtensionPoint): pass def test_extension_interface(): extension = Extension1() with pytest.raises(NotImplementedError): extension(None) def test_get_shell_extensions(): with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3 ): extensions = get_event_handler_extensions(context=None) assert list(extensions.keys()) == [ 'extension1', 'extension3', 'extension2'] def test_add_event_handler_arguments(): parser = argparse.ArgumentParser() with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3 ): add_event_handler_arguments(parser) text = parser.format_help() assert 'extension1- extension2+ extension3+' in text assert '* extension1:' in text assert 'Some doc block' in text assert '* extension2:' in text assert 'Other doc block' in text def test_apply_event_handler_arguments(): with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, ): extensions = get_event_handler_extensions(context=None) assert extensions['extension1'].enabled is False assert extensions['extension2'].enabled is True assert extensions['extension3'].enabled is True extensions['extension3'].enabled = None args = Mock() args.event_handlers = ['extension1+', 'extension2-'] apply_event_handler_arguments(extensions, args) assert extensions['extension1'].enabled is True assert extensions['extension2'].enabled is False assert extensions['extension3'].enabled is None def test_format_duration(): # seconds below 10 with two decimal points assert format_duration(0) == '0.00s' assert format_duration(0.001) == '0.00s' assert format_duration(0.004999) == '0.00s' assert format_duration(0.005) == '0.01s' assert format_duration(9.99) == '9.99s' assert format_duration(9.994999) == '9.99s' assert format_duration(9.995) == '9.99s' # floating 
point imprecision # seconds between 10 and 60 with one decimal points assert format_duration(9.995001) == '10.0s' assert format_duration(10) == '10.0s' assert format_duration(59.94) == '59.9s' # seconds above one minute with no decimal points assert format_duration(59.95) == '1min 0s' assert format_duration(83.45) == '1min 23s' assert format_duration(119.49) == '1min 59s' assert format_duration(119.5) == '2min 0s' assert format_duration(3599.4) == '59min 59s' # seconds above one hour with no decimal points assert format_duration(3599.5) == '1h 0min 0s' assert format_duration(5025.123) == '1h 23min 45s' assert format_duration(3599999) == '999h 59min 59s' # zero fixed decimal point assert format_duration(1.5, fixed_decimal_points=0) == '2s' assert format_duration(12.345, fixed_decimal_points=0) == '12s' # one fixed decimal points assert format_duration(1.5, fixed_decimal_points=1) == '1.5s' assert format_duration(12.345, fixed_decimal_points=1) == '12.3s' assert format_duration(34.5, fixed_decimal_points=1) == '34.5s' assert format_duration(3599.4, fixed_decimal_points=1) == '59min 59.4s' assert format_duration(4984.5, fixed_decimal_points=1) == '1h 23min 4.5s' # raise for negative parameter with pytest.raises(ValueError): format_duration(-1.0) colcon-core-0.17.1/test/test_event_handler_console_direct.py000066400000000000000000000037741465053734400243130ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from unittest.mock import patch from colcon_core.event.output import StderrLine from colcon_core.event.output import StdoutLine from colcon_core.event_handler.console_direct import ConsoleDirectEventHandler import pytest def test_console_direct(): with patch('sys.stdout') as stdout: extension = ConsoleDirectEventHandler() event = StdoutLine(b'bytes line') extension((event, None)) assert stdout.buffer.write.call_count == 1 event = StdoutLine('string line') extension((event, None)) assert stdout.write.call_count == 1 stdout.buffer.write.reset_mock() stdout.write.reset_mock() extension(('unknown', None)) assert stdout.buffer.write.call_count == 0 assert stdout.write.call_count == 0 with patch('sys.stderr') as stderr: extension = ConsoleDirectEventHandler() event = StderrLine(b'bytes line') extension((event, None)) assert stderr.buffer.write.call_count == 1 event = StderrLine('string line') extension((event, None)) assert stderr.write.call_count == 1 stderr.buffer.write.reset_mock() stderr.write.reset_mock() extension(('unknown', None)) assert stderr.buffer.write.call_count == 0 assert stderr.write.call_count == 0 with patch('sys.stdout') as stdout: stdout.buffer.write.side_effect = BrokenPipeError() stdout.write.side_effect = BrokenPipeError() extension = ConsoleDirectEventHandler() event = StdoutLine(b'bytes line') with pytest.raises(BrokenPipeError): extension((event, None)) assert stdout.buffer.write.call_count == 1 event = StdoutLine(b'bytes line') extension((event, None)) assert stdout.buffer.write.call_count == 1 event = StdoutLine('string line') extension((event, None)) assert stdout.write.call_count == 0 colcon-core-0.17.1/test/test_event_handler_console_start_end.py000066400000000000000000000040541465053734400250140ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from unittest.mock import patch from colcon_core.event.job import JobEnded from colcon_core.event.job import JobStarted from colcon_core.event.test import TestFailure from 
colcon_core.event_handler.console_start_end \ import ConsoleStartEndEventHandler from colcon_core.subprocess import SIGINT_RESULT def test_console_start_end(): extension = ConsoleStartEndEventHandler() with patch('sys.stdout') as stdout: with patch('sys.stderr') as stderr: event = JobStarted('idA') extension((event, None)) assert stdout.write.call_count == 2 assert stderr.write.call_count == 0 stdout.write.reset_mock() # success event = JobEnded('idA', 0) extension((event, None)) assert stdout.write.call_count == 2 assert stderr.write.call_count == 0 stdout.write.reset_mock() # aborted event = JobEnded('idA', SIGINT_RESULT) extension((event, None)) assert stdout.write.call_count == 2 assert stderr.write.call_count == 0 stdout.write.reset_mock() # failure event = JobEnded('idA', 1) extension((event, None)) assert stderr.write.call_count == 2 assert stdout.write.call_count == 0 stderr.write.reset_mock() # test failures event = TestFailure('idA') extension((event, None)) event = JobEnded('idA', 0) extension((event, None)) assert stdout.write.call_count == 2 assert len(stdout.write.call_args_list[0][0]) == 1 assert '[ with test failures ]' in \ stdout.write.call_args_list[0][0][0] assert stderr.write.call_count == 0 stdout.write.reset_mock() extension(('unknown', None)) assert stdout.write.call_count == 0 assert stderr.write.call_count == 0 colcon-core-0.17.1/test/test_event_handler_log_command.py000066400000000000000000000015121465053734400235620ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from unittest.mock import patch from colcon_core.event.command import Command from colcon_core.event.command import CommandEnded from colcon_core.event_handler.log_command import LogCommandEventHandler def test_console_direct(): extension = LogCommandEventHandler() with patch('colcon_core.event_handler.log_command.logger.debug') as debug: event = Command(['executable'], cwd='/some/path') extension((event, None)) assert debug.call_count == 1 debug.reset_mock() event = CommandEnded(['executable'], cwd='/some/path', returncode=1) extension((event, None)) assert debug.call_count == 1 debug.reset_mock() extension(('unknown', None)) assert debug.call_count == 0 colcon-core-0.17.1/test/test_event_reactor.py000066400000000000000000000100531465053734400212450ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from queue import Queue import time from unittest.mock import Mock from unittest.mock import patch from colcon_core.event.timer import TimerEvent from colcon_core.event_handler import EventHandlerExtensionPoint from colcon_core.event_reactor import create_event_reactor from colcon_core.event_reactor import EventReactorShutdown from .extension_point_context import ExtensionPointContext class CustomExtension(EventHandlerExtensionPoint): def __init__(self): super().__init__() self.events = [] def __call__(self, event): self.events.append(event) class Extension1(CustomExtension): pass class Extension2(CustomExtension): def __init__(self): super().__init__() self.enabled = False class Extension3(CustomExtension): def __call__(self, event): super().__call__(event) if event[0] == 'first': raise ValueError("ValueError for '%s'" % event[0]) if event[0] == 'third': raise RuntimeError("RuntimeError for '%s'" % event[0]) def test_create_event_reactor(): context = Mock() context.args = Mock() context.args.event_handlers = [] with ExtensionPointContext( extension1=Extension1, extension2=Extension2, 
extension3=Extension3 ): event_reactor = create_event_reactor(context) with event_reactor: queue = event_reactor.get_queue() assert isinstance(queue, Queue) # use larger interval to prevent different timing to effect the results event_reactor.TIMER_INTERVAL = 1.0 # add a few dummy events with patch('colcon_core.event_reactor.logger.error') as error: assert error.call_count == 0 queue.put(('first', None)) queue.join() assert error.call_count == 1 queue.put(('second', None)) queue.join() assert error.call_count == 1 queue.put(('third', None)) queue.join() assert error.call_count == 2 # 1 timer event, 3 mock string events assert len(event_reactor._observers[0].events) == 4 assert len(event_reactor._observers[1].events) == 4 # both observers got the timer event assert isinstance(event_reactor._observers[0].events[0][0], TimerEvent) assert isinstance(event_reactor._observers[1].events[0][0], TimerEvent) # both observers got the 3 mock string events assert event_reactor._observers[0].events[1:] == \ [('first', None), ('second', None), ('third', None)] assert event_reactor._observers[1].events[1:] == \ [('first', None), ('second', None), ('third', None)] # the raised exception is catched and results in an error message assert error.call_count == 2 assert len(error.call_args_list[0][0]) == 1 assert error.call_args_list[0][0][0].startswith( "Exception in event handler extension 'extension3': " "ValueError for 'first'\n") assert len(error.call_args_list[1][0]) == 1 assert error.call_args_list[1][0][0].startswith( "Exception in event handler extension 'extension3': " "RuntimeError for 'third'") # wait for another timer event to be generated time.sleep(1.5 * event_reactor.TIMER_INTERVAL) assert len(event_reactor._observers[0].events) == 5 assert len(event_reactor._observers[1].events) == 5 assert isinstance( event_reactor._observers[0].events[-1][0], TimerEvent) assert isinstance( event_reactor._observers[1].events[-1][0], TimerEvent) assert len(event_reactor._observers[0].events) == 6 assert len(event_reactor._observers[1].events) == 6 assert isinstance( event_reactor._observers[0].events[-1][0], EventReactorShutdown) assert isinstance( event_reactor._observers[1].events[-1][0], EventReactorShutdown) # no harm in flushing after the thread has been joined event_reactor.flush() colcon-core-0.17.1/test/test_executor.py000066400000000000000000000206121465053734400202450ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from argparse import ArgumentParser from asyncio import CancelledError from unittest.mock import Mock from unittest.mock import patch from colcon_core.event.job import JobEnded from colcon_core.event.job import JobQueued from colcon_core.event.job import JobSkipped from colcon_core.event.job import JobStarted from colcon_core.event.output import StderrLine from colcon_core.executor import add_executor_arguments from colcon_core.executor import DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE from colcon_core.executor import execute_jobs from colcon_core.executor import ExecutorExtensionPoint from colcon_core.executor import get_executor_extensions from colcon_core.executor import Job from colcon_core.executor import OnError from colcon_core.subprocess import SIGINT_RESULT import pytest from .environment_context import EnvironmentContext from .extension_point_context import ExtensionPointContext from .run_until_complete import run_until_complete class Task: def __init__(self): self.return_value = None def set_context(self, *, context): pass 
async def __call__(self, *args, **kwargs): if isinstance(self.return_value, BaseException): raise self.return_value return self.return_value def test_job(): task = Task() task_context = Mock() task_context.dependencies = Mock() task_context.pkg = Mock() task_context.pkg.name = 'name' job = Job( identifier='id', dependencies=set(), task=task, task_context=task_context) assert str(job) == 'id' events = [] event_queue = Mock() event_queue.put = lambda event: events.append(event) job.set_event_queue(event_queue) assert len(events) == 1 assert isinstance(events[-1][0], JobQueued) assert events[-1][0].identifier == 'name' assert events[-1][0].dependencies == task_context.dependencies assert events[-1][1] == job # successful task rc = run_until_complete(job()) assert rc == 0 assert len(events) == 3 assert isinstance(events[-2][0], JobStarted) assert events[-2][0].identifier == 'name' assert events[-2][1] == job assert isinstance(events[-1][0], JobEnded) assert events[-1][0].identifier == 'name' assert events[-1][0].rc == 0 assert events[-1][1] == job # canceled task job.returncode = None task.return_value = CancelledError() rc = run_until_complete(job()) assert rc is SIGINT_RESULT assert len(events) == 5 assert isinstance(events[-2][0], JobStarted) assert events[-2][0].identifier == 'name' assert events[-2][1] == job assert isinstance(events[-1][0], JobEnded) assert events[-1][0].identifier == 'name' assert events[-1][0].rc is SIGINT_RESULT assert events[-1][1] == job # task raising exception job.returncode = None task.return_value = RuntimeError('custom exception') with pytest.raises(RuntimeError): run_until_complete(job()) assert len(events) == 8 assert isinstance(events[-3][0], JobStarted) assert events[-3][0].identifier == 'name' assert events[-3][1] == job assert isinstance(events[-2][0], StderrLine) assert events[-2][0].line.endswith(b'\nRuntimeError: custom exception\n') assert events[-2][1] == job assert isinstance(events[-1][0], JobEnded) assert events[-1][0].identifier == 'name' assert events[-1][0].rc == 1 assert events[-1][1] == job # override task return code job.returncode = 2 task.return_value = 0 rc = run_until_complete(job()) assert rc == 2 assert len(events) == 10 assert isinstance(events[-2][0], JobStarted) assert events[-2][0].identifier == 'name' assert events[-2][1] == job assert isinstance(events[-1][0], JobEnded) assert events[-1][0].identifier == 'name' assert events[-1][0].rc == 2 assert events[-1][1] == job def test_interface(): interface = ExecutorExtensionPoint() interface._flush() event_controller = Mock() interface.set_event_controller(event_controller) interface._flush() assert event_controller.flush.call_count == 1 class Extension1(ExecutorExtensionPoint): """Class documentation.""" class Extension2(ExecutorExtensionPoint): PRIORITY = 110 class Extension3(ExecutorExtensionPoint): pass def test_add_executor_arguments(): parser = ArgumentParser() # extensions with the same priority with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3 ): with pytest.raises(AssertionError) as e: add_executor_arguments(parser) assert 'Executor extensions must have unique priorities' in \ str(e.value) # no extensions with ExtensionPointContext(): with pytest.raises(AssertionError) as e: add_executor_arguments(parser) assert 'No executor extensions found' in str(e.value) # choose executor by environment variable with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_executor_extensions() 
extensions[110]['extension2'].add_arguments = Mock( side_effect=RuntimeError('custom exception')) extensions[100]['extension1'].add_arguments = Mock(return_value=None) env = {DEFAULT_EXECUTOR_ENVIRONMENT_VARIABLE.name: 'extension1'} with EnvironmentContext(**env): with patch('colcon_core.executor.logger.error') as error: add_executor_arguments(parser) assert extensions[100]['extension1'].add_arguments.call_count == 1 # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in executor extension 'extension2': custom exception\n") args = parser.parse_args([]) assert args.executor == 'extension1' # choose default executor parser = ArgumentParser() with ExtensionPointContext(extension1=Extension1, extension2=Extension2): add_executor_arguments(parser) args = parser.parse_args([]) assert args.executor == 'extension2' def test_execute_jobs(): context = Mock() context.args = Mock() context.args.event_handlers = None task_context = Mock() task_context.pkg = Mock() task_context.pkg.name = 'name' jobs = { 'one': Job( identifier='id', dependencies=set(), task=None, task_context=task_context)} event_reactor = Mock() event_reactor.__enter__ = lambda self: self event_reactor.__exit__ = lambda self, *args: None with patch( 'colcon_core.executor.create_event_reactor', return_value=event_reactor ): with ExtensionPointContext( extension1=Extension1, extension2=Extension2 ): # no extension selected with pytest.raises(AssertionError): execute_jobs(context, jobs) # execute method not implemented and sending skipped job event context.args.executor = 'extension2' with patch('colcon_core.executor.logger.error') as error: rc = execute_jobs(context, jobs) assert rc == 1 assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in executor extension 'extension2': \n") assert event_reactor.get_queue().put.call_count == 2 assert isinstance( event_reactor.get_queue().put.call_args_list[0][0][0][0], JobQueued) assert isinstance( event_reactor.get_queue().put.call_args_list[1][0][0][0], JobSkipped) # successful execution event_reactor.get_queue().put.reset_mock() jobs['one'].returncode = 0 extensions = get_executor_extensions() extensions[110]['extension2'].execute = \ lambda args, jobs, on_error: 0 callback = Mock() rc = execute_jobs( context, jobs, on_error=OnError.interrupt, pre_execution_callback=callback) assert rc == 0 assert event_reactor.get_queue().put.call_count == 1 assert isinstance( event_reactor.get_queue().put.call_args[0][0][0], JobQueued) assert callback.call_count == 1 colcon-core-0.17.1/test/test_executor_sequential.py000066400000000000000000000114041465053734400224760ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import asyncio from collections import OrderedDict import os import signal import sys from threading import Thread import time from colcon_core.executor import Job from colcon_core.executor import OnError from colcon_core.executor.sequential import SequentialExecutor import pytest ran_jobs = [] class Job1(Job): def __init__(self): super().__init__( identifier='job1', dependencies=set(), task=None, task_context=None) async def __call__(self, *args, **kwargs): global ran_jobs ran_jobs.append(self.identifier) class Job2(Job): def __init__(self): super().__init__( identifier='job2', dependencies=set(), task=None, task_context=None) async def 
__call__(self, *args, **kwargs): return 2 class Job3(Job): def __init__(self): super().__init__( identifier='job3', dependencies=set(), task=None, task_context=None) async def __call__(self, *args, **kwargs): raise RuntimeError('custom exception') class Job4(Job): def __init__(self): super().__init__( identifier='job4', dependencies=set(), task=None, task_context=None) async def __call__(self, *args, **kwargs): global ran_jobs ran_jobs.append(self.identifier) class Job5(Job): def __init__(self): super().__init__( identifier='job5', dependencies=set(), task=None, task_context=None) async def __call__(self, *args, **kwargs): return 5 class Job6(Job): def __init__(self): super().__init__( identifier='job6', dependencies=('job2', ), task=None, task_context=None) async def __call__(self, *args, **kwargs): global ran_jobs ran_jobs.append(self.identifier) class Job7(Job): def __init__(self): super().__init__( identifier='job7', dependencies=('job1', ), task=None, task_context=None) async def __call__(self, *args, **kwargs): global ran_jobs ran_jobs.append(self.identifier) def test_sequential(): global ran_jobs extension = SequentialExecutor() args = None jobs = OrderedDict() jobs['one'] = Job1() # success rc = extension.execute(args, jobs) assert rc == 0 assert ran_jobs == ['job1'] ran_jobs.clear() # return error code jobs['two'] = Job2() jobs['four'] = Job4() rc = extension.execute(args, jobs) assert rc == 2 assert ran_jobs == ['job1'] ran_jobs.clear() rc = extension.execute(args, jobs, on_error=OnError.skip_pending) assert rc == 2 assert ran_jobs == ['job1'] ran_jobs.clear() # continue after error, keeping first error code jobs['five'] = Job5() rc = extension.execute(args, jobs, on_error=OnError.continue_) assert rc == 2 assert ran_jobs == ['job1', 'job4'] ran_jobs.clear() # continue but skip downstream jobs['six'] = Job6() jobs['seven'] = Job7() rc = extension.execute(args, jobs, on_error=OnError.skip_downstream) assert rc == 2 assert ran_jobs == ['job1', 'job4', 'job7'] ran_jobs.clear() # exception jobs['two'] = Job3() rc = extension.execute(args, jobs) assert rc == 1 assert ran_jobs == ['job1'] ran_jobs.clear() class Job8(Job): def __init__(self): super().__init__( identifier='job8', dependencies=set(), task=None, task_context=None) async def __call__(self, *args, **kwargs): global ran_jobs await asyncio.sleep(3) ran_jobs.append(self.identifier) @pytest.fixture def restore_sigint_handler(): handler = signal.getsignal(signal.SIGINT) yield signal.signal(signal.SIGINT, handler) def test_sequential_keyboard_interrupt(restore_sigint_handler): global ran_jobs if sys.platform == 'win32': pytest.skip( 'Skipping keyboard interrupt test since the signal will cause ' 'pytest to return failure even if no tests fail.') extension = SequentialExecutor() args = None jobs = OrderedDict() jobs['one'] = Job1() jobs['aborted'] = Job8() jobs['four'] = Job4() def delayed_sigint(): time.sleep(0.1) # Note: a real Ctrl-C would signal the whole process group os.kill( os.getpid(), signal.SIGINT if sys.platform != 'win32' else signal.CTRL_C_EVENT) if sys.platform == 'win32': os.kill(os.getpid(), signal.CTRL_C_EVENT) thread = Thread(target=delayed_sigint) thread.start() try: rc = extension.execute(args, jobs) finally: thread.join() assert rc == signal.SIGINT ran_jobs.clear() colcon-core-0.17.1/test/test_extension_point.py000066400000000000000000000213741465053734400216420ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Copyright 2023 Open Source Robotics Foundation, Inc. 
# Licensed under the Apache License, Version 2.0 import os from unittest.mock import DEFAULT from unittest.mock import patch try: from importlib.metadata import Distribution except ImportError: # TODO: Drop this with Python 3.7 support from importlib_metadata import Distribution from colcon_core.environment_variable import EnvironmentVariable from colcon_core.extension_point import clear_entry_point_cache from colcon_core.extension_point import EntryPoint from colcon_core.extension_point import EXTENSION_POINT_GROUP_NAME from colcon_core.extension_point import get_all_extension_points from colcon_core.extension_point import get_extension_points from colcon_core.extension_point import load_extension_point from colcon_core.extension_point import load_extension_points from colcon_core.extension_point import override_blocklist_variable import pytest from .environment_context import EnvironmentContext class _FakeDistribution(Distribution): def __init__(self, entry_points): entry_points_spec = [] for group_name, group_members in entry_points.items(): entry_points_spec.append(f'[{group_name}]') for member_name, member_value in group_members: entry_points_spec.append(f'{member_name} = {member_value}') entry_points_spec.append('') self._files = { 'PKG-INFO': f'Name: dist-{id(self)}\nVersion: 0.0.0\n', 'entry_points.txt': '\n'.join(entry_points_spec) + '\n', } def read_text(self, filename): return self._files.get(filename) def locate_file(self, path): return path def _distributions(): yield _FakeDistribution({ EXTENSION_POINT_GROUP_NAME: [('group1', 'g1')], 'group1': [('extA', 'eA'), ('extB', 'eB')], }) yield _FakeDistribution({ EXTENSION_POINT_GROUP_NAME: [('group2', 'g2')], 'group2': [('extC', 'eC')], }) yield _FakeDistribution({ 'groupX': [('extD', 'eD')], }) def _entry_points(): for dist in _distributions(): yield from dist.entry_points def test_all_extension_points(): with patch( 'colcon_core.extension_point.entry_points', side_effect=_entry_points ): with patch( 'colcon_core.extension_point.distributions', side_effect=_distributions ): clear_entry_point_cache() # successfully load a known entry point extension_points = get_all_extension_points() assert set(extension_points.keys()) == { EXTENSION_POINT_GROUP_NAME, 'group1', 'group2', } assert set(extension_points['group1'].keys()) == {'extA', 'extB'} assert extension_points['group1']['extA'][0] == 'eA' def test_extension_point_blocklist(): # successful loading of extension point without a blocklist with patch( 'colcon_core.extension_point.entry_points', side_effect=_entry_points ): with patch( 'colcon_core.extension_point.distributions', side_effect=_distributions ): clear_entry_point_cache() extension_points = get_extension_points('group1') assert 'extA' in extension_points.keys() extension_point = extension_points['extA'] assert extension_point == 'eA' with patch.object(EntryPoint, 'load', return_value=None) as load: clear_entry_point_cache() load_extension_point('extA', 'eA', 'group1') assert load.call_count == 1 # successful loading of entry point not in blocklist load.reset_mock() with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST=os.pathsep.join([ 'group1.extB', 'group2.extC']) ): clear_entry_point_cache() load_extension_point('extA', 'eA', 'group1') assert load.call_count == 1 # entry point in a blocked group can't be loaded load.reset_mock() with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST='group1'): clear_entry_point_cache() with pytest.raises(RuntimeError) as e: load_extension_point('extA', 'eA', 'group1') assert 'The entry point 
group name is listed in the environment ' \ 'variable' in str(e.value) assert load.call_count == 0 # entry point listed in the blocklist can't be loaded with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST=os.pathsep.join([ 'group1.extA', 'group1.extB']) ): clear_entry_point_cache() with pytest.raises(RuntimeError) as e: load_extension_point('extA', 'eA', 'group1') assert 'The entry point name is listed in the environment ' \ 'variable' in str(e.value) assert load.call_count == 0 def test_extension_point_blocklist_override(): with patch.object(EntryPoint, 'load', return_value=None) as load: clear_entry_point_cache() my_extension_blocklist = EnvironmentVariable( 'MY_EXTENSION_BLOCKLIST', 'Foo bar baz') override_blocklist_variable(my_extension_blocklist) try: # entry point in default blocklist variable can be loaded load.reset_mock() with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST='group1'): clear_entry_point_cache() load_extension_point('extA', 'eA', 'group1') assert load.call_count == 1 # entry point in custom blocklist variable can't be loaded load.reset_mock() with EnvironmentContext(MY_EXTENSION_BLOCKLIST='group1'): clear_entry_point_cache() with pytest.raises(RuntimeError) as e: load_extension_point('extA', 'eA', 'group1') assert 'The entry point group name is listed in the ' \ 'environment variable' in str(e.value) assert load.call_count == 0 finally: override_blocklist_variable(None) # entry point in default blocklist variable can no longer be loaded load.reset_mock() with EnvironmentContext(COLCON_EXTENSION_BLOCKLIST='group1'): clear_entry_point_cache() with pytest.raises(RuntimeError) as e: load_extension_point('extA', 'eA', 'group1') assert 'The entry point group name is listed in the ' \ 'environment variable' in str(e.value) assert load.call_count == 0 def test_redefined_extension_point(): def _duped_distributions(): yield from _distributions() yield _FakeDistribution({ 'group2': [('extC', 'eC-prime')], }) def _duped_entry_points(): for dist in _duped_distributions(): yield from dist.entry_points with patch('colcon_core.extension_point.logger.error') as error: with patch( 'colcon_core.extension_point.entry_points', side_effect=_duped_entry_points ): with patch( 'colcon_core.extension_point.distributions', side_effect=_duped_distributions ): clear_entry_point_cache() extension_points = get_all_extension_points() assert 'eC-prime' == extension_points['group2']['extC'][0] assert error.call_count == 1 error.reset_mock() clear_entry_point_cache() extension_points = get_extension_points('group2') assert 'eC-prime' == extension_points.get('extC') assert error.call_count == 1 def entry_point_load(self, *args, **kwargs): if self.name == 'exception': raise Exception('entry point raising exception') if self.name == 'runtime_error': raise RuntimeError('entry point raising runtime error') elif self.name == 'success': return return DEFAULT @patch.object(EntryPoint, 'load', entry_point_load) @patch( 'colcon_core.extension_point.get_extension_points', return_value={'exception': 'a', 'runtime_error': 'b', 'success': 'c'} ) def test_load_extension_points_with_exception(_): with patch('colcon_core.extension_point.logger.error') as error: extensions = load_extension_points('group') # the extension point raising an exception different than a runtime error # results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert "Exception loading extension 'group.exception'" \ in error.call_args[0][0] assert 'entry point raising exception' in error.call_args[0][0] # 
neither of the extension points was loaded successfully assert extensions == {'success': None} colcon-core-0.17.1/test/test_feature_flags.py000066400000000000000000000065051465053734400212230ustar00rootroot00000000000000# Copyright 2024 Open Source Robotics Foundation, Inc. # Licensed under the Apache License, Version 2.0 import os from unittest.mock import patch from colcon_core.feature_flags import check_implemented_flags from colcon_core.feature_flags import FEATURE_FLAGS_ENVIRONMENT_VARIABLE from colcon_core.feature_flags import get_feature_flags from colcon_core.feature_flags import is_feature_flag_set import pytest _FLAGS_TO_TEST = ( ('foo',), ('foo', 'foo'), ('foo', ''), ('', 'foo'), ('', 'foo', ''), ('foo', 'bar'), ('bar', 'foo'), ('bar', 'foo', 'baz'), ) @pytest.fixture def feature_flags_value(request): env = dict(os.environ) if request.param is not None: env[FEATURE_FLAGS_ENVIRONMENT_VARIABLE.name] = os.pathsep.join( request.param) else: env.pop(FEATURE_FLAGS_ENVIRONMENT_VARIABLE.name, None) mock_env = patch('colcon_core.feature_flags.os.environ', env) request.addfinalizer(mock_env.stop) mock_env.start() return request.param @pytest.fixture def feature_flag_reports(request): reported_uses = patch('colcon_core.feature_flags._REPORTED_USES', set()) request.addfinalizer(reported_uses.stop) reported_uses.start() return reported_uses @pytest.mark.parametrize( 'feature_flags_value', _FLAGS_TO_TEST, indirect=True) @pytest.mark.usefixtures('feature_flags_value', 'feature_flag_reports') def test_flag_is_set(): with patch('colcon_core.feature_flags.logger.warning') as warn: assert is_feature_flag_set('foo') assert warn.call_count == 2 assert is_feature_flag_set('foo') assert warn.call_count == 2 @pytest.mark.parametrize( 'feature_flags_value', (None, *_FLAGS_TO_TEST), indirect=True) @pytest.mark.usefixtures('feature_flags_value', 'feature_flag_reports') def test_flag_not_set(): with patch('colcon_core.feature_flags.logger.warning') as warn: assert not is_feature_flag_set('') assert not is_feature_flag_set('fo') assert not is_feature_flag_set('oo') assert not is_feature_flag_set('fooo') assert not is_feature_flag_set('ffoo') assert not is_feature_flag_set('qux') assert warn.call_count == 0 @pytest.mark.parametrize( 'feature_flags_value', (None, *_FLAGS_TO_TEST), indirect=True) @pytest.mark.usefixtures('feature_flags_value') def test_get_flags(feature_flags_value): assert [ flag for flag in (feature_flags_value or ()) if flag ] == get_feature_flags() @pytest.mark.parametrize('feature_flags_value', (('baz',),), indirect=True) @pytest.mark.usefixtures('feature_flags_value') def test_implemented(): with patch('colcon_core.feature_flags.IMPLEMENTED_FLAGS', {'foo'}): with patch('colcon_core.feature_flags.logger.warning') as warn: assert not is_feature_flag_set('bar') assert warn.call_count == 0 assert is_feature_flag_set('baz') assert warn.call_count == 2 assert is_feature_flag_set('foo') assert warn.call_count == 2 check_implemented_flags() assert warn.call_count == 2 with patch('colcon_core.feature_flags.IMPLEMENTED_FLAGS', {'baz'}): with patch('colcon_core.feature_flags.logger.warning') as warn: check_implemented_flags() assert warn.call_count == 1 colcon-core-0.17.1/test/test_flake8.py000066400000000000000000000034261465053734400175650ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import logging from pathlib import Path import sys import pytest @pytest.mark.flake8 @pytest.mark.linter def test_flake8(): from flake8.api.legacy 
import get_style_guide # avoid debug / info / warning messages from flake8 internals logging.getLogger('flake8').setLevel(logging.ERROR) # for some reason the pydocstyle logger changes to an effective level of 1 # set higher level to prevent the output to be flooded with debug messages logging.getLogger('pydocstyle').setLevel(logging.WARNING) style_guide = get_style_guide( extend_ignore=['D100', 'D104'], show_source=True, ) style_guide_tests = get_style_guide( extend_ignore=['D100', 'D101', 'D102', 'D103', 'D104', 'D105', 'D107'], show_source=True, ) stdout = sys.stdout sys.stdout = sys.stderr # implicitly calls report_errors() report = style_guide.check_files([ str(Path(__file__).parents[1] / 'bin' / 'colcon'), str(Path(__file__).parents[1] / 'colcon_core'), ]) report_tests = style_guide_tests.check_files([ str(Path(__file__).parents[1] / 'test'), ]) sys.stdout = stdout total_errors = report.total_errors + report_tests.total_errors if total_errors: # pragma: no cover # output summary with per-category counts print() if report.total_errors: report._application.formatter.show_statistics(report._stats) if report_tests.total_errors: report_tests._application.formatter.show_statistics( report_tests._stats) print(f'flake8 reported {total_errors} errors', file=sys.stderr) assert not total_errors, f'flake8 reported {total_errors} errors' colcon-core-0.17.1/test/test_location.py000066400000000000000000000225071465053734400202240ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import patch from colcon_core import location from colcon_core.location import _create_symlink from colcon_core.location import create_log_path from colcon_core.location import get_config_path from colcon_core.location import get_log_path from colcon_core.location import set_default_config_path from colcon_core.location import set_default_log_path import pytest from .environment_context import EnvironmentContext @pytest.fixture(scope='module', autouse=True) def reset_global_variables(): yield from colcon_core import location assert location._log_base_path is not None location._config_path = None location._config_path_env_var = None location._create_log_path_called = False location._log_base_path = None location._log_base_path_env_var = None location._log_subdirectory = None def test_config_path(): # use config path config_path = '/some/path'.replace('/', os.sep) with patch('colcon_core.location.logger.info') as info: set_default_config_path(path=config_path) info.assert_called_once_with(f"Using config path '{config_path}'") # use config path if environment variable is not set config_path_env_var = 'TEST_COLCON_CONFIG_PATH' with patch('colcon_core.location.logger.info') as info: set_default_config_path( path=config_path, env_var=config_path_env_var) info.assert_called_once_with(f"Using config path '{config_path}'") # use environment variable when set config_path = '/other/path'.replace('/', os.sep) with EnvironmentContext(**{config_path_env_var: config_path}): assert isinstance(get_config_path(), Path) assert str(get_config_path()) == config_path def test_log_path(): # use log base path log_base_path = '/some/path'.replace('/', os.sep) set_default_log_path(base_path=log_base_path) assert isinstance(get_log_path(), Path) assert str(get_log_path().parent) == log_base_path # use log base path if environment variable is not set log_base_path_env_var = 
'TEST_COLCON_LOG_BASE_PATH' set_default_log_path( base_path=log_base_path, env_var=log_base_path_env_var) assert isinstance(get_log_path(), Path) assert str(get_log_path().parent) == log_base_path # use explicitly passed log base path even if environment variable is set with EnvironmentContext(**{log_base_path_env_var: '/not/used'}): assert isinstance(get_log_path(), Path) assert str(get_log_path().parent) == log_base_path # suppress logging when environment variable is set to devnull set_default_log_path(base_path=os.devnull) assert get_log_path() is None # use environment variable when set and no base path passed log_base_path = '/other/path'.replace('/', os.sep) set_default_log_path( base_path=None, env_var=log_base_path_env_var) with EnvironmentContext(**{log_base_path_env_var: log_base_path}): assert isinstance(get_log_path(), Path) assert str(get_log_path().parent) == log_base_path # use default if not environment variable is set and no base path passed set_default_log_path( base_path=None, env_var=log_base_path_env_var, default='some_default') assert isinstance(get_log_path(), Path) assert str(get_log_path().parent) == 'some_default' # use specific subdirectory subdirectory = 'sub' set_default_log_path( base_path=log_base_path, env_var=log_base_path_env_var, subdirectory=subdirectory) assert isinstance(get_log_path(), Path) assert get_log_path() == Path(log_base_path) / subdirectory @pytest.fixture def reset_log_path_creation_global(): yield from colcon_core import location location._reset_log_path_creation_global() def test_create_log_path(reset_log_path_creation_global): subdirectory = 'sub' with TemporaryDirectory(prefix='test_colcon_') as log_path: log_path = Path(log_path) set_default_log_path(base_path=log_path, subdirectory=subdirectory) # create a directory and symlink when the path doesn't exist with patch('os.makedirs', wraps=os.makedirs) as makedirs: create_log_path('verb') makedirs.assert_called_once_with(str(log_path / subdirectory)) assert (log_path / subdirectory).exists() # repeated call is a noop with patch('os.makedirs') as makedirs: makedirs.side_effect = AssertionError('should not be called') create_log_path('verb') # since the directory already exists create one with a suffix location._create_log_path_called = False with patch('os.makedirs', wraps=os.makedirs) as makedirs: create_log_path('verb') assert makedirs.call_count == 2 assert len(makedirs.call_args_list[0][0]) == 1 assert makedirs.call_args_list[0][0][0] == str( log_path / subdirectory) assert len(makedirs.call_args_list[1][0]) == 1 assert makedirs.call_args_list[1][0][0] == str( log_path / subdirectory) + '_2' assert (log_path / (str(subdirectory) + '_2')).exists() # and another increment of the suffix location._create_log_path_called = False location._log_subdirectory = subdirectory with patch('os.makedirs', wraps=os.makedirs) as makedirs: create_log_path('verb') assert makedirs.call_count == 3 assert len(makedirs.call_args_list[0][0]) == 1 assert makedirs.call_args_list[0][0][0] == str( log_path / subdirectory) assert len(makedirs.call_args_list[1][0]) == 1 assert makedirs.call_args_list[1][0][0] == str( log_path / subdirectory) + '_2' assert len(makedirs.call_args_list[2][0]) == 1 assert makedirs.call_args_list[2][0][0] == str( log_path / subdirectory) + '_3' assert (log_path / (str(subdirectory) + '_3')).exists() subdirectory += '_3' # check that `latest_verb` was created and points to the subdirectory assert (log_path / 'latest_verb').is_symlink() assert (log_path / 'latest_verb').resolve() == \ 
(log_path / subdirectory).resolve() # check that `latest` was created and points to the subdirectory assert (log_path / 'latest').is_symlink() assert (log_path / 'latest').resolve() == \ (log_path / subdirectory).resolve() # create directory but correct latest symlink already exists (log_path / subdirectory).rmdir() location._create_log_path_called = False create_log_path('verb') assert (log_path / subdirectory).exists() assert (log_path / 'latest').is_symlink() assert (log_path / 'latest').resolve() == \ (log_path / subdirectory).resolve() # create directory and update latest symlink subdirectory = 'other_sub' set_default_log_path(base_path=log_path, subdirectory=subdirectory) location._create_log_path_called = False create_log_path('verb') assert (log_path / subdirectory).exists() assert (log_path / 'latest').is_symlink() assert (log_path / 'latest').resolve() == \ (log_path / subdirectory).resolve() # create directory but latest is not a symlink (log_path / subdirectory).rmdir() (log_path / 'latest').unlink() (log_path / 'latest').mkdir() location._create_log_path_called = False create_log_path('verb') assert (log_path / subdirectory).exists() assert not (log_path / 'latest').is_symlink() # check that `latest_verb` is skipped when there is no verb (log_path / subdirectory).rmdir() (log_path / 'latest').rmdir() (log_path / 'latest_verb').unlink() location._create_log_path_called = False create_log_path(None) assert (log_path / subdirectory).exists() assert (log_path / 'latest').is_symlink() assert (log_path / 'latest').resolve() == \ (log_path / subdirectory).resolve() def test__create_symlink(): # check cases where functions raise exceptions and ensure it is being # handled gracefully with TemporaryDirectory(prefix='test_colcon_') as path: path = Path(path) # relative path couldn't be computed, symlink couldn't be created _create_symlink(path / 'nowhere', Path('/foo/bar')) # unlinking symlink failed class DummyPath: def __init__(self): self.parent = 'parent' def exists(self): return False def is_symlink(self): return True def unlink(self): raise FileNotFoundError() with patch('os.symlink') as symlink: _create_symlink(path / 'src', DummyPath()) assert symlink.call_count == 1 # (Windows) OSError: symbolic link privilege not held class ValidPath(DummyPath): def is_symlink(self): return False with patch('os.symlink') as symlink: symlink.side_effect = OSError() _create_symlink(path / 'src', ValidPath()) assert symlink.call_count == 1 colcon-core-0.17.1/test/test_logging.py000066400000000000000000000063671465053734400200500ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import logging from pathlib import Path from unittest.mock import Mock from colcon_core.logging import add_file_handler from colcon_core.logging import get_effective_console_level from colcon_core.logging import get_numeric_log_level from colcon_core.logging import set_logger_level_from_env import pytest from .environment_context import EnvironmentContext def test_set_logger_level_from_env(): logger = logging.getLogger('test') default_level = logger.getEffectiveLevel() # not set set_logger_level_from_env(logger, 'COLCON_TEST_LOGGER_LEVEL') assert logger.getEffectiveLevel() == default_level # invalid value with EnvironmentContext(COLCON_TEST_LOGGER_LEVEL='invalid'): logger.warning = Mock() set_logger_level_from_env(logger, 'COLCON_TEST_LOGGER_LEVEL') assert logger.warning.call_count == 1 assert logger.getEffectiveLevel() == default_level # valid value with 
EnvironmentContext(COLCON_TEST_LOGGER_LEVEL='debug'): set_logger_level_from_env(logger, 'COLCON_TEST_LOGGER_LEVEL') assert logger.getEffectiveLevel() == logging.DEBUG def test_get_numeric_log_level(): # numeric log_level = get_numeric_log_level('10') assert log_level == logging.DEBUG # string log_level = get_numeric_log_level('info') assert log_level == logging.INFO # string with mixed case log_level = get_numeric_log_level('Warning') assert log_level == logging.WARNING # invalid string with pytest.raises(ValueError) as e: get_numeric_log_level('invalid') assert str(e.value).endswith( 'valid names are: CRITICAL, ERROR, WARNING, INFO, DEBUG ' '(case-insensitive)') # negative numeric with pytest.raises(ValueError) as e: get_numeric_log_level('-1') assert str(e.value).endswith('numeric log levels must be positive') def test_add_file_handler(tmpdir): log_path = Path(tmpdir) / 'test_add_file_handler.log' log_path.touch() logger = logging.getLogger('test_add_file_handler') try: logger.setLevel(logging.WARN) add_file_handler(logger, log_path) assert logger.getEffectiveLevel() != logging.WARN logger.info('test_add_file_handler') finally: for handler in logger.handlers: logger.removeHandler(handler) handler.close() # check only that we logged SOMETHING to the file assert log_path.stat().st_size > 10 def test_get_effective_console_level(tmpdir): logger = logging.getLogger('test_sync_console_log_level') # no level set level = get_effective_console_level(logger) assert level == logger.getEffectiveLevel() # change the level to ERROR logger.setLevel(logging.ERROR) level = get_effective_console_level(logger) assert level == logger.getEffectiveLevel() == logging.ERROR # after add_file_handler log_path = Path(tmpdir) / 'test_add_file_handler.log' log_path.touch() try: add_file_handler(logger, log_path) level = get_effective_console_level(logger) assert level == logging.ERROR finally: for handler in logger.handlers: logger.removeHandler(handler) handler.close() colcon-core-0.17.1/test/test_package_augmentation.py000066400000000000000000000161451465053734400225630ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from unittest.mock import Mock from unittest.mock import patch from colcon_core.package_augmentation import augment_packages from colcon_core.package_augmentation \ import get_package_augmentation_extensions from colcon_core.package_augmentation import PackageAugmentationExtensionPoint from colcon_core.package_augmentation import update_descriptor from colcon_core.package_augmentation import update_metadata from colcon_core.package_descriptor import PackageDescriptor from .extension_point_context import ExtensionPointContext class Extension1(PackageAugmentationExtensionPoint): PRIORITY = 80 class Extension2(PackageAugmentationExtensionPoint): pass def test_get_package_augmentation_extensions(): with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_package_augmentation_extensions() assert ['extension2', 'extension1'] == \ list(extensions.keys()) def augment_package_metadata_with_data(desc, *, additional_argument_names): if str(desc.path) == '/some/path'.replace('/', os.sep): desc.metadata['key'] = 'value' def augment_package_metadata_with_path(desc, *, additional_argument_names): desc.metadata['path'] = desc.path def augment_package_with_hook(desc, *, additional_argument_names): desc.hooks += additional_argument_names def test_augment_packages(): desc1 = PackageDescriptor('/some/path') desc2 = 
PackageDescriptor('/other/path') descs = {desc1, desc2} with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_package_augmentation_extensions() extensions['extension1'].augment_package = Mock( side_effect=augment_package_metadata_with_data) extensions['extension2'].augment_package = Mock( side_effect=augment_package_metadata_with_path) augment_packages(descs) assert len(desc1.metadata) == 2 assert set(desc1.metadata.keys()) == {'key', 'path'} assert desc1.path == desc1.metadata['path'] assert len(desc2.metadata) == 1 assert set(desc2.metadata.keys()) == {'path'} assert desc2.path == desc2.metadata['path'] # raise exception desc1 = PackageDescriptor('/some/path') desc2 = PackageDescriptor('/other/path') descs = {desc1, desc2} with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_package_augmentation_extensions() extensions['extension1'].augment_package = Mock( side_effect=augment_package_with_hook) with patch('colcon_core.package_augmentation.logger.error') as error: augment_packages( descs, additional_argument_names=['arg1', 'arg2'], augmentation_extensions=extensions) assert desc1.hooks == ['arg1', 'arg2'] assert desc2.hooks == ['arg1', 'arg2'] # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package augmentation extension 'extension2': \n") # invalid return value desc1.hooks = [] desc2.hooks = [] extensions['extension2'].augment_packages = Mock(return_value=False) with patch('colcon_core.package_augmentation.logger.error') as error: augment_packages( descs, additional_argument_names=['arg1', 'arg2'], augmentation_extensions=extensions) assert desc1.hooks == ['arg1', 'arg2'] assert desc2.hooks == ['arg1', 'arg2'] # the raised assertion is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package augmentation extension 'extension2': ") def test_update_descriptor(): desc = PackageDescriptor('/some/path') assert len(desc.dependencies) == 0 assert len(desc.hooks) == 0 assert len(desc.metadata) == 0 data = { 'build-dependencies': {'b1', 'b2'}, 'test-dependencies': {'t1'}, } update_descriptor(desc, data) assert len(desc.dependencies) == 2 assert 'build' in desc.dependencies.keys() assert desc.dependencies['build'] == {'b1', 'b2'} assert 'test' in desc.dependencies.keys() assert desc.dependencies['test'] == {'t1'} data = { 'dependencies': {'d1'}, 'hooks': ['hook1', 'hook2'], 'key': 'value', } update_descriptor(desc, data, additional_argument_names=['*']) assert len(desc.dependencies) == 3 assert 'build' in desc.dependencies.keys() assert desc.dependencies['build'] == {'d1', 'b1', 'b2'} assert 'run' in desc.dependencies.keys() assert desc.dependencies['run'] == {'d1'} assert 'test' in desc.dependencies.keys() assert desc.dependencies['test'] == {'d1', 't1'} assert len(desc.hooks) == 2 assert desc.hooks == ['hook1', 'hook2'] assert len(desc.metadata) == 1 assert 'key' in desc.metadata assert desc.metadata['key'] == 'value' data = { 'other': 'value', 'some': 'value', } update_descriptor( desc, data, additional_argument_names=['some', 'unknown']) assert len(desc.metadata) == 2 assert 'other' not in desc.metadata assert 'some' in desc.metadata assert desc.metadata['some'] == 'value' def test_update_metadata(): desc = PackageDescriptor('/some/path') desc.name = 'name' assert 
len(desc.metadata) == 0 update_metadata(desc, 'd', {1: 'one', 2: 'two'}) assert len(desc.metadata) == 1 assert 'd' in desc.metadata.keys() assert desc.metadata['d'] == {1: 'one', 2: 'two'} update_metadata(desc, 'd', {2: 'TWO', 3: 'THREE'}) assert len(desc.metadata) == 1 assert 'd' in desc.metadata.keys() assert desc.metadata['d'] == {1: 'one', 2: 'TWO', 3: 'THREE'} update_metadata(desc, 'l', [1, 2]) assert len(desc.metadata) == 2 assert 'l' in desc.metadata.keys() assert desc.metadata['l'] == [1, 2] update_metadata(desc, 'l', [2, 3]) assert len(desc.metadata) == 2 assert 'l' in desc.metadata.keys() assert desc.metadata['l'] == [1, 2, 2, 3] update_metadata(desc, 's', {1, 2}) assert len(desc.metadata) == 3 assert 's' in desc.metadata.keys() assert desc.metadata['s'] == {1, 2} update_metadata(desc, 's', {2, 3}) assert len(desc.metadata) == 3 assert 's' in desc.metadata.keys() assert desc.metadata['s'] == {1, 2, 3} with patch('colcon_core.package_augmentation.logger.warning') as warn: update_metadata(desc, 's', 'different type') warn.assert_called_once_with( "update package 'name' metadata 's' from value '{1, 2, 3}' to " "'different type'") assert len(desc.metadata) == 3 assert 's' in desc.metadata.keys() assert desc.metadata['s'] == 'different type' with patch('colcon_core.package_augmentation.logger.warning') as warn: update_metadata(desc, 's', 'same type') assert warn.call_count == 0 assert len(desc.metadata) == 3 assert 's' in desc.metadata.keys() assert desc.metadata['s'] == 'same type' colcon-core-0.17.1/test/test_package_decorator.py000066400000000000000000000033711465053734400220470ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from unittest.mock import Mock from colcon_core.package_decorator import add_recursive_dependencies from colcon_core.package_decorator import get_decorators from colcon_core.package_decorator import PackageDecorator from colcon_core.package_descriptor import PackageDescriptor def test_constructor(): desc = Mock() d = PackageDecorator(desc) assert d.descriptor == desc assert d.recursive_dependencies is None assert d.selected is True def test_get_decorators(): desc1 = Mock() desc2 = Mock() decos = get_decorators([desc1, desc2]) assert len(decos) == 2 assert decos[0].descriptor == desc1 assert decos[1].descriptor == desc2 def test_add_recursive_dependencies(): d = PackageDescriptor('/some/path') d.name = 'A' d.dependencies['build'].add('B') d.dependencies['build'].add('c') d.dependencies['run'].add('D') d.dependencies['test'].add('e') d1 = PackageDescriptor('/other/path') d1.name = 'B' d1.dependencies['build'].add('f') d1.dependencies['run'].add('G') d2 = PackageDescriptor('/other/path') d2.name = 'D' d2.dependencies['run'].add('h') d3 = PackageDescriptor('/another/path') d3.name = 'G' d3.dependencies['build'].add('i') decos = get_decorators([d, d1, d2, d3]) add_recursive_dependencies( decos, direct_categories={'build', 'run'}, recursive_categories={'run'}) assert decos[0].recursive_dependencies is not None assert decos[1].recursive_dependencies is not None assert decos[2].recursive_dependencies is not None assert decos[3].recursive_dependencies is not None assert decos[0].recursive_dependencies == {'B', 'D', 'G'} colcon-core-0.17.1/test/test_package_descriptor.py000066400000000000000000000115721465053734400222450ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import defaultdict import os from pathlib import Path from 
colcon_core.dependency_descriptor import DependencyDescriptor from colcon_core.package_descriptor import PackageDescriptor import pytest def test_constructor(): d = PackageDescriptor('/some/path') assert d.path == Path('/some/path') assert d.type is None assert d.name is None assert len(d.dependencies.keys()) == 0 assert len(d.hooks) == 0 assert len(d.metadata.keys()) == 0 def test_identifies_package(): d = PackageDescriptor('/some/path') assert not d.identifies_package() d.type = 'type' assert not d.identifies_package() d.type = None d.name = 'name' assert not d.identifies_package() d.type = 'type' assert d.identifies_package() def test_get_dependencies(): d1 = PackageDescriptor('/some/path') d1.name = 'self' d1.dependencies['build'].add('build-depend') d1.dependencies['build'].add('depend') d1.dependencies['run'].add('run-depend') d1.dependencies['run'].add('depend') assert d1.get_dependencies() == {'build-depend', 'run-depend', 'depend'} d1.dependencies['test'].add('self') assert d1.get_dependencies(categories=('build', )) == \ {'build-depend', 'depend'} with pytest.raises(AssertionError) as e: d1.get_dependencies() assert "'self'" in str(e.value) @pytest.fixture def recursive_dependencies(): d = PackageDescriptor('/some/path') d.name = 'A' d.dependencies['build'].add('B') d.dependencies['run'].add('c') d.dependencies['test'].add('d') d1 = PackageDescriptor('/other/path') d1.name = 'B' d1.dependencies['build'].add(DependencyDescriptor('e')) d1.dependencies['run'].add(DependencyDescriptor('F')) d1.dependencies['test'].add(DependencyDescriptor('G')) d2 = PackageDescriptor('/another/path') d2.name = 'd' d3 = PackageDescriptor('/yet-another/path') d3.name = 'F' d3.dependencies['build'].add('h') d3.dependencies['test'].add('G') d3.dependencies['test'].add('I') d3.dependencies['test'].add('J') d4 = PackageDescriptor('/more/path') d4.name = 'G' d4.dependencies['test'].add('I') d5 = PackageDescriptor('/yet-more/path') d5.name = 'I' # circular dependencies should be ignored d5.dependencies['run'].add('A') d6 = PackageDescriptor('/paths/galore') d6.name = 'J' return d, {d, d1, d2, d3, d4, d5, d6} def test_get_recursive_dependencies(recursive_dependencies): desc, all_descs = recursive_dependencies rec_deps = desc.get_recursive_dependencies( all_descs, direct_categories=('build', 'run'), recursive_categories=('run', 'test')) assert rec_deps == { # direct dependencies 'B', # recursive dependencies 'F', 'G', 'I', 'J', } def test_get_recursive_dependencies_map(recursive_dependencies): recursive_categories = defaultdict(lambda: ('run', 'test')) recursive_categories['run'] = ('run',) desc, all_descs = recursive_dependencies rec_deps = desc.get_recursive_dependencies( all_descs, direct_categories=('build', 'run'), recursive_categories=recursive_categories) assert rec_deps == { # direct dependencies 'B', # recursive dependencies 'F', 'G', 'I', } def test_magic_methods(): d1 = PackageDescriptor('/some/path') d1.type = 'custom-type' d1.name = 'custom-name' d2 = PackageDescriptor('/some/path') d2.type = 'custom-type' d2.name = 'other-name' assert d1 != d2 assert hash(d1) != hash(d2) d2.name = 'custom-name' assert d1 == d2 assert hash(d1) == hash(d2) d1.dependencies['build'].add('build-depend') d2.hooks.append('hook') d2.metadata['key'] = 'value' assert d1 == d2 assert hash(d1) == hash(d2) d2.type = 'other-type' assert d1 != d2 assert hash(d1) != hash(d2) d2.type = 'custom-type' assert d1 == d2 assert hash(d1) == hash(d2) d2.path = Path('/other/path') assert d1 != d2 # comparing with other types always 
returns False assert d1 != [] def test_str(): d = PackageDescriptor('/some/path') d.type = 'custom-type' d.name = 'custom-name' d.dependencies['build'].add('build-depend') d.dependencies['run'].add('run-depend') d.hooks += ('hook-a', 'hook-b') d.metadata['key'] = 'value' s = str(d) assert s.startswith('{') assert s.endswith('}') assert 'path: ' in s assert '/some/path'.replace('/', os.sep) in s assert 'type: ' in s assert 'custom-type' in s assert 'name: ' in s assert 'custom-name' in s assert 'dependencies: ' in s assert 'build-depend' in s assert 'run-depend' in s assert 'hooks: ' in s assert 'hook-a' in s assert 'metadata: ' in s assert 'value' in s colcon-core-0.17.1/test/test_package_discovery.py000066400000000000000000000221561465053734400220760ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.package_descriptor import PackageDescriptor from colcon_core.package_discovery import _discover_packages from colcon_core.package_discovery import _get_extensions_with_parameters from colcon_core.package_discovery import add_package_discovery_arguments from colcon_core.package_discovery import discover_packages from colcon_core.package_discovery import expand_dir_wildcards from colcon_core.package_discovery import get_package_discovery_extensions from colcon_core.package_discovery import PackageDiscoveryExtensionPoint from .extension_point_context import ExtensionPointContext class Extension1(PackageDiscoveryExtensionPoint): PRIORITY = 80 class Extension2(PackageDiscoveryExtensionPoint): pass class Extension3(PackageDiscoveryExtensionPoint): PRIORITY = 90 class Extension4(PackageDiscoveryExtensionPoint): pass def test_get_package_discovery_extensions(): with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): extensions = get_package_discovery_extensions() assert ['extension2', 'extension4', 'extension3', 'extension1'] == \ list(extensions.keys()) def test_add_package_discovery_arguments(): parser = Mock() with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): add_package_discovery_arguments(parser) all_extensions = get_package_discovery_extensions() extensions = {'extension2': all_extensions['extension2']} # mock the has_default method extensions['extension2'].has_default = Mock( side_effect=ValueError('exception in has_default')) with patch('colcon_core.package_discovery.logger.error') as error: add_package_discovery_arguments(parser, extensions=extensions) # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package discovery extension 'extension2': " 'exception in has_default\n') extensions = {'extension3': all_extensions['extension3']} # mock the add_arguments method extensions['extension3'].add_arguments = Mock( side_effect=ValueError('exception in add_arguments')) with patch('colcon_core.package_discovery.logger.error') as error: add_package_discovery_arguments(parser, extensions=extensions) # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package discovery extension 
'extension3': " 'exception in add_arguments\n') # mock the has_default method and return True extensions['extension3'].has_default = Mock(return_value=True) add_package_discovery_arguments(parser, extensions=extensions) def test_discover_packages(): # check without any extensions with patch( 'colcon_core.package_discovery.get_package_discovery_extensions', return_value={}, ) as get_extensions: with patch('colcon_core.package_discovery.logger.warning') as warn: descs = discover_packages(None, None) assert get_extensions.call_count == 1 warn.assert_called_once_with('No package discovery extensions found') assert descs == set() with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): extensions = get_package_discovery_extensions() assert len(extensions) == 4 # check without any parameters extensions['extension1'].discover = Mock( return_value={PackageDescriptor('/extension1/pkg1')}) extensions['extension2'].discover = Mock( return_value={PackageDescriptor('/extension2/pkg1')}) descs = discover_packages(None, None, discovery_extensions=extensions) assert len(descs) == 2 expected_path = '/extension1/pkg1'.replace('/', os.sep) assert expected_path in (str(d.path) for d in descs) expected_path = '/extension2/pkg1'.replace('/', os.sep) assert expected_path in (str(d.path) for d in descs) # check with parameters extensions['extension3'].has_parameters = Mock(return_value=True) extensions['extension3'].discover = Mock( return_value={ PackageDescriptor('/extension3/pkg1'), PackageDescriptor('/extension3/pkg2')}) descs = discover_packages(None, None, discovery_extensions=extensions) assert len(descs) == 2 expected_path = '/extension3/pkg1'.replace('/', os.sep) assert expected_path in (str(d.path) for d in descs) expected_path = '/extension3/pkg2'.replace('/', os.sep) assert expected_path in (str(d.path) for d in descs) def test_expand_dir_wildcards(): with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) (prefix_path / 'one').mkdir() (prefix_path / 'two').mkdir() (prefix_path / 'three').touch() paths = [ '/some/path', str(prefix_path / '*') ] expand_dir_wildcards(paths) assert len(paths) == 3 assert paths[0] == '/some/path' assert paths[1] == str((prefix_path / 'one')) assert paths[2] == str((prefix_path / 'two')) def test__get_extensions_with_parameters(): with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): extensions = get_package_discovery_extensions() # mock the has_parameters method extensions['extension1'].has_parameters = Mock(return_value=True) extensions['extension2'].has_parameters = Mock( side_effect=ValueError('exception in has_parameters')) extensions['extension3'].has_parameters = Mock( side_effect=PackageDiscoveryExtensionPoint.has_parameters) extensions['extension4'].has_parameters = Mock(return_value=False) with_parameters = _get_extensions_with_parameters(Mock(), extensions) assert extensions['extension1'].has_parameters.call_count == 1 assert extensions['extension2'].has_parameters.call_count == 1 assert extensions['extension3'].has_parameters.call_count == 1 assert extensions['extension4'].has_parameters.call_count == 1 assert set(with_parameters.keys()) == {'extension1'} def test__discover_packages(): descs = _discover_packages(None, None, {}) assert descs == set() with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): extensions = 
get_package_discovery_extensions() # mock the discover method in the order the extensions are being called extensions['extension2'].discover = Mock( side_effect=ValueError('exception in discover')) extensions['extension4'].discover = Mock( side_effect=extensions['extension4'].discover) extensions['extension3'].discover = Mock( return_value={ PackageDescriptor('/extension3/pkg1'), PackageDescriptor('/extension3/pkg2')}) # returns None instead of a set extensions['extension1'].discover = Mock() with patch('colcon_core.package_discovery.logger.error') as error: descs = _discover_packages(Mock(), None, extensions) # in the order the extensions are being called assert extensions['extension2'].discover.call_count == 1 assert extensions['extension4'].discover.call_count == 1 assert extensions['extension3'].discover.call_count == 1 assert extensions['extension1'].discover.call_count == 1 # the raised exceptions are catched and result in error messages assert error.call_count == 2 assert len(error.call_args_list[0][0]) == 1 assert error.call_args_list[0][0][0].startswith( "Exception in package discovery extension 'extension2': " 'exception in discover\n') assert len(error.call_args_list[1][0]) == 1 assert error.call_args_list[1][0][0].startswith( "Exception in package discovery extension 'extension1': " 'discover() should return a set\n') assert len(descs) == 2 expected_path = '/extension3/pkg1'.replace('/', os.sep) assert expected_path in (str(d.path) for d in descs) expected_path = '/extension3/pkg2'.replace('/', os.sep) assert expected_path in (str(d.path) for d in descs) colcon-core-0.17.1/test/test_package_discovery_path.py000066400000000000000000000053531465053734400231120ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.package_descriptor import PackageDescriptor from colcon_core.package_discovery.path import PathPackageDiscovery from colcon_core.package_identification import IgnoreLocationException def test_path_package_discovery(): extension = PathPackageDiscovery() assert extension.has_default() is True def test_add_arguments(): extension = PathPackageDiscovery() parser = Mock() parser.add_argument = Mock() extension.add_arguments(parser=parser, with_default=True) assert parser.add_argument.call_count == 1 def test_has_parameters(): extension = PathPackageDiscovery() args = Mock() args.paths = [] assert extension.has_parameters(args=args) is False args.paths = ['/some/path'] assert extension.has_parameters(args=args) is True def identify(_, path): if path == '/empty/path': return None if path == '/skip/path': raise IgnoreLocationException() return PackageDescriptor(path) def test_discover(): extension = PathPackageDiscovery() args = Mock() args.paths = None assert extension.discover(args=args, identification_extensions={}) == set() args.paths = [ '/empty/path', '/skip/path', '/same/path', '/same/path/../path', '/other/path'] with patch( 'colcon_core.package_discovery.path.identify', side_effect=identify ): descs = extension.discover(args=args, identification_extensions={}) assert descs == { PackageDescriptor(os.path.realpath('/same/path')), PackageDescriptor(os.path.realpath('/other/path'))} def test_discover_with_wildcards(): with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) path_one = prefix_path / 'one' / 'path' path_two = 
prefix_path / 'two' / 'path' path_three = prefix_path / 'three' / 'path' path_one.mkdir(parents=True) path_two.mkdir(parents=True) path_three.mkdir(parents=True) extension = PathPackageDiscovery() args = Mock() args.paths = [str(prefix_path / '*' / 'path')] with patch( 'colcon_core.package_discovery.path.identify', side_effect=identify ): descs = extension.discover(args=args, identification_extensions={}) assert descs == { PackageDescriptor(os.path.realpath(str(path_one))), PackageDescriptor(os.path.realpath(str(path_two))), PackageDescriptor(os.path.realpath(str(path_three)))} colcon-core-0.17.1/test/test_package_identification.py000066400000000000000000000160211465053734400230520ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from unittest.mock import Mock from unittest.mock import patch from colcon_core.package_descriptor import PackageDescriptor from colcon_core.package_identification import _identify from colcon_core.package_identification \ import get_package_identification_extensions from colcon_core.package_identification import identify from colcon_core.package_identification import IgnoreLocationException from colcon_core.package_identification \ import PackageIdentificationExtensionPoint import pytest from .extension_point_context import ExtensionPointContext class Extension1(PackageIdentificationExtensionPoint): PRIORITY = 80 class Extension2(PackageIdentificationExtensionPoint): pass class Extension3(PackageIdentificationExtensionPoint): PRIORITY = 90 class Extension4(PackageIdentificationExtensionPoint): pass def test_get_package_identification_extensions(): with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): extensions = get_package_identification_extensions() assert list(extensions.keys()) == [100, 90, 80] assert list(extensions[100].keys()) == ['extension2', 'extension4'] assert list(extensions[90].keys()) == ['extension3'] assert list(extensions[80].keys()) == ['extension1'] def identify_name(desc): desc.name = 'name' def identify_type(desc): desc.type = 'type' def identify_name_and_type(desc): identify_name(desc) identify_type(desc) def test_identify(): path = '/some/path' context = ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4) with context: # no identification desc = identify({}, path) assert desc is None # no complete identification extensions = get_package_identification_extensions() extensions[80]['extension1'].identify = Mock( side_effect=identify_name) desc = identify(extensions, path) assert desc is None # valid result combined across priority groups extensions = get_package_identification_extensions() extensions[100]['extension4'].identify = Mock( side_effect=identify_type) desc = identify(extensions, path) assert isinstance(desc, PackageDescriptor) assert str(desc.path) == '/some/path'.replace('/', os.sep) assert desc.name == 'name' assert desc.type == 'type' # skip location extensions = get_package_identification_extensions() extensions[90]['extension3'].identify = Mock( side_effect=IgnoreLocationException()) with pytest.raises(IgnoreLocationException): identify(extensions, path) # valid result from first priority group # lower priority groups are not even invoked extensions = get_package_identification_extensions() extensions[100]['extension4'].identify.side_effect = \ identify_name_and_type desc = identify(extensions, path) assert isinstance(desc, 
PackageDescriptor) assert str(desc.path) == '/some/path'.replace('/', os.sep) assert desc.name == 'name' assert desc.type == 'type' with context: # multiple different results result in skipping the location extensions = get_package_identification_extensions() extensions[100]['extension2'].identify = Mock( side_effect=identify_name) extensions[100]['extension4'].identify = Mock( side_effect=identify_type) with pytest.raises(IgnoreLocationException): identify(extensions, path) def test__identify(): desc_path_only = PackageDescriptor('/some/path') with ExtensionPointContext( extension1=Extension1, extension2=Extension2, extension3=Extension3, extension4=Extension4, ): # valid result extensions = get_package_identification_extensions()[100] extensions['extension2'].identify = Mock() extensions['extension4'].identify = identify_name_and_type desc = _identify(extensions, desc_path_only) assert isinstance(desc, PackageDescriptor) assert str(desc.path) == '/some/path'.replace('/', os.sep) assert desc.name == 'name' assert desc.type == 'type' # no results extensions = get_package_identification_extensions()[100] extensions['extension2'].identify = Mock() extensions['extension4'].identify = Mock() desc = _identify(extensions, desc_path_only) assert desc is None # multiple different results extensions = get_package_identification_extensions()[100] extensions['extension2'].identify = identify_name extensions['extension4'].identify = identify_type with patch( 'colcon_core.package_identification.logger.warning' ) as warn: desc = _identify(extensions, desc_path_only) assert desc is False # the raised exception is catched and results in a warn message assert warn.call_count == 1 assert len(warn.call_args[0]) == 1 assert 'multiple matches' in warn.call_args[0][0] # invalid return value extensions = get_package_identification_extensions()[90] extensions['extension3'].identify = Mock(return_value=True) with patch('colcon_core.package_identification.logger.error') as error: desc = _identify(extensions, desc_path_only) assert desc is None # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package identification extension 'extension3' " "in '/some/path': identify() should return None\n" .replace('/', os.sep)) # skip location extensions = get_package_identification_extensions()[90] extensions['extension3'].identify = Mock( side_effect=IgnoreLocationException()) with pytest.raises(IgnoreLocationException): _identify(extensions, desc_path_only) # raise exception extensions = get_package_identification_extensions()[90] extensions['extension3'].identify = Mock( side_effect=RuntimeError('custom exception')) with patch('colcon_core.package_identification.logger.error') as error: desc = _identify(extensions, desc_path_only) assert desc is None # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package identification extension 'extension3' " "in '/some/path': custom exception\n".replace('/', os.sep)) colcon-core-0.17.1/test/test_package_identification_ignore.py000066400000000000000000000014621465053734400244200ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import Mock from colcon_core.package_identification 
import IgnoreLocationException from colcon_core.package_identification.ignore import IGNORE_MARKER from colcon_core.package_identification.ignore \ import IgnorePackageIdentification import pytest def test_identify(): extension = IgnorePackageIdentification() metadata = Mock() with TemporaryDirectory(prefix='test_colcon_') as basepath: metadata.path = Path(basepath) assert extension.identify(metadata) is None (metadata.path / IGNORE_MARKER).write_text('') with pytest.raises(IgnoreLocationException): extension.identify(metadata) colcon-core-0.17.1/test/test_package_identification_python.py000066400000000000000000000134761465053734400244660ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path from tempfile import TemporaryDirectory from colcon_core.package_augmentation.python \ import create_dependency_descriptor from colcon_core.package_augmentation.python \ import PythonPackageAugmentation from colcon_core.package_descriptor import PackageDescriptor from colcon_core.package_identification.python \ import PythonPackageIdentification import pytest def test_identify(): extension = PythonPackageIdentification() augmentation_extension = PythonPackageAugmentation() with TemporaryDirectory(prefix='test_colcon_') as basepath: desc = PackageDescriptor(basepath) desc.type = 'other' assert extension.identify(desc) is None assert desc.name is None desc.type = None assert extension.identify(desc) is None assert desc.name is None assert desc.type is None basepath = Path(basepath) (basepath / 'setup.py').write_text('setup()') assert extension.identify(desc) is None assert desc.name is None assert desc.type is None (basepath / 'setup.cfg').write_text('') assert extension.identify(desc) is None assert desc.name is None assert desc.type is None (basepath / 'setup.cfg').write_text( '[metadata]\n' 'name = pkg-name\n') assert extension.identify(desc) is None assert desc.name == 'pkg-name' assert desc.type == 'python' assert not desc.dependencies assert not desc.metadata augmentation_extension.augment_package(desc) assert set(desc.dependencies.keys()) == {'build', 'run', 'test'} assert not desc.dependencies['build'] assert not desc.dependencies['run'] assert not desc.dependencies['test'] desc = PackageDescriptor(basepath) desc.name = 'other-name' with pytest.raises(RuntimeError) as e: extension.identify(desc) assert str(e.value).endswith( 'Package name already set to different value') (basepath / 'setup.cfg').write_text( '[metadata]\n' 'name = other-name\n' 'maintainer = Foo Bar\n' 'maintainer_email = foobar@example.com\n' '[options]\n' 'setup_requires =\n' " build; sys_platform != 'win32'\n" " build-windows; sys_platform == 'win32'\n" 'install_requires =\n' ' runA > 1.2.3\n' ' runB\n' 'tests_require = test == 2.0.0\n' 'zip_safe = false\n' '[options.extras_require]\n' 'test = test2 == 3.0.0\n' 'tests = test3\n' 'testing = test4\n' 'other = not-test\n') assert extension.identify(desc) is None assert desc.name == 'other-name' assert desc.type == 'python' assert not desc.dependencies assert not desc.metadata augmentation_extension.augment_package(desc) assert set(desc.dependencies.keys()) == {'build', 'run', 'test'} assert desc.dependencies['build'] == {'build', 'build-windows'} assert desc.dependencies['run'] == {'runA', 'runB'} dep = next(x for x in desc.dependencies['run'] if x == 'runA') assert dep.metadata['version_gt'] == '1.2.3' assert desc.dependencies['test'] == {'test', 'test2', 'test3', 'test4'} assert 
callable(desc.metadata['get_python_setup_options']) options = desc.metadata['get_python_setup_options'](None) assert 'zip_safe' in options assert desc.metadata['maintainers'] == ['Foo Bar '] desc = PackageDescriptor(basepath) desc.name = 'other-name' (basepath / 'setup.cfg').write_text( '[metadata]\n' 'name = other-name\n' 'author = Baz Qux\n' 'author_email = bazqux@example.com\n') extension.identify(desc) augmentation_extension.augment_package(desc) assert desc.metadata['maintainers'] == ['Baz Qux '] def test_create_dependency_descriptor(): eq_str = 'pkgname==2.2.0' dep = create_dependency_descriptor(eq_str) assert dep.metadata['version_eq'] == '2.2.0' lt_str = 'pkgname<2.3.0' dep = create_dependency_descriptor(lt_str) assert dep.metadata['version_lt'] == '2.3.0' lte_str = 'pkgname<=2.2.0' dep = create_dependency_descriptor(lte_str) assert dep.metadata['version_lte'] == '2.2.0' gt_str = 'pkgname>2.3.0' dep = create_dependency_descriptor(gt_str) assert dep.metadata['version_gt'] == '2.3.0' gte_str = 'pkgname>=2.2.0' dep = create_dependency_descriptor(gte_str) assert dep.metadata['version_gte'] == '2.2.0' neq_str = 'pkgname!=1.2.1' dep = create_dependency_descriptor(neq_str) assert dep.metadata['version_neq'] == '1.2.1' compat_str = 'pkgname~=1.4.1a4' dep = create_dependency_descriptor(compat_str) assert dep.metadata['version_gte'] == '1.4.1a4' assert dep.metadata['version_lt'] == '1.5' compat_str = 'pkgname~=1.4.1' dep = create_dependency_descriptor(compat_str) assert dep.metadata['version_gte'] == '1.4.1' assert dep.metadata['version_lt'] == '1.5' compat_str = 'pkgname~=1.4.1.4' dep = create_dependency_descriptor(compat_str) assert dep.metadata['version_gte'] == '1.4.1.4' assert dep.metadata['version_lt'] == '1.4.2' compat_str = 'pkgname~=11.12' dep = create_dependency_descriptor(compat_str) assert dep.metadata['version_gte'] == '11.12' assert dep.metadata['version_lt'] == '12.0' multi_str = 'pkgname<=3.2.0, >=2.2.0' dep = create_dependency_descriptor(multi_str) assert dep.metadata['version_gte'] == '2.2.0' assert dep.metadata['version_lte'] == '3.2.0' colcon-core-0.17.1/test/test_package_selection.py000066400000000000000000000171021465053734400220470ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from argparse import Namespace import os from unittest.mock import Mock from unittest.mock import patch from colcon_core.package_descriptor import PackageDescriptor from colcon_core.package_selection import _add_package_selection_arguments from colcon_core.package_selection import _check_package_selection_parameters from colcon_core.package_selection import add_arguments from colcon_core.package_selection import get_package_selection_extensions from colcon_core.package_selection import get_packages from colcon_core.package_selection import PackageSelectionExtensionPoint from colcon_core.package_selection import select_package_decorators import pytest from .extension_point_context import ExtensionPointContext class Extension1(PackageSelectionExtensionPoint): pass class Extension2(PackageSelectionExtensionPoint): pass def test_get_package_selection_extensions(): with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_package_selection_extensions() assert ['extension1', 'extension2'] == list(extensions.keys()) def add_dummy_arguments(parser): parser.add_argument('arg') def test__add_package_selection_arguments(): parser = Mock() with ExtensionPointContext(extension1=Extension1, extension2=Extension2): 
extensions = get_package_selection_extensions() # invalid return value extensions['extension1'].add_arguments = Mock(return_value=True) with patch( 'colcon_core.package_selection.add_package_discovery_arguments' ) as add_package_discovery_arguments: with patch('colcon_core.package_selection.logger.error') as error: add_arguments(parser) assert add_package_discovery_arguments.call_count == 1 # the raised assertion is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package selection extension 'extension1': " 'add_arguments() should return None\n') # raise exception extensions['extension1'].add_arguments = Mock( side_effect=RuntimeError('custom exception')) # check that arguments can be added extensions['extension2'].add_arguments = Mock( side_effect=add_dummy_arguments) with patch('colcon_core.package_selection.logger.error') as error: _add_package_selection_arguments(parser) assert extensions['extension2'].add_arguments.call_count == 1 # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package selection extension 'extension1': " 'custom exception\n') def test_get_packages(): args = Namespace() d1 = PackageDescriptor('/some/path') d1.name = 'one' d2 = PackageDescriptor('/other/path') d2.name = 'two' with patch( 'colcon_core.package_selection.discover_packages', return_value=[d1, d2] ): decos = get_packages(args) assert len(decos) == 2 assert decos[0].descriptor.name == 'one' assert decos[0].selected is True assert decos[1].descriptor.name == 'two' assert decos[1].selected is True d2.name = 'one' with patch( 'colcon_core.package_selection.discover_packages', return_value=[d1, d2] ): with pytest.raises(RuntimeError) as e: get_packages(args) assert 'Duplicate package names not supported:' in str(e.value) assert '- one:' in str(e.value) assert '- {sep}some{sep}path'.format(sep=os.sep) in str(e.value) assert '- {sep}other{sep}path'.format(sep=os.sep) in str(e.value) def test__check_package_selection_parameters(): args = Mock() pkg_names = Mock() with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_package_selection_extensions() # nothing wrong with the arguments _check_package_selection_parameters(args, pkg_names) # raise exception extensions['extension1'].check_parameters = Mock( side_effect=RuntimeError('custom exception')) extensions['extension2'].check_parameters = Mock(return_value=None) with patch('colcon_core.package_selection.logger.error') as error: _check_package_selection_parameters(args, pkg_names) assert extensions['extension2'].check_parameters.call_count == 1 # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package selection extension 'extension1': custom " 'exception\n') # invalid return value extensions['extension1'].check_parameters = Mock(return_value=True) extensions['extension2'].check_parameters.reset_mock() with patch('colcon_core.package_selection.logger.error') as error: _check_package_selection_parameters(args, pkg_names) assert extensions['extension2'].check_parameters.call_count == 1 # the raised assertion is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( 
"Exception in package selection extension 'extension1': " 'check_parameters() should return None\n') # select some packages extensions['extension1'].check_parameters = Mock( side_effect=SystemExit(1)) with pytest.raises(SystemExit): _check_package_selection_parameters(args, pkg_names) def select_some_packages(*, args, decorators): for i, decorator in enumerate(decorators): decorator.selected = bool(i % 2) def test_select_package_decorators(): args = Mock() deco1 = Mock() deco1.selected = True deco2 = Mock() deco2.selected = True decos = [deco1, deco2] with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_package_selection_extensions() # raise exception extensions['extension2'].select_packages = Mock(return_value=None) with patch('colcon_core.package_selection.logger.error') as error: select_package_decorators(args, decos) # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package selection extension 'extension1': \n") # invalid return value extensions['extension1'].select_packages = Mock(return_value=True) with patch('colcon_core.package_selection.logger.error') as error: select_package_decorators(args, decos) # the raised assertion is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in package selection extension 'extension1': " 'select_packages() should return None\n') # select some packages extensions['extension1'].select_packages = Mock( side_effect=select_some_packages) select_package_decorators(args, decos) assert not deco1.selected assert deco2.selected colcon-core-0.17.1/test/test_plugin_system.py000066400000000000000000000135141465053734400213140ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from unittest.mock import patch from colcon_core.plugin_system import get_first_line_doc from colcon_core.plugin_system import instantiate_extensions from colcon_core.plugin_system import order_extensions_by_name from colcon_core.plugin_system import order_extensions_by_priority from colcon_core.plugin_system import order_extensions_grouped_by_priority from colcon_core.plugin_system import satisfies_version from colcon_core.plugin_system import SkipExtensionException import pytest from .extension_point_context import ExtensionPointContext def test_instantiate_extensions(): class Extension1: pass class Extension2: pass with ExtensionPointContext(extension1=Extension1, extension2=Extension2): # successful instantiation of extensions extensions = instantiate_extensions('group') assert 'extension1' in extensions.keys() assert isinstance(extensions['extension1'], Extension1) assert 'extension2' in extensions.keys() assert isinstance(extensions['extension2'], Extension2) # unique extension instances unique_extensions = instantiate_extensions( 'group', unique_instance=True) assert 'extension1' in unique_extensions.keys() assert isinstance(unique_extensions['extension1'], Extension1) assert extensions['extension1'] != unique_extensions['extension1'] # exclude extension names extensions = instantiate_extensions( 'group', exclude_names=['extension1']) assert 'extension1' not in extensions.keys() assert 'extension2' in extensions.keys() def test_instantiate_extensions_exception(): class ExtensionRaisingException: def __init__(self): raise Exception('extension 
raising exception') class ExtensionSkipExtensionException: def __init__(self): raise SkipExtensionException( 'extension raising skip extension exception') with ExtensionPointContext( exception=ExtensionRaisingException, skip_extension_exception=ExtensionSkipExtensionException ): with patch('colcon_core.plugin_system.logger.error') as error: with patch('colcon_core.plugin_system.logger.info') as info: extensions = instantiate_extensions('group') # the entry point raising an exception different than a skip # extension exception results in an error message in the log assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert "Exception instantiating extension 'group.exception'" \ in error.call_args[0][0] assert 'extension raising exception' in error.call_args[0][0] # the entry point raising a skip extension exception results in # an info message in the log assert info.call_count == 1 assert len(info.call_args[0]) == 1 assert "Skipping extension 'group.skip_extension_exception'" \ in info.call_args[0][0] assert 'extension raising skip extension exception' \ in info.call_args[0][0] # neither of the entry points was loaded successfully assert extensions == {} class ExtensionA: PRIORITY = 100 class ExtensionB: PRIORITY = 100 class ExtensionC: PRIORITY = 110 def test_order_extensions_by_name(): with ExtensionPointContext(foo=ExtensionA, bar=ExtensionB, baz=ExtensionC): extensions = instantiate_extensions('group') # ensure correct order based on name ordered_extensions = order_extensions_by_name(extensions) assert list(ordered_extensions.keys()) == ['bar', 'baz', 'foo'] def test_order_extensions_by_priority(): with ExtensionPointContext(foo=ExtensionA, bar=ExtensionB, baz=ExtensionC): extensions = instantiate_extensions('group') # ensure correct order based on priority ordered_extensions = order_extensions_by_priority(extensions) assert list(ordered_extensions.keys()) == ['baz', 'bar', 'foo'] def test_order_extensions_grouped_by_priority(): with ExtensionPointContext(foo=ExtensionA, bar=ExtensionB, baz=ExtensionC): extensions = instantiate_extensions('group') # ensure correct order based on priority grouped_extensions = order_extensions_grouped_by_priority(extensions) assert list(grouped_extensions.keys()) == [110, 100] # ensure correct order in each priority group based on name assert list(grouped_extensions[110].keys()) == ['baz'] assert list(grouped_extensions[100].keys()) == ['bar', 'foo'] def test_get_first_line_doc(): def single_line_doc(): """Single line.""" assert get_first_line_doc(single_line_doc) == 'Single line' def multi_line_doc(): # noqa: D400 """ First line. Second line. 
""" assert get_first_line_doc(multi_line_doc) == 'First line' def no_doc(): pass # pragma: no cover assert get_first_line_doc(no_doc) == '' def whitespace_doc(): """ """ # noqa: D419 assert get_first_line_doc(whitespace_doc) == '' def empty_lines_doc(): """ """ # noqa: D419 assert get_first_line_doc(empty_lines_doc) == '' def test_satisfies_version(): satisfies_version('1.2.3', '^1') satisfies_version('1.2.3', '^1.1') with pytest.raises(RuntimeError) as e: satisfies_version('1.0.3', '^1.1') assert 'too old' in str(e.value) with pytest.raises(RuntimeError) as e: satisfies_version('2.0.0', '^1.2') assert 'newer' in str(e.value) # different semantic for version numbers before 1.0 with pytest.raises(RuntimeError) as e: satisfies_version('0.2.3', '^0.1') assert 'newer' in str(e.value) colcon-core-0.17.1/test/test_prefix_path.py000066400000000000000000000102671465053734400207250ustar00rootroot00000000000000# Copyright 2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.prefix_path import get_chained_prefix_path from colcon_core.prefix_path import get_prefix_path_extensions from colcon_core.prefix_path import PrefixPathExtensionPoint from colcon_core.prefix_path.colcon import ColconPrefixPath import pytest from .environment_context import EnvironmentContext from .extension_point_context import ExtensionPointContext class Extension1(PrefixPathExtensionPoint): PRIORITY = 90 class Extension2(PrefixPathExtensionPoint): pass def test_extension_interface(): extension = Extension1() with pytest.raises(NotImplementedError): extension.extend_prefix_path(None) def test_get_prefix_path_extensions(): with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_prefix_path_extensions() assert list(extensions.keys()) == [100, 90] assert list(extensions[100].keys()) == ['extension2'] assert list(extensions[90].keys()) == ['extension1'] def test_get_chained_prefix_path(): ColconPrefixPath.PREFIX_PATH_NAME = 'colcon' with patch( 'colcon_core.prefix_path.get_prefix_path_extensions', return_value={ 100: {'colcon': ColconPrefixPath()}, } ): # empty environment variable with EnvironmentContext(COLCON_PREFIX_PATH=''): prefix_path = get_chained_prefix_path() assert prefix_path == [] # extra path separator with EnvironmentContext(COLCON_PREFIX_PATH=os.pathsep): prefix_path = get_chained_prefix_path(skip='/path/to/skip') assert prefix_path == [] with TemporaryDirectory(prefix='test_colcon_') as basepath: basepath = Path(basepath) with EnvironmentContext(COLCON_PREFIX_PATH=os.pathsep.join( [str(basepath), str(basepath)] )): # multiple results, duplicates being skipped prefix_path = get_chained_prefix_path(skip='/path/to/skip') assert prefix_path == [str(basepath)] # skipping results prefix_path = get_chained_prefix_path(skip=str(basepath)) assert prefix_path == [] # skipping non-existing results with EnvironmentContext(COLCON_PREFIX_PATH=os.pathsep.join( [str(basepath), str(basepath / 'non-existing-sub')] )): with patch( 'colcon_core.prefix_path.colcon.logger.warning' ) as warn: prefix_path = get_chained_prefix_path() assert prefix_path == [str(basepath)] assert warn.call_count == 1 assert len(warn.call_args[0]) == 1 assert warn.call_args[0][0].endswith( "non-existing-sub' in the environment variable " "COLCON_PREFIX_PATH doesn't exist") # suppress duplicate warning with patch( 'colcon_core.prefix_path.colcon.logger.warning' ) 
as warn: prefix_path = get_chained_prefix_path() assert prefix_path == [str(basepath)] assert warn.call_count == 0 with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_prefix_path_extensions() # one invalid return value, one not implemented extensions[100]['extension2'].extend_prefix_path = Mock( return_value=False) extensions[90]['extension1'].extend_prefix_path = Mock( return_value=None) with patch('colcon_core.prefix_path.logger.error') as error: get_chained_prefix_path() # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args_list[0][0]) == 1 assert error.call_args_list[0][0][0].startswith( "Exception in prefix path extension 'extension2': " 'extend_prefix_path() should return None\n') colcon-core-0.17.1/test/test_shell.py000066400000000000000000000464551465053734400175330ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from collections import OrderedDict import os from pathlib import Path import sys from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.plugin_system import SkipExtensionException from colcon_core.shell import check_dependency_availability from colcon_core.shell import create_environment_hook from colcon_core.shell import find_installed_packages from colcon_core.shell import find_installed_packages_in_environment from colcon_core.shell import FindInstalledPackagesExtensionPoint from colcon_core.shell import get_colcon_prefix_path from colcon_core.shell import get_command_environment from colcon_core.shell import get_environment_variables from colcon_core.shell import get_find_installed_packages_extensions from colcon_core.shell import get_shell_extensions from colcon_core.shell import ShellExtensionPoint from colcon_core.shell.installed_packages import IsolatedInstalledPackageFinder from colcon_core.shell.installed_packages import MergedInstalledPackageFinder import pytest from .environment_context import EnvironmentContext from .extension_point_context import ExtensionPointContext from .run_until_complete import run_until_complete class Extension1(ShellExtensionPoint): PRIORITY = 90 class Extension2(ShellExtensionPoint): pass def test_extension_interface(): extension = Extension1() with pytest.raises(NotImplementedError): extension.create_prefix_script(None, None) with pytest.raises(NotImplementedError): extension.create_package_script(None, None, None) with pytest.raises(NotImplementedError): extension.create_hook_set_value(None, None, None, None, None) with pytest.raises(NotImplementedError): extension.create_hook_append_value(None, None, None, None, None) with pytest.raises(NotImplementedError): extension.create_hook_prepend_value(None, None, None, None, None) with pytest.raises(NotImplementedError): extension.create_hook_include_file(None, None, None, None) coroutine = extension.generate_command_environment(None, None, None) with pytest.raises(NotImplementedError): run_until_complete(coroutine) def test_get_shell_extensions(): with ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_shell_extensions() assert list(extensions.keys()) == [100, 90] assert list(extensions[100].keys()) == ['extension2'] assert list(extensions[90].keys()) == ['extension1'] async def generate_command_environment(task_name, build_base, dependencies): return {'key': 'value'} def test_get_command_environment(): with 
ExtensionPointContext(extension1=Extension1, extension2=Extension2): extensions = get_shell_extensions() # one not implemented, one skipped extension extensions[90]['extension1'].generate_command_environment = Mock( side_effect=SkipExtensionException()) coroutine = get_command_environment(None, '/build/base', None) with patch('colcon_core.shell.logger.debug') as debug: with patch('colcon_core.shell.logger.info') as info: with pytest.raises(RuntimeError) as e: run_until_complete(coroutine) assert 'Could not find a shell extension for the command environment' \ in str(e.value) assert extensions[90]['extension1'].generate_command_environment \ .call_count == 1 # the raised exceptions are catched and result in a debug/info message assert debug.call_count == 1 assert len(debug.call_args[0]) == 1 assert debug.call_args[0][0] == \ "Skip shell extension 'extension2' for command environment" assert info.call_count == 1 assert len(info.call_args[0]) == 1 assert info.call_args[0][0].startswith( "Skip shell extension 'extension1' for command environment: ") # raise runtime error extensions[100]['extension2'].generate_command_environment = Mock( side_effect=RuntimeError('custom exception')) extensions[90]['extension1'].generate_command_environment.reset_mock() coroutine = get_command_environment(None, '/build/base', None) with pytest.raises(RuntimeError) as e: run_until_complete(coroutine) assert str(e.value) == 'custom exception' assert extensions[90]['extension1'].generate_command_environment \ .call_count == 0 # one exception, one successful extensions[100]['extension2'].generate_command_environment = Mock( side_effect=Exception('custom exception')) extensions[90]['extension1'].generate_command_environment = Mock( side_effect=generate_command_environment) coroutine = get_command_environment(None, '/build/base', None) with patch('colcon_core.shell.logger.error') as error: env = run_until_complete(coroutine) assert env == {'key': 'value'} # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in shell extension 'extension2': custom exception\n") def test_get_environment_variables(): cmd = [ sys.executable, '-c', r'print("FOO\nNAME=value\n\nSOMETHING\nNAME2=value with spaces")'] coroutine = get_environment_variables(cmd, shell=False) env = run_until_complete(coroutine) assert len(env.keys()) == 2 assert 'NAME' in env.keys() assert env['NAME'] == 'value\nSOMETHING' assert 'NAME2' in env.keys() assert env['NAME2'] == 'value with spaces' # test with environment strings which isn't decodable async def check_output(cmd, **kwargs): return b'DECODE_ERROR=\x81\nNAME=value' with patch('colcon_core.shell.check_output', side_effect=check_output): with patch('colcon_core.shell.logger.warning') as warn: coroutine = get_environment_variables(['not-used'], shell=False) env = run_until_complete(coroutine) assert len(env.keys()) == 1 assert 'NAME' in env.keys() assert env['NAME'] == 'value' # the raised decode error is catched and results in a warning message assert warn.call_count == 1 assert len(warn.call_args[0]) == 1 assert warn.call_args[0][0].startswith( "Failed to decode line from the environment using the encoding '") assert 'DECODE_ERROR=' in warn.call_args[0][0] class Extension3(ShellExtensionPoint): PRIORITY = 105 class Extension4(ShellExtensionPoint): PRIORITY = 101 class Extension5(ShellExtensionPoint): PRIORITY = 110 def test_create_environment_hook(): with 
ExtensionPointContext(extension1=Extension1, extension2=Extension2): # no primary shell extension with pytest.raises(RuntimeError) as e: create_environment_hook(None, None, None, None, None) assert str(e.value).endswith( 'Could not find a primary shell extension for creating an ' 'environment hook') with ExtensionPointContext( extension3=Extension3, extension4=Extension4, extension5=Extension5 ): extensions = get_shell_extensions() # append: one invalid, two valid return values extensions[105]['extension3'].create_hook_append_value = Mock() extensions[101]['extension4'].create_hook_append_value = Mock( return_value=Path('/some/path/sub/hookA')) extensions[110]['extension5'].create_hook_append_value = Mock( return_value=Path('/some/path/sub/hookB')) with patch('colcon_core.shell.logger.error') as error: hooks = create_environment_hook( None, None, None, None, None, mode='append') assert len(hooks) == 2 assert str(hooks[0]) == '/some/path/sub/hookB'.replace('/', os.sep) assert str(hooks[1]) == '/some/path/sub/hookA'.replace('/', os.sep) # the raised exception is caught and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in shell extension 'extension3': " 'create_hook_append_value() should return a Path object') # prepend: one invalid, two valid return values extensions[105]['extension3'].create_hook_prepend_value = Mock() extensions[101]['extension4'].create_hook_prepend_value = Mock( return_value=Path('/some/path/sub/hookA')) extensions[110]['extension5'].create_hook_prepend_value = Mock( return_value=Path('/some/path/sub/hookB')) with patch('colcon_core.shell.logger.error') as error: hooks = create_environment_hook(None, None, None, None, None) assert len(hooks) == 2 assert str(hooks[0]) == '/some/path/sub/hookB'.replace('/', os.sep) assert str(hooks[1]) == '/some/path/sub/hookA'.replace('/', os.sep) # the raised exception is caught and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in shell extension 'extension3': " 'create_hook_prepend_value() should return a Path object') # invalid mode with pytest.raises(NotImplementedError): create_environment_hook( None, None, None, None, None, mode='invalid') def test_get_colcon_prefix_path(): # ignore deprecation warning with patch('colcon_core.shell.warnings.warn') as warn: # empty environment variable with EnvironmentContext(COLCON_PREFIX_PATH=''): prefix_path = get_colcon_prefix_path() assert prefix_path == [] # extra path separator with EnvironmentContext(COLCON_PREFIX_PATH=os.pathsep): prefix_path = get_colcon_prefix_path(skip='/path/to/skip') assert prefix_path == [] with TemporaryDirectory(prefix='test_colcon_') as basepath: basepath = Path(basepath) with EnvironmentContext(COLCON_PREFIX_PATH=os.pathsep.join( [str(basepath), str(basepath)] )): # multiple results prefix_path = get_colcon_prefix_path(skip='/path/to/skip') assert prefix_path == [str(basepath), str(basepath)] # skipping results prefix_path = get_colcon_prefix_path(skip=str(basepath)) assert prefix_path == [] # skipping non-existing results with EnvironmentContext(COLCON_PREFIX_PATH=os.pathsep.join( [str(basepath), str(basepath / 'non-existing-sub')] )): with patch('colcon_core.shell.logger.warning') as warn: prefix_path = get_colcon_prefix_path() assert prefix_path == [str(basepath)] assert warn.call_count == 1 assert len(warn.call_args[0]) == 1 assert warn.call_args[0][0].endswith( 
"non-existing-sub' in the environment variable " "COLCON_PREFIX_PATH doesn't exist") # suppress duplicate warning with patch('colcon_core.shell.logger.warning') as warn: prefix_path = get_colcon_prefix_path() assert prefix_path == [str(basepath)] assert warn.call_count == 0 def test_check_dependency_availability(): with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) dependencies = OrderedDict() dependencies['pkgA'] = prefix_path # missing package with pytest.raises(RuntimeError) as e: check_dependency_availability( dependencies, script_filename='package.ext') assert len(dependencies) == 1 assert 'Failed to find the following files:' in str(e.value) assert str(prefix_path / 'share' / 'pkgA' / 'package.ext') \ in str(e.value) assert 'Check that the following packages have been built:' \ in str(e.value) assert '- pkgA' in str(e.value) # package in workspace (prefix_path / 'share' / 'pkgA').mkdir(parents=True) (prefix_path / 'share' / 'pkgA' / 'package.ext').write_text('') check_dependency_availability( dependencies, script_filename='package.ext') assert len(dependencies) == 1 # package in environment dependencies['pkgA'] = prefix_path / 'invalid' with patch( 'colcon_core.shell.find_installed_packages_in_environment', side_effect=lambda: {'pkgA': prefix_path / 'env'} ): with patch('colcon_core.shell.logger.warning') as warn: check_dependency_availability( dependencies, script_filename='package.ext') assert len(dependencies) == 0 assert warn.call_count == 1 assert len(warn.call_args[0]) == 1 assert warn.call_args[0][0].startswith( "The following packages are in the workspace but haven't been " 'built:') assert '- pkgA' in warn.call_args[0][0] assert 'They are being used from the following locations instead:' \ in warn.call_args[0][0] assert str(prefix_path / 'env') in warn.call_args[0][0] assert '--packages-ignore pkgA' in warn.call_args[0][0] class FIExtension1(FindInstalledPackagesExtensionPoint): PRIORITY = 90 class FIExtension2(FindInstalledPackagesExtensionPoint): pass def test_get_find_installed_packages_extensions(): with ExtensionPointContext( extension1=FIExtension1, extension2=FIExtension2 ): extensions = get_find_installed_packages_extensions() assert list(extensions.keys()) == [100, 90] assert list(extensions[100].keys()) == ['extension2'] assert list(extensions[90].keys()) == ['extension1'] def test_find_installed_packages_extension_not_implemented(): with pytest.raises(NotImplementedError): FindInstalledPackagesExtensionPoint().find_installed_packages(Path()) def test_find_installed_packages_in_environment(): with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) prefix_path1 = prefix_path / 'one' prefix_path2 = prefix_path / 'two' with patch( 'colcon_core.shell.get_chained_prefix_path', return_value=[prefix_path1, prefix_path2] ): # not used prefixes result debug messages with patch('colcon_core.shell.logger.debug') as debug: find_installed_packages_in_environment() assert debug.call_count == 2 # the package is picked up from the first prefix with patch( 'colcon_core.shell.find_installed_packages', side_effect=lambda p: {'pkgA': p} ): packages = find_installed_packages_in_environment() assert len(packages) == 1 assert 'pkgA' in packages assert packages['pkgA'] == prefix_path1 def test_find_installed_packages(): with ExtensionPointContext( colcon_isolated=IsolatedInstalledPackageFinder, colcon_merged=MergedInstalledPackageFinder ): with TemporaryDirectory(prefix='test_colcon_') as install_base: 
install_base = Path(install_base) # install base doesn't exist assert find_installed_packages(install_base) is None # unknown install layout marker_file = install_base / '.colcon_install_layout' marker_file.write_text('unknown') assert find_installed_packages(install_base) is None # package index directory doesn't exist marker_file.write_text('merged') packages = find_installed_packages(install_base) assert len(packages) == 0 with patch( 'colcon_core.shell.installed_packages' '.get_relative_package_index_path', return_value=Path('relative/package/index') ) as rel_path: # setup for isolated case (install_base / 'dummy_file').write_text('') (install_base / '.hidden_dir').mkdir() (install_base / 'dummy_dir' / rel_path() / 'dummy_dir').mkdir( parents=True) (install_base / 'pkgA' / rel_path()).mkdir(parents=True) (install_base / 'pkgA' / rel_path() / 'pkgA').write_text('') # setup for merged case (install_base / rel_path() / 'dummy_dir').mkdir(parents=True) (install_base / rel_path() / '.dummy').write_text('') (install_base / rel_path() / 'pkgB').write_text('') (install_base / rel_path() / 'pkgC').write_text('') marker_file.write_text('isolated') packages = find_installed_packages(install_base) assert len(packages) == 1 assert 'pkgA' in packages.keys() assert packages['pkgA'] == install_base / 'pkgA' marker_file.write_text('merged') packages = find_installed_packages(install_base) assert len(packages) == 2 assert 'pkgB' in packages.keys() assert packages['pkgC'] == install_base assert 'pkgC' in packages.keys() assert packages['pkgB'] == install_base class FIExtensionPathNotExist(FindInstalledPackagesExtensionPoint): def find_installed_packages(self, install_base: Path): return {'pkgA': Path('/does/not/exist')} def test_inconsistent_package_finding_extensions(): with ExtensionPointContext(dne=FIExtensionPathNotExist): with TemporaryDirectory(prefix='test_colcon_') as install_base: install_base = Path(install_base) with patch('colcon_core.shell.logger.warning') as mock_warn: assert {} == find_installed_packages(install_base) dne_path = Path('/does/not/exist') mock_warn.assert_called_once_with( "Ignoring 'pkgA' found at '{0}'" ' because the path does not exist.'.format(dne_path)) def test_find_package_two_locations(): with TemporaryDirectory(prefix='test_colcon_') as base: base = Path(base) location1 = base / 'pkgA' location2 = base / 'pkgB' location1.mkdir() location2.mkdir() class PackageLocation1(FindInstalledPackagesExtensionPoint): def find_installed_packages(self, base: Path): return {'pkgA': location1} class PackageLocation2(FindInstalledPackagesExtensionPoint): def find_installed_packages(self, base: Path): return {'pkgA': location2} with ExtensionPointContext( loc1=PackageLocation1, loc2=PackageLocation2 ): with patch('colcon_core.shell.logger.warning') as mock_warn: assert {'pkgA': location1} == find_installed_packages(base) mock_warn.assert_called_once_with( "The package 'pkgA' previously found at" f" '{location1}' was found again at '{location2}'." 
f" Ignoring '{location2}'") colcon-core-0.17.1/test/test_shell_bat.py000066400000000000000000000066261465053734400203550ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path import sys from tempfile import TemporaryDirectory from colcon_core import shell from colcon_core.plugin_system import SkipExtensionException from colcon_core.shell.bat import BatShell import pytest from .run_until_complete import run_until_complete def test_extension(): use_all_shell_extensions = shell.use_all_shell_extensions shell.use_all_shell_extensions = True try: with TemporaryDirectory(prefix='test_colcon_') as prefix_path: _test_extension(Path(prefix_path)) finally: shell.use_all_shell_extensions = use_all_shell_extensions if sys.platform != 'win32': shell.use_all_shell_extensions = False try: with pytest.raises(SkipExtensionException): BatShell() finally: shell.use_all_shell_extensions = use_all_shell_extensions def _test_extension(prefix_path): extension = BatShell() # create_prefix_script extension.create_prefix_script(prefix_path, False) assert (prefix_path / 'local_setup.bat').exists() # create_package_script extension.create_package_script( prefix_path, 'pkg_name', [ ('hookA.bat', '/some/path/hookA.bat'), ('hookB.other', '/some/path/hookB.other')]) assert (prefix_path / 'share' / 'pkg_name' / 'package.bat').exists() content = (prefix_path / 'share' / 'pkg_name' / 'package.bat').read_text() assert 'hookA' in content assert 'hookB' not in content # create_hook_append_value hook_path = extension.create_hook_append_value( 'append_env_hook_name', prefix_path, 'pkg_name', 'APPEND_NAME', 'append_subdirectory') assert hook_path.exists() assert hook_path.name == 'append_env_hook_name.bat' content = hook_path.read_text() assert 'APPEND_NAME' in content # create_hook_prepend_value hook_path = extension.create_hook_prepend_value( 'env_hook_name', prefix_path, 'pkg_name', 'NAME', 'subdirectory') assert hook_path.exists() assert hook_path.name == 'env_hook_name.bat' content = hook_path.read_text() assert 'NAME' in content # generate_command_environment if sys.platform != 'win32': with pytest.raises(SkipExtensionException) as e: coroutine = extension.generate_command_environment( 'task_name', prefix_path, {}) run_until_complete(coroutine) assert str(e.value).endswith('Not usable on non-Windows systems') else: # dependency script missing with pytest.raises(RuntimeError) as e: coroutine = extension.generate_command_environment( 'task_name', prefix_path, {'dep': str(prefix_path)}) run_until_complete(coroutine) assert str(e.value) == ( 'Failed to find the following files:\n' f'- {prefix_path}\\share\\dep\\package.bat\n' 'Check that the following packages have been built:\n' '- dep') # dependency script exists dep_script = prefix_path / 'share' / 'dep' / 'package.bat' os.makedirs(str(dep_script.parent)) dep_script.write_text('') coroutine = extension.generate_command_environment( 'task_name', prefix_path, {'dep': str(prefix_path)}) env = run_until_complete(coroutine) assert isinstance(env, dict) colcon-core-0.17.1/test/test_shell_sh.py000066400000000000000000000067001465053734400202120ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path import sys from tempfile import TemporaryDirectory from colcon_core import shell from colcon_core.plugin_system import SkipExtensionException from colcon_core.shell.sh import ShShell import pytest from 
.run_until_complete import run_until_complete def test_extension(): use_all_shell_extensions = shell.use_all_shell_extensions shell.use_all_shell_extensions = True try: with TemporaryDirectory(prefix='test_colcon_') as prefix_path: _test_extension(Path(prefix_path)) finally: shell.use_all_shell_extensions = use_all_shell_extensions if sys.platform == 'win32': shell.use_all_shell_extensions = False try: with pytest.raises(SkipExtensionException): ShShell() finally: shell.use_all_shell_extensions = use_all_shell_extensions def _test_extension(prefix_path): extension = ShShell() # create_prefix_script extension.create_prefix_script(prefix_path, False) assert (prefix_path / 'local_setup.sh').exists() assert (prefix_path / '_local_setup_util_sh.py').exists() # create_package_script extension.create_package_script( prefix_path, 'pkg_name', [ ('hookA.sh', '/some/path/hookA.sh'), ('hookB.other', '/some/path/hookB.other')]) assert (prefix_path / 'share' / 'pkg_name' / 'package.sh').exists() content = (prefix_path / 'share' / 'pkg_name' / 'package.sh').read_text() assert 'hookA' in content assert 'hookB' not in content # create_hook_append_value hook_path = extension.create_hook_append_value( 'append_env_hook_name', prefix_path, 'pkg_name', 'APPEND_NAME', 'append_subdirectory') assert hook_path.exists() assert hook_path.name == 'append_env_hook_name.sh' content = hook_path.read_text() assert 'APPEND_NAME' in content # create_hook_prepend_value hook_path = extension.create_hook_prepend_value( 'env_hook_name', prefix_path, 'pkg_name', 'NAME', 'subdirectory') assert hook_path.exists() assert hook_path.name == 'env_hook_name.sh' content = hook_path.read_text() assert 'NAME' in content # generate_command_environment if sys.platform == 'win32': with pytest.raises(SkipExtensionException) as e: coroutine = extension.generate_command_environment( 'task_name', prefix_path, {}) run_until_complete(coroutine) assert str(e.value).endswith('Not usable on Windows systems') else: # dependency script missing with pytest.raises(RuntimeError) as e: coroutine = extension.generate_command_environment( 'task_name', prefix_path, {'dep': str(prefix_path)}) run_until_complete(coroutine) assert str(e.value) == ( 'Failed to find the following files:\n' f'- {prefix_path}/share/dep/package.sh\n' 'Check that the following packages have been built:\n' '- dep') # dependency script exists dep_script = prefix_path / 'share' / 'dep' / 'package.sh' os.makedirs(str(dep_script.parent)) dep_script.write_text('') coroutine = extension.generate_command_environment( 'task_name', prefix_path, {'dep': str(prefix_path)}) env = run_until_complete(coroutine) assert isinstance(env, dict) colcon-core-0.17.1/test/test_shell_template.py000066400000000000000000000041141465053734400214100ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path import sys from tempfile import TemporaryDirectory from unittest.mock import patch from colcon_core.shell.template import expand_template from em import TransientParseError import pytest def test_expand_template(): with TemporaryDirectory(prefix='test_colcon_') as base_path: template_path = Path(base_path) / 'template.em' destination_path = Path(base_path) / 'expanded_template' # invalid template, missing @[end if] template_path.write_text( '@[if True]') with pytest.raises(TransientParseError): with patch('colcon_core.shell.template.logger.error') as error: expand_template(template_path, destination_path, {}) # the raised exception is 
catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].endswith( f" processing template '{template_path}'") assert not destination_path.exists() # missing variable template_path.write_text( '@(var)') with pytest.raises(NameError): with patch('colcon_core.shell.template.logger.error') as error: expand_template(template_path, destination_path, {}) # the raised exception is catched and results in an error message assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].endswith( f" processing template '{template_path}'") assert not destination_path.exists() # skip all symlink tests on Windows for now if sys.platform == 'win32': # pragma: no cover return # remove destination if it is a symlink destination_path.symlink_to(template_path) assert destination_path.is_symlink() expand_template(template_path, destination_path, {'var': 'value'}) assert not destination_path.is_symlink() assert destination_path.exists() colcon-core-0.17.1/test/test_shell_template_prefix_util.py000066400000000000000000000064611465053734400240310ustar00rootroot00000000000000# Copyright 2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path from tempfile import TemporaryDirectory from unittest.mock import patch from colcon_core.location import get_relative_package_index_path from colcon_core.shell.template.prefix_util import get_packages from colcon_core.shell.template.prefix_util import main from colcon_core.shell.template.prefix_util import order_packages from colcon_core.shell.template.prefix_util import reduce_cycle_set import pytest def test_main(capsys): with patch( 'colcon_core.shell.template.prefix_util.get_packages', return_value={'pkgA': set()} ): main([]) out, err = capsys.readouterr() assert out == 'pkgA\n' assert not err def test_get_packages(): with TemporaryDirectory(prefix='test_colcon_') as prefix_path: prefix_path = Path(prefix_path) # check no packages in not merged install layout packages = get_packages(prefix_path, False) assert packages == {} # mock packages in not merged install layout subdirectory = get_relative_package_index_path() for pkg_name in ('pkgA', 'pkgB'): (prefix_path / pkg_name / subdirectory).mkdir(parents=True) (prefix_path / pkg_name / subdirectory / pkg_name).write_text( 'depX') (prefix_path / 'dummy_dir').mkdir() (prefix_path / '.hidden_dir').mkdir() (prefix_path / 'dummy_file').write_text('') # check no packages in merged install layout packages = get_packages(prefix_path, True) assert packages == {} # mock packages in merged install layout (prefix_path / subdirectory).mkdir(parents=True) (prefix_path / subdirectory / 'pkgB').write_text('') (prefix_path / subdirectory / 'pkgC').write_text( os.pathsep.join(('pkgB', 'depC'))) (prefix_path / subdirectory / 'dummy_dir').mkdir() (prefix_path / subdirectory / '.hidden_file').write_text('') # check packages and dependencies in not merged install layout packages = get_packages(prefix_path, False) assert len(packages) == 2 assert 'pkgA' in packages assert packages['pkgA'] == set() assert 'pkgB' in packages assert packages['pkgB'] == set() # check packages and dependencies in not merged install layout packages = get_packages(prefix_path, True) assert len(packages) == 2 assert 'pkgB' in packages assert packages['pkgB'] == set() assert 'pkgC' in packages assert packages['pkgC'] == {'pkgB'} def test_order_packages(): packages = { 'pkgA': {'pkgC'}, 'pkgB': {}, 'pkgC': {}, } ordered = 
order_packages(packages) assert ordered == ['pkgB', 'pkgC', 'pkgA'] packages = { 'pkgA': {'pkgB'}, 'pkgB': {'pkgA'}, 'pkgC': set(), } with pytest.raises(RuntimeError) as e: ordered = order_packages(packages) assert 'Circular dependency between:' in str(e.value) assert 'pkgA' in str(e.value) assert 'pkgB' in str(e.value) assert 'pkgC' not in str(e.value) def test_reduce_cycle_set(): packages = { 'pkgA': {'pkgB'}, 'pkgB': set(), } reduce_cycle_set(packages) assert len(packages) == 0 colcon-core-0.17.1/test/test_spell_check.py000066400000000000000000000036551465053734400206730ustar00rootroot00000000000000# Copyright 2016-2019 Dirk Thomas # Licensed under the Apache License, Version 2.0 from pathlib import Path import pytest spell_check_words_path = Path(__file__).parent / 'spell_check.words' @pytest.fixture(scope='module') def known_words(): global spell_check_words_path return spell_check_words_path.read_text().splitlines() @pytest.mark.linter def test_spell_check(known_words): from scspell import Report from scspell import SCSPELL_BUILTIN_DICT from scspell import spell_check source_filenames = [ Path(__file__).parents[1] / 'bin' / 'colcon', Path(__file__).parents[1] / 'setup.py'] + \ list((Path(__file__).parents[1] / 'colcon_core').glob('**/*.py')) + \ list((Path(__file__).parents[1] / 'test').glob('**/*.py')) for source_filename in sorted(source_filenames): print('Spell checking:', source_filename) # check all files report = Report(known_words) spell_check( [str(p) for p in source_filenames], base_dicts=[SCSPELL_BUILTIN_DICT], report_only=report, additional_extensions=[('', 'Python')]) unknown_word_count = len(report.unknown_words) assert unknown_word_count == 0, \ f'Found {unknown_word_count} unknown words: ' + \ ', '.join(sorted(report.unknown_words)) unused_known_words = set(known_words) - report.found_known_words unused_known_word_count = len(unused_known_words) assert unused_known_word_count == 0, \ f'{unused_known_word_count} words in the word list are not used: ' + \ ', '.join(sorted(unused_known_words)) @pytest.mark.linter def test_spell_check_word_list_order(known_words): assert known_words == sorted(known_words), \ 'The word list should be ordered alphabetically' @pytest.mark.linter def test_spell_check_word_list_duplicates(known_words): assert len(known_words) == len(set(known_words)), \ 'The word list should not contain duplicates' colcon-core-0.17.1/test/test_subprocess.py000066400000000000000000000057241465053734400206060ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import asyncio import sys from colcon_core.subprocess import check_output from colcon_core.subprocess import new_event_loop from colcon_core.subprocess import run import pytest from .run_until_complete import run_until_complete # TODO figure out how to avoid the stderr output @pytest.mark.skip( reason='Results in stderr output due to a UnicodeDecodeError for the ' 'generated coverage files') def test_check_output(): coroutine = check_output( [sys.executable, '-c', r"print('line1\nline2')"], shell=True) output = run_until_complete(coroutine) assert output.decode().splitlines() == ['line1', 'line2'] # TODO figure out how to avoid the stderr output @pytest.mark.skip( reason='Results in stderr output due to a UnicodeDecodeError for the ' 'generated coverage files') def test_run(): # without callbacks coroutine = run( [sys.executable, '-c', r"print('line1\nline2')"], None, None) completed_process = run_until_complete(coroutine) assert 
completed_process.returncode == 0 # without callbacks, with pty coroutine = run( [sys.executable, '-c', r"print('line1\nline2')"], None, None, use_pty=True) completed_process = run_until_complete(coroutine) assert completed_process.returncode == 0 # with callbacks stdout_lines = [] stderr_lines = [] def stdout_callback(line): nonlocal stdout_lines stdout_lines.append(line) def stderr_callback(line): nonlocal stderr_lines stderr_lines.append(line) coroutine = run( [sys.executable, '-c', r"print('line1\nline2')"], stdout_callback, stderr_callback) completed_process = run_until_complete(coroutine) assert completed_process.returncode == 0 assert stdout_lines == [b'line1\n', b'line2\n'] # with callbacks, with pty stdout_lines = [] stderr_lines = [] coroutine = run( [sys.executable, '-c', r"print('line1\nline2')"], stdout_callback, stderr_callback, use_pty=True) completed_process = run_until_complete(coroutine) assert completed_process.returncode == 0 assert stdout_lines == [b'line1\n', b'line2\n'] # TODO figure out why no coverage is being generated @pytest.mark.skip( reason='No coverage is being generated for this test') def test_run_cancel(): # with callbacks, canceled stdout_lines = [] def stdout_callback(line): nonlocal stdout_lines stdout_lines.append(line) coroutine = run( [ sys.executable, '-c', r"import time; time.sleep(1); print('line1\nline2')"], stdout_callback, None) loop = new_event_loop() asyncio.set_event_loop(loop) task = asyncio.Task(coroutine, loop=loop) assert task.cancel() is True try: with pytest.raises(asyncio.CancelledError): loop.run_until_complete(task) finally: loop.close() colcon-core-0.17.1/test/test_task.py000066400000000000000000000220111465053734400173440ustar00rootroot00000000000000# Copyright 2016-2018 Dirk Thomas # Licensed under the Apache License, Version 2.0 import os from pathlib import Path import sys from tempfile import TemporaryDirectory from unittest.mock import Mock from unittest.mock import patch from colcon_core.event.command import Command from colcon_core.event.job import JobProgress from colcon_core.event.output import StderrLine from colcon_core.event.output import StdoutLine from colcon_core.plugin_system import instantiate_extensions from colcon_core.task import add_task_arguments from colcon_core.task import create_file from colcon_core.task import get_task_extension from colcon_core.task import get_task_extensions from colcon_core.task import install from colcon_core.task import run from colcon_core.task import TaskContext from colcon_core.task import TaskExtensionPoint import pytest from .extension_point_context import ExtensionPointContext from .run_until_complete import run_until_complete def test_context_interface(): context = TaskContext(pkg=None, args=None, dependencies=None) with pytest.raises(NotImplementedError): context.put_event_into_queue(None) class Extension(TaskExtensionPoint): TASK_NAME = 'do' async def do(self, *args, **kwargs): self.progress('progress') self.print('hello') self.print('hello', file=sys.stdout) self.print('world', file=sys.stderr) with pytest.raises(AssertionError): self.print('invalid file handle', file=False) return 1 def test_extension_interface(): context = Mock() # capture events events = [] def put_event_into_queue(event): nonlocal events events.append(event) context.put_event_into_queue = put_event_into_queue extension = Extension() extension.set_context(context=context) rc = run_until_complete(extension()) assert rc == 1 assert len(events) == 4 assert isinstance(events[0], JobProgress) assert 
events[0].progress == 'progress' assert isinstance(events[1], StdoutLine) assert events[1].line == 'hello\n' assert isinstance(events[2], StdoutLine) assert events[2].line == 'hello\n' assert isinstance(events[3], StderrLine) assert events[3].line == 'world\n' # TODO figure out how to avoid the stderr output @pytest.mark.skip( reason='Results in stderr output due to a UnicodeDecodeError for the ' 'generated coverage files') def test_run(): context = Mock() events = [] def put_event_into_queue(event): nonlocal events events.append(event) context.put_event_into_queue = put_event_into_queue cmd = [ sys.executable, '-c', "import sys; print('hello'); print('world', file=sys.stderr)"] coroutine = run(context, cmd) completed_process = run_until_complete(coroutine) assert completed_process.returncode == 0 assert len(events) == 3 assert isinstance(events[0], Command) assert events[0].cmd == cmd assert isinstance(events[1], StdoutLine) assert events[1].line == b'hello\n' assert isinstance(events[2], StderrLine) assert events[2].line == b'world\n' class Extension1(TaskExtensionPoint): def build(self, *args, **kwargs): pass # pragma: no cover class Extension2(TaskExtensionPoint): def build(self, *args, **kwargs): pass # pragma: no cover def instantiate_extensions_without_cache( group_name, *, exclude_names=None, unique_instance=False ): return instantiate_extensions(group_name) def test_add_task_arguments(): parser = Mock() task_name = 'colcon_core.task.build' with ExtensionPointContext(extension1=Extension1, extension2=Extension2): with patch( 'colcon_core.task.instantiate_extensions', side_effect=instantiate_extensions_without_cache ): extensions = get_task_extensions(task_name) # one exception, one success extensions['extension1'].add_arguments = Mock( side_effect=RuntimeError('custom exception')) with patch('colcon_core.task.logger.error') as error: add_task_arguments(parser, task_name) assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in task extension 'build.extension1': custom " 'exception\n') # invalid return value extensions['extension1'].add_arguments = Mock() extensions['extension2'].add_arguments = Mock(return_value=None) with patch('colcon_core.task.logger.error') as error: add_task_arguments(parser, task_name) assert error.call_count == 1 assert len(error.call_args[0]) == 1 assert error.call_args[0][0].startswith( "Exception in task extension 'build.extension1': " 'add_arguments() should return None\n') assert extensions['extension2'].add_arguments.call_count == 1 def test_get_task_extension(): task_name = 'colcon_core.task.build' with ExtensionPointContext(extension1=Extension1, extension2=Extension2): # request invalid extension extension = get_task_extension(task_name, 'package_type') assert extension is None # request valid extension extension = get_task_extension(task_name, 'extension2') assert isinstance(extension, Extension2) def test_create_file(): with TemporaryDirectory(prefix='test_colcon_') as base_path: args = Mock() args.install_base = base_path create_file(args, 'file.txt') path = Path(base_path) / 'file.txt' assert path.is_file() assert path.read_text() == '' create_file(args, 'path/file.txt', content='content') path = Path(base_path) / 'path' / 'file.txt' assert path.is_file() assert path.read_text() == 'content' def test_install(): with TemporaryDirectory(prefix='test_colcon_') as base_path: args = Mock() args.path = os.path.join(base_path, 'path') args.install_base = os.path.join(base_path, 'install') 
args.symlink_install = False # create source files os.makedirs(args.path) with open(os.path.join(args.path, 'source.txt'), 'w') as h: h.write('content') with open(os.path.join(args.path, 'source2.txt'), 'w') as h: h.write('content2') # copy file install(args, 'source.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert not path.is_symlink() assert path.read_text() == 'content' # skip all symlink tests on Windows for now if sys.platform == 'win32': # pragma: no cover return # symlink file, removing existing file args.symlink_install = True install(args, 'source.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert path.is_symlink() assert path.samefile(os.path.join(args.path, 'source.txt')) assert path.read_text() == 'content' # symlink other file, removing existing directory os.remove(os.path.join(args.install_base, 'destination.txt')) os.makedirs(os.path.join(args.install_base, 'destination.txt')) install(args, 'source2.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert path.is_symlink() assert path.samefile(os.path.join(args.path, 'source2.txt')) assert path.read_text() == 'content2' # copy file, removing existing symlink args.symlink_install = False install(args, 'source.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert not path.is_symlink() assert path.read_text() == 'content' # symlink file os.remove(os.path.join(args.install_base, 'destination.txt')) args.symlink_install = True install(args, 'source.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert path.is_symlink() assert path.samefile(os.path.join(args.path, 'source.txt')) assert path.read_text() == 'content' # symlink file, same already existing install(args, 'source.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert path.is_symlink() assert path.samefile(os.path.join(args.path, 'source.txt')) assert path.read_text() == 'content' # symlink exists, but to a not existing location os.remove(os.path.join(args.path, 'source.txt')) install(args, 'source2.txt', 'destination.txt') path = Path(base_path) / 'install' / 'destination.txt' assert path.is_file() assert path.is_symlink() assert path.samefile(os.path.join(args.path, 'source2.txt')) colcon-core-0.17.1/test/test_task_python_test_pytest.py000066400000000000000000000031521465053734400234210ustar00rootroot00000000000000# Copyright 2021 Open Source Robotics Foundation, Inc. 
# Licensed under the Apache License, Version 2.0

from colcon_core.package_descriptor import PackageDescriptor
from colcon_core.task import TaskContext
from colcon_core.task.python import get_setup_data
from colcon_core.task.python.test.pytest import PytestPythonTestingStep


def test_pytest_match():
    extension = PytestPythonTestingStep()
    env = {}
    desc = PackageDescriptor('/dev/null')
    context = TaskContext(pkg=desc, args=None, dependencies=None)
    desc.name = 'pkg-name'
    desc.type = 'python'

    # no test requirements
    desc.metadata['get_python_setup_options'] = lambda env: {}
    assert not extension.match(context, env, get_setup_data(desc, env))

    # pytest not in tests_require
    desc.metadata['get_python_setup_options'] = lambda env: {
        'tests_require': ['nose'],
    }
    assert not extension.match(context, env, get_setup_data(desc, env))

    # pytest not in extras_require.test
    desc.metadata['get_python_setup_options'] = lambda env: {
        'extras_require': {
            'test': ['nose']
        },
    }
    assert not extension.match(context, env, get_setup_data(desc, env))

    # pytest in tests_require
    desc.metadata['get_python_setup_options'] = lambda env: {
        'tests_require': ['pytest'],
    }
    assert extension.match(context, env, get_setup_data(desc, env))

    # pytest in extras_require.test
    desc.metadata['get_python_setup_options'] = lambda env: {
        'extras_require': {
            'test': ['pytest']
        },
    }
    assert extension.match(context, env, get_setup_data(desc, env))

colcon-core-0.17.1/test/test_topological_order.py000066400000000000000000000044171465053734400221230ustar00rootroot00000000000000
# Copyright 2016-2018 Dirk Thomas
# Licensed under the Apache License, Version 2.0

from colcon_core.package_descriptor import PackageDescriptor
from colcon_core.topological_order import topological_order_packages
import pytest


def test_topological_order_packages():
    d1 = PackageDescriptor('/some/path')
    d1.name = 'a'
    d1.dependencies['build'].add('c')
    d2 = PackageDescriptor('/other/path')
    d2.name = 'b'
    d2.dependencies['run'].add('c')
    d3 = PackageDescriptor('/another/path')
    d3.name = 'c'
    d3.dependencies['build'].add('e')
    d3.dependencies['run'].add('f')
    d3.dependencies['test'].add('d')
    d4 = PackageDescriptor('/yet-another/path')
    d4.name = 'd'
    d4.dependencies['run'].add('f')
    d5 = PackageDescriptor('/more/path')
    d5.name = 'e'
    d5.dependencies['run'].add('f')
    d6 = PackageDescriptor('/yet-more/path')
    d6.name = 'f'

    decos = topological_order_packages(
        {d1, d2, d3, d4, d5, d6})
    names = [d.descriptor.name for d in decos]
    assert names == ['f', 'd', 'e', 'c', 'a', 'b']

    # ensure that input order doesn't affect the result
    decos = topological_order_packages(
        {d6, d5, d4, d3, d2, d1})
    names = [d.descriptor.name for d in decos]
    assert names == ['f', 'd', 'e', 'c', 'a', 'b']


def test_topological_order_packages_with_circular_dependency():
    d1 = PackageDescriptor('/some/path')
    d1.name = 'one'
    d1.dependencies['run'].add('two')
    d2 = PackageDescriptor('/other/path')
    d2.name = 'two'
    d2.dependencies['run'].add('three')
    d3 = PackageDescriptor('/another/path')
    d3.name = 'three'
    d3.dependencies['run'].add('one')
    d3.dependencies['run'].add('six')
    d4 = PackageDescriptor('/yet-another/path')
    d4.name = 'four'
    d5 = PackageDescriptor('/more/path')
    d5.name = 'five'
    d5.dependencies['run'].add('four')
    d6 = PackageDescriptor('/yet-more/path')
    d6.name = 'six'

    with pytest.raises(RuntimeError) as e:
        topological_order_packages({d1, d2, d3, d4})
    lines = str(e.value).splitlines()
    assert len(lines) == 4
    assert lines[0] == 'Unable to order packages topologically:'
    assert lines[1] == "one: ['three', 'two']"
    assert lines[2] == "three: ['one', 'two']"
    assert lines[3] == "two: ['one', 'three']"

colcon-core-0.17.1/test/test_type_collector.py000066400000000000000000000070721465053734400214430ustar00rootroot00000000000000
# Copyright 2021 Open Source Robotics Foundation, Inc.
# Licensed under the Apache License, Version 2.0

import argparse
import sys
import warnings

import pytest

with warnings.catch_warnings():
    warnings.filterwarnings(
        'ignore', message='.*deprecated.*', category=UserWarning,
        module='colcon_core.argument_parser.type_collector')
    from colcon_core.argument_parser.type_collector \
        import SuppressTypeConversions  # noqa: E402
    from colcon_core.argument_parser.type_collector \
        import TypeCollectorDecorator  # noqa: E402


class _RaisingArgumentParser(argparse.ArgumentParser):

    def error(self, message):
        raise sys.exc_info()[1]


def test_type_collector_decorator():
    parser = argparse.ArgumentParser()
    decorator = TypeCollectorDecorator(parser)
    decorator.add_argument('positional')
    assert decorator.get_types() == {}
    decorator.add_argument('--option', type=bool)
    assert decorator.get_types() == {bool: bool}


def test_suppress_type_conversions():
    parser = _RaisingArgumentParser()
    decorator = TypeCollectorDecorator(parser)
    decorator.add_argument('-f', type=float)
    decorator.add_argument('-i', type=int)
    decorator.register('action', 'not_implemented', argparse.Action)
    decorator.register('type', 'hex', float.fromhex)
    decorator.add_argument('-x', type='hex', default=None)

    args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42'])
    assert 3.14 == args.f
    assert 1 == args.i
    assert 0x42 == args.x

    with SuppressTypeConversions((decorator,)):
        parser.parse_args(['-f', 'bar', '-i', '1', '-x', '0x42'])
    with pytest.raises(argparse.ArgumentError):
        parser.parse_args(['-f', 'bar', '-i', '1', '-x', '0x42'])
    with pytest.raises(argparse.ArgumentError):
        with SuppressTypeConversions((decorator,), {float}):
            parser.parse_args(['-f', 'bar', '-i', '1', '-x', '0x42'])

    with SuppressTypeConversions((decorator,)):
        parser.parse_args(['-f', '3.14', '-i', 'bar', '-x', '0x42'])
    with pytest.raises(argparse.ArgumentError):
        parser.parse_args(['-f', '3.14', '-i', 'bar', '-x', '0x42'])
    with pytest.raises(argparse.ArgumentError):
        with SuppressTypeConversions((decorator,), {int}):
            parser.parse_args(['-f', '3.14', '-i', 'bar', '-x', '0x42'])

    with SuppressTypeConversions((decorator,)):
        parser.parse_args(['-f', '3.14', '-i', '1', '-x', 'foo'])
    with pytest.raises(argparse.ArgumentError):
        parser.parse_args(['-f', '3.14', '-i', '1', '-x', 'foo'])
    with pytest.raises(argparse.ArgumentError):
        with SuppressTypeConversions((decorator,), {'hex'}):
            parser.parse_args(['-f', '3.14', '-i', '1', '-x', 'foo'])

    args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42'])
    assert 3.14 == args.f
    assert 1 == args.i
    assert 0x42 == args.x


def test_suppress_not_decorated():
    parser = _RaisingArgumentParser()
    parser.add_argument('-f', type=float)
    parser.add_argument('-i', type=int)
    parser.register('action', 'not_implemented', argparse.Action)
    parser.register('type', 'hex', float.fromhex)
    parser.add_argument('-x', type='hex', default=None)

    args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42'])
    assert 3.14 == args.f
    assert 1 == args.i
    assert 0x42 == args.x

    with SuppressTypeConversions((parser,)):
        parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42'])

    args = parser.parse_args(['-f', '3.14', '-i', '1', '-x', '0x42'])
    assert 3.14 == args.f
    assert 1 == args.i
    assert 0x42 == args.x
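# ---------------------------------------------------------------------------
# Editor's note: the sketch below is NOT part of the original archive.  It is
# a minimal usage example of TypeCollectorDecorator and
# SuppressTypeConversions, mirroring only the calls exercised in the tests
# above (the tests import these names under a deprecation-warning filter).
# The option name '--jobs' and the "inspect a partial command line"
# motivation are assumptions made for illustration only.

import argparse

from colcon_core.argument_parser.type_collector import (
    SuppressTypeConversions, TypeCollectorDecorator)

parser = argparse.ArgumentParser()
decorator = TypeCollectorDecorator(parser)
# add_argument goes through the decorator so it can record the used types
decorator.add_argument('--jobs', type=int, default=1)

# While conversions are suppressed, a value that would normally fail the
# int() conversion no longer aborts parsing, e.g. when inspecting a partial
# command line without triggering argparse errors.
with SuppressTypeConversions((decorator,)):
    parser.parse_args(['--jobs', 'not-a-number'])

# Outside the context manager the regular conversion applies again.
args = parser.parse_args(['--jobs', '4'])
assert args.jobs == 4
# ---------------------------------------------------------------------------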
colcon-core-0.17.1/test/test_verb.py000066400000000000000000000127231465053734400173510ustar00rootroot00000000000000
# Copyright 2016-2018 Dirk Thomas
# Licensed under the Apache License, Version 2.0

import argparse
import logging
from pathlib import Path
from tempfile import TemporaryDirectory
from unittest.mock import patch

from colcon_core.verb import check_and_mark_build_tool
from colcon_core.verb import check_and_mark_install_layout
from colcon_core.verb import get_verb_extensions
from colcon_core.verb import update_object
from colcon_core.verb import VerbExtensionPoint
import pytest

from .extension_point_context import ExtensionPointContext


def test_verb_interface():
    assert hasattr(VerbExtensionPoint, 'EXTENSION_POINT_VERSION')
    interface = VerbExtensionPoint()
    interface.add_arguments(parser=None)
    with pytest.raises(NotImplementedError):
        interface.main(context=None)


class Extension1(VerbExtensionPoint):
    pass


class Extension2(VerbExtensionPoint):
    pass


def test_get_verb_extensions():
    with ExtensionPointContext(extension1=Extension1, extension2=Extension2):
        extensions = get_verb_extensions()
    assert list(extensions.keys()) == ['extension1', 'extension2']


def test_check_and_mark_build_tool():
    with TemporaryDirectory(prefix='test_colcon_') as base_path:
        base_path = Path(base_path)

        # create marker if it doesn't exist
        check_and_mark_build_tool(str(base_path))
        marker_path = base_path / '.built_by'
        assert marker_path.exists()
        assert marker_path.read_text().rstrip() == 'colcon'

        # create path and marker if it doesn't exist
        path = base_path / 'no_base'
        check_and_mark_build_tool(str(path))
        assert path.exists()
        marker_path = path / '.built_by'
        assert marker_path.exists()
        assert marker_path.read_text().rstrip() == 'colcon'

        # existing marker with same content
        path = base_path / 'existing_marker'
        path.mkdir()
        marker_path = path / '.built_by'
        marker_path.write_text('colcon')
        check_and_mark_build_tool(str(path))
        assert marker_path.exists()
        assert marker_path.read_text().rstrip() == 'colcon'

        # existing marker with different content
        marker_path.write_text('other')
        with pytest.raises(RuntimeError):
            check_and_mark_build_tool(str(path))


def test_check_and_mark_install_layout():
    with TemporaryDirectory(prefix='test_colcon_') as base_path:
        base_path = Path(base_path)

        # create marker if it doesn't exist
        check_and_mark_install_layout(str(base_path), merge_install=False)
        marker_path = base_path / '.colcon_install_layout'
        assert marker_path.exists()
        assert marker_path.read_text().rstrip() == 'isolated'

        # create path and marker if it doesn't exist
        path = base_path / 'no_base'
        check_and_mark_install_layout(str(path), merge_install=True)
        assert path.exists()
        marker_path = path / '.colcon_install_layout'
        assert marker_path.exists()
        assert marker_path.read_text().rstrip() == 'merged'

        # existing marker with same content
        check_and_mark_install_layout(str(path), merge_install=True)
        assert marker_path.exists()
        assert marker_path.read_text().rstrip() == 'merged'

        # existing marker with different content
        with pytest.raises(RuntimeError):
            check_and_mark_install_layout(str(path), merge_install=False)

        # install base which is a file
        with pytest.raises(RuntimeError):
            check_and_mark_install_layout(str(marker_path), merge_install=True)


def test_update_object():
    obj = argparse.Namespace()
    key = 'key'
    msg_args = ['package_name', 'argument_type', 'value_source']
    update_object(obj, key, 'foo', *msg_args)
    assert hasattr(obj, key)
    assert getattr(obj, key) == 'foo'

    # debug message when overwriting a value with the same type
    with patch('colcon_core.verb.logger.log') as log:
        update_object(obj, key, 'bar', *msg_args)
    assert getattr(obj, key) == 'bar'
    assert log.call_count == 1
    assert len(log.call_args[0]) == 2
    assert log.call_args[0][0] == 5
    assert 'overwrite' in log.call_args[0][1]

    # warning message when overwriting a value with a different type
    with patch('colcon_core.verb.logger.log') as log:
        update_object(obj, key, 42, *msg_args)
    assert getattr(obj, key) == 42
    assert log.call_count == 1
    assert len(log.call_args[0]) == 2
    assert log.call_args[0][0] == logging.WARNING
    assert 'overwrite' in log.call_args[0][1]

    # an existing dictionary value is updated with the passed dictionary
    obj = argparse.Namespace()
    update_object(obj, key, {'foo': 'foo', 'bar': 'bar'}, *msg_args)
    with patch('colcon_core.verb.logger.log') as log:
        update_object(obj, key, {'bar': 'BAR', 'baz': 'BAZ'}, *msg_args)
    assert getattr(obj, key) == {'foo': 'foo', 'bar': 'BAR', 'baz': 'BAZ'}
    assert log.call_count == 1
    assert len(log.call_args[0]) == 2
    assert log.call_args[0][0] == 5
    assert 'update' in log.call_args[0][1]

    # an existing list value is extended with the passed list
    obj = argparse.Namespace()
    update_object(obj, key, ['foo', 'bar'], *msg_args)
    with patch('colcon_core.verb.logger.log') as log:
        update_object(obj, key, ['bar', 'baz'], *msg_args)
    assert getattr(obj, key) == ['foo', 'bar', 'bar', 'baz']
    assert log.call_count == 1
    assert len(log.call_args[0]) == 2
    assert log.call_args[0][0] == 5
    assert 'extend' in log.call_args[0][1]

colcon-core-0.17.1/test/test_verb_build.py000066400000000000000000000057241465053734400205310ustar00rootroot00000000000000
# Copyright 2024 Open Source Robotics Foundation, Inc.
# Licensed under the Apache License, Version 2.0

import os
from unittest.mock import Mock
from unittest.mock import patch

from colcon_core.command import CommandContext
from colcon_core.package_decorator import get_decorators
from colcon_core.package_descriptor import PackageDescriptor
from colcon_core.plugin_system import satisfies_version
from colcon_core.task import TaskExtensionPoint
from colcon_core.verb.build import BuildVerb
import pytest


class NoopBuildTask(TaskExtensionPoint):

    TASK_NAME = 'build'
    PACKAGE_TYPE = 'baz'

    def __init__(self):  # noqa: D107
        super().__init__()
        satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')

    async def build(self, *, additional_hooks=None):
        pass


@pytest.fixture(scope='module', autouse=True)
def patch_other_extension_args():
    with patch('colcon_core.verb.build.add_event_handler_arguments'), \
            patch('colcon_core.verb.build.add_executor_arguments'), \
            patch('colcon_core.verb.build.add_packages_arguments'), \
            patch('colcon_core.verb.build.add_task_arguments'):
        yield


@pytest.fixture(scope='module', autouse=True)
def patch_get_task_extension():
    with patch(
        'colcon_core.verb.build.get_task_extension',
        return_value=NoopBuildTask(),
    ) as get_task_extension:
        yield get_task_extension


@pytest.fixture(scope='module', autouse=True)
def patch_get_packages():
    desc1 = PackageDescriptor('foo_bar')
    desc1.type = 'foo'
    desc1.name = 'bar'
    desc2 = PackageDescriptor('foo_baz')
    desc2.type = 'foo'
    desc2.name = 'baz'
    descriptors = {desc1, desc2}
    decorators = get_decorators(descriptors)
    for decorator in decorators:
        decorator.recursive_dependencies = []
    for decorator in decorators:
        decorator.selected = False
        break
    with patch(
        'colcon_core.verb.build.get_packages',
        return_value=decorators,
    ) as get_packages:
        yield get_packages


@pytest.fixture(scope='module', autouse=True)
def patch_execute_jobs():
    with patch(
        'colcon_core.verb.build.execute_jobs',
        return_value=0,
    ) as execute_jobs:
        yield execute_jobs


def test_add_arguments():
    extension = BuildVerb()
    parser = Mock()
    parser.add_argument = Mock()
    extension.add_arguments(parser=parser)
    # This extension calls argument adders from other extensions.
    # Verify only that *some* arguments were added.
    assert parser.add_argument.call_count > 4


def test_verb_test(tmpdir):
    extension = BuildVerb()
    extension.add_arguments(parser=Mock())
    context = CommandContext(
        command_name='colcon',
        args=Mock())
    context.args.build_base = os.path.join(tmpdir, 'build')
    context.args.install_base = os.path.join(tmpdir, 'install')
    context.args.test_result_base = os.path.join(tmpdir, 'test_results')
    assert 0 == extension.main(context=context)

colcon-core-0.17.1/test/test_verb_test.py000066400000000000000000000057051465053734400204110ustar00rootroot00000000000000
# Copyright 2024 Open Source Robotics Foundation, Inc.
# Licensed under the Apache License, Version 2.0

import os
from unittest.mock import Mock
from unittest.mock import patch

from colcon_core.command import CommandContext
from colcon_core.package_decorator import get_decorators
from colcon_core.package_descriptor import PackageDescriptor
from colcon_core.plugin_system import satisfies_version
from colcon_core.task import TaskExtensionPoint
from colcon_core.verb.test import TestVerb
import pytest


class NoopTestTask(TaskExtensionPoint):

    TASK_NAME = 'test'
    PACKAGE_TYPE = 'baz'

    def __init__(self):  # noqa: D107
        super().__init__()
        satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')

    async def test(self, *, additional_hooks=None):
        pass


@pytest.fixture(scope='module', autouse=True)
def patch_other_extension_args():
    with patch('colcon_core.verb.test.add_event_handler_arguments'), \
            patch('colcon_core.verb.test.add_executor_arguments'), \
            patch('colcon_core.verb.test.add_packages_arguments'), \
            patch('colcon_core.verb.test.add_task_arguments'):
        yield


@pytest.fixture(scope='module', autouse=True)
def patch_get_task_extension():
    with patch(
        'colcon_core.verb.test.get_task_extension',
        return_value=NoopTestTask(),
    ) as get_task_extension:
        yield get_task_extension


@pytest.fixture(scope='module', autouse=True)
def patch_get_packages():
    desc1 = PackageDescriptor('foo_bar')
    desc1.type = 'foo'
    desc1.name = 'bar'
    desc2 = PackageDescriptor('foo_baz')
    desc2.type = 'foo'
    desc2.name = 'baz'
    descriptors = {desc1, desc2}
    decorators = get_decorators(descriptors)
    for decorator in decorators:
        decorator.recursive_dependencies = []
    for decorator in decorators:
        decorator.selected = False
        break
    with patch(
        'colcon_core.verb.test.get_packages',
        return_value=decorators,
    ) as get_packages:
        yield get_packages


@pytest.fixture(scope='module', autouse=True)
def patch_execute_jobs():
    with patch(
        'colcon_core.verb.test.execute_jobs',
        return_value=0,
    ) as execute_jobs:
        yield execute_jobs


def test_add_arguments():
    extension = TestVerb()
    parser = Mock()
    parser.add_argument = Mock()
    extension.add_arguments(parser=parser)
    # This extension calls argument adders from other extensions.
    # Verify only that *some* arguments were added.
    assert parser.add_argument.call_count > 4


def test_verb_test(tmpdir):
    extension = TestVerb()
    extension.add_arguments(parser=Mock())
    context = CommandContext(
        command_name='colcon',
        args=Mock())
    context.args.build_base = os.path.join(tmpdir, 'build')
    context.args.install_base = os.path.join(tmpdir, 'install')
    context.args.test_result_base = os.path.join(tmpdir, 'test_results')
    assert 0 == extension.main(context=context)
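# ---------------------------------------------------------------------------
# Editor's note: the sketch below is NOT part of the original archive.  It
# shows a minimal verb extension following the interface exercised in
# test_verb.py above (``add_arguments`` receives a parser, ``main`` receives
# a CommandContext and returns a return code).  The class name, option name
# and greeting behaviour are hypothetical examples chosen for illustration;
# a real verb would additionally be registered under the
# ``colcon_core.verb`` entry point group so that colcon can discover it.

from colcon_core.verb import VerbExtensionPoint


class HelloVerb(VerbExtensionPoint):
    """Toy verb used only for illustration."""

    def add_arguments(self, *, parser):
        # Verb specific command line options are added here.
        parser.add_argument('--name', default='world', help='Who to greet')

    def main(self, *, context):
        # ``context.args`` holds the parsed command line arguments.
        print(f'Hello, {context.args.name}!')
        return 0
# ---------------------------------------------------------------------------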