pax_global_header00006660000000000000000000000064144707004030014511gustar00rootroot0000000000000052 comment=f65f8594161dcff276813d59d3c1145ce767bb75 osrf_pycommon-2.1.4/000077500000000000000000000000001447070040300144075ustar00rootroot00000000000000osrf_pycommon-2.1.4/.github/000077500000000000000000000000001447070040300157475ustar00rootroot00000000000000osrf_pycommon-2.1.4/.github/workflows/000077500000000000000000000000001447070040300200045ustar00rootroot00000000000000osrf_pycommon-2.1.4/.github/workflows/ci.yaml000066400000000000000000000013511447070040300212630ustar00rootroot00000000000000name: osrf_pycommon-ci on: push: branches: [master] pull_request: jobs: build: strategy: matrix: os: [macos-latest, ubuntu-22.04, windows-latest] python: ['3.7', '3.8', '3.9', '3.10'] include: - os: ubuntu-20.04 python: '3.6' name: osrf_pycommon tests runs-on: ${{matrix.os}} steps: - uses: actions/checkout@v3 - name: Set up Python ${{matrix.python}} uses: actions/setup-python@v4 with: python-version: ${{matrix.python}} - name: Install dependencies run: | python -m pip install -U -e .[test] pytest-cov - name: Run tests run: | python -m pytest tests --cov osrf_pycommon-2.1.4/.gitignore000066400000000000000000000010671447070040300164030ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ bin/ build/ develop-eggs/ dist/ eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .cache nosetests.xml coverage.xml # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject # Rope .ropeproject # Django stuff: *.log *.pot # Sphinx documentation docs/_build/ deb_dist .pytest_cache osrf_pycommon-2.1.4/CHANGELOG.rst000066400000000000000000000142001447070040300164250ustar00rootroot000000000000002.1.4 (2023-08-21) ------------------ * Catch all of the spurious warnings from get_event_loop. (`#94 `_) * Contributors: Chris Lalancette 2.1.3 (2023-07-11) ------------------ * Add bookworm as a python3 target (`#91 `_) * Suppress warning for specifically handled behavior (`#87 `_) * Update supported platforms (`#93 `_) * Add GitHub Actions CI workflow (`#88 `_) * Contributors: Scott K Logan, Tully Foote 2.1.2 (2023-02-14) ------------------ * [master] Update maintainers - 2022-11-07 (`#89 `_) * Contributors: Audrow Nash 2.1.1 (2022-11-07) ------------------ * Declare test dependencies in [test] extra (`#86 `_) * Contributors: Scott K Logan 2.1.0 (2022-05-10) ------------------ 2.0.2 (2022-04-08) ------------------ * Fix an importlib_metadata warning with Python 3.10. (`#84 `_) * Contributors: Chris Lalancette 2.0.1 (2022-02-14) ------------------ * Don't release 2.x / master on Debian Buster. (`#83 `_) Debian Buster is on Python 3.7: https://packages.debian.org/buster/python3 * Stop using mock in favor of unittest.mock. (`#74 `_) Mock has been deprecated since Python 3.3; see https://pypi.org/project/mock/ . The recommended replacement is unittest.mock, which seems to be a drop-in replacement. Co-authored-by: William Woodall * Fix dependencies (`#81 `_) * Remove obsolete setuptools from install_requires Now that pkg_resources are no longer used, there is no need to depend on setuptools at runtime. * Fix version-conditional dependency on importlib-metadata Use version markers to depend on importlib-metadata correctly. 
Explicit conditions mean that wheels built with setup.py will either have the dep or not depending on what Python version they're built with, rather than what version they're installed on. * fix whitespace and date in changelog heading * Contributors: Chris Lalancette, Michał Górny, Steven! Ragnarök, William Woodall 2.0.0 (2022-02-01) ------------------ * Replace the use of ``pkg_resources`` with the more modern ``importlib-metadata``. (`#66 `_) * Note this means that from now on you can only release on >= Ubuntu focal as that was when ``python3-importlib-metadata`` was introduced. * Used the ``1.0.x`` branch if you need an ealier version that still uses ``pkg_resources``. Co-authored-by: William Woodall * Contributors: Chris Lalancette 1.0.1 (2022-01-20) ------------------ * Update release distributions. (`#78 `_) * Contributors: Steven! Ragnarök 1.0.0 (2021-01-25) ------------------ * Added missing conflict rules in stdeb.cfg. * Removed Python 2 support. * Contributors: Chris Lalancette, Timon Engelke 0.2.1 (2021-01-25) ------------------ * Fix osrf.py_common.process_utils.get_loop() implementation (`#70 `_) * Contributors: Michel Hidalgo 0.2.0 (2020-12-07) ------------------ * Python 2/3 version conflict (`#69 `_) * remove jessie because we no longer support 3.4 (`#67 `_) * Remove deprecated use of asyncio.coroutine decorator. (`#64 `_) * Fix the __str_\_ method for windows terminal_color. (`#65 `_) * Contributors: Chris Lalancette, Jochen Sprickerhof, William Woodall 0.1.10 (2020-05-08) ------------------- * fixed simple deprecation warnings (issue `#61 `_) (`#63 `_) * Also run tests with Python 3.7 and 3.8 (`#60 `_) * Remove old py2 platforms, add Suite3 option with Ubuntu Focal (`#58 `_) * Contributors: Shane Loretz, Zahi Kakish 0.1.9 (2019-10-10 12:55:00 -0800) --------------------------------- * install resource marker file for package (`#56 `_) 0.1.8 (2019-09-17 11:30:00 -0800) --------------------------------- * Install package manifest. (`#55 `_) Signed-off-by: Dirk Thomas * Rename ansi_escape_senquences to ansi_escape_sequences keeping backwards compatibility. (`#53 `_) * Contributors: Chris Lalancette, Dirk Thomas 0.1.7 (2019-04-11 12:45:00 -0800) --------------------------------- * Use keyword arguments only for protocol_class invocations (`#52 `_) * Contributors: Daniel Stonier 0.1.6 (2018-11-15 12:45:00 -0800) --------------------------------- - Changed package.xml to use python2 or python3 dependencies as appropriate. `#50 `_ 0.1.5 (2018-06-19 21:00:00 -0800) --------------------------------- - Fixed a try-catch statement to adapt to changes in asyncio's raise behavior in `asyncio.get_event_loop()`. - Small changes, mostly related to distribution. 0.1.4 (2017-12-08 16:00:00 -0800) --------------------------------- - Only small test/linter fixes and documentation typos removed. 0.1.3 (2017-03-28 19:30:00 -0800) --------------------------------- - Fix to support optional arguments in verb pattern `#24 `_ 0.1.2 (2016-03-28 19:30:00 -0800) --------------------------------- - Started keeping a changelog. - Changed ``process_utils`` module so that it will use Trollius even on Python >= 3.4 if ``trollius`` has previously been imported. 
osrf_pycommon-2.1.4/CODEOWNERS000066400000000000000000000001241447070040300157770ustar00rootroot00000000000000# This file was generated by https://github.com/audrow/update-ros2-repos * @wjwwood osrf_pycommon-2.1.4/LICENSE000066400000000000000000000260731447070040300154240ustar00rootroot00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.osrf_pycommon-2.1.4/MANIFEST.in000066400000000000000000000001611447070040300161430ustar00rootroot00000000000000include CHANGELOG.rst include LICENSE include package.xml include README.md recursive-include osrf_pycommon *.pyosrf_pycommon-2.1.4/README.md000066400000000000000000000007771447070040300157010ustar00rootroot00000000000000osrf_pycommon ============= Commonly needed Python modules, used by Python software developed at OSRF. Branches ======== If you are releasing (using ``stdeb`` or on the ROS buildfarm) for any Ubuntu < ``focal``, or for any OS that doesn't have a key for ``python3-importlib-metadata``, then you need to use the ``1.0.x`` branch, or the latest ``1.`` branch, because starting with ``2.0.0``, that dependency will be required. If you are using Python 2, then you should use the ``python2`` branch. osrf_pycommon-2.1.4/_config.yml000066400000000000000000000000331447070040300165320ustar00rootroot00000000000000theme: jekyll-theme-minimalosrf_pycommon-2.1.4/docs/000077500000000000000000000000001447070040300153375ustar00rootroot00000000000000osrf_pycommon-2.1.4/docs/.gitignore000066400000000000000000000000071447070040300173240ustar00rootroot00000000000000_build osrf_pycommon-2.1.4/docs/Makefile000066400000000000000000000152061447070040300170030ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/osrf_pycommon.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/osrf_pycommon.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/osrf_pycommon" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/osrf_pycommon" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
$(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." osrf_pycommon-2.1.4/docs/_static/000077500000000000000000000000001447070040300167655ustar00rootroot00000000000000osrf_pycommon-2.1.4/docs/_static/.gitignore000066400000000000000000000000011447070040300207440ustar00rootroot00000000000000 osrf_pycommon-2.1.4/docs/cli_utils.rst000066400000000000000000000200441447070040300200600ustar00rootroot00000000000000The ``cli_utils`` Module ======================== This module provides functions and patterns for creating Command Line Interface (CLI) tools. Common CLI Functions -------------------- .. automodule:: osrf_pycommon.cli_utils.common :members: The Verb Pattern ---------------- The verb pattern is a pattern where a single command aggregates multiple related commands by taking a required positional argument which is the "verb" for the action you want to perform. For example, ``catkin build`` is an example of a ``command`` and ``verb`` pair, where ``catkin`` is the command and ``build`` is the verb. In this example, the ``catkin`` command groups "actions" which are related to catkin together using verbs like ``build`` which will build a workspace of catkin packages. Command Boilerplate ^^^^^^^^^^^^^^^^^^^ This is an example boilerplate of a command which will use verbs: .. 
code-block:: python from __future__ import print_function import argparse import sys from osrf_pycommon.cli_utils.verb_pattern import create_subparsers from osrf_pycommon.cli_utils.verb_pattern import list_verbs from osrf_pycommon.cli_utils.verb_pattern import split_arguments_by_verb COMMAND_NAME = '' VERBS_ENTRY_POINT = '{0}.verbs'.format(COMMAND_NAME) def main(sysargs=None): # Assign sysargs if not set sysargs = sys.argv[1:] if sysargs is None else sysargs # Create a top level parser parser = argparse.ArgumentParser( description="{0} command".format(COMMAND_NAME) ) # Generate a list of verbs available verbs = list_verbs(VERBS_ENTRY_POINT) # Create the subparsers for each verb and collect the arg preprocessors argument_preprocessors, verb_subparsers = create_subparsers( parser, COMMAND_NAME, verbs, VERBS_ENTRY_POINT, sysargs, ) # Determine the verb, splitting arguments into pre and post verb verb, pre_verb_args, post_verb_args = split_arguments_by_verb(sysargs) # Short circuit -h and --help if '-h' in pre_verb_args or '--help' in pre_verb_args: parser.print_help() sys.exit(0) # Error on no verb provided if verb is None: print(parser.format_usage()) sys.exit("Error: No verb provided.") # Error on unknown verb provided if verb not in verbs: print(parser.format_usage()) sys.exit("Error: Unknown verb '{0}' provided.".format(verb)) # Short circuit -h and --help for verbs if '-h' in post_verb_args or '--help' in post_verb_args: verb_subparsers[verb].print_help() sys.exit(0) # First allow the verb's argument preprocessor to strip any args # and return any "extra" information it wants as a dict processed_post_verb_args, extras = \ argument_preprocessors[verb](post_verb_args) # Then allow argparse to process the left over post-verb arguments along # with the pre-verb arguments and the verb itself args = parser.parse_args(pre_verb_args + [verb] + processed_post_verb_args) # Extend the argparse result with the extras from the preprocessor for key, value in extras.items(): setattr(args, key, value) # Finally call the subparser's main function with the processed args # and the extras which the preprocessor may have returned sys.exit(args.main(args) or 0) This function is mostly boilerplate in that it will likely not change much between commands of different types, but it would also be less transparent to have this function created for you. If you are using this boilerplate to implement your command, then you should be careful to update ``COMMAND_NAME`` to reflect your command's name. This line defines the ``entry_point`` group for your command's verbs: .. code-block:: python VERBS_ENTRY_POINT = '{0}.verbs'.format(COMMAND_NAME) In the case that your command is called ``foo`` then this would become ``foo.verbs``. This name is important because it is how verbs for this command can be provided by your Python package or others. For example, each verb for your command ``foo`` will need entry in the ``setup.py`` of its containing package, like this: .. code-block:: python setup( ... entry_points={ ... 'foo.verbs': [ 'bar = foo.verbs.bar:entry_point_data', ], } ) You can see here that you are defining ``bar`` to be a an entry_point of type ``foo.verbs`` which in turn points to a module and reference ``foo.verbs.bar`` and ``entry_point_data``. At run time this verb pattern will let your command lookup all things defined as ``foo.verbs`` and load up the reference to which they point. Adding Verbs ^^^^^^^^^^^^ In order to add a verb to your command, a few things must happen. 
First you must have an entry in the ``setup.py`` as described above. This allows the command to find the ``entry_point`` for your verb at run time. The ``entry_point`` for these verbs should point to a dictionary which describes the verb being added. This is an example of an ``entry_point_data`` dictionary for a verb: .. code-block:: python entry_point_data = dict( verb='build', description='Builds a workspace of packages', # Called for execution, given parsed arguments object main=main, # Called first to setup argparse, given argparse parser prepare_arguments=prepare_arguments, # Called after prepare_arguments, but before argparse.parse_args argument_preprocessor=argument_preprocessor, ) As you can see this dictionary describes the verb and gives references to functions which allow the command to describe the verb, hook into argparse parameter creation for the verb, and to execute the verb. The ``verb``, ``description``, ``main``, and ``prepare_arguments`` keys of the dictionary are required, but the ``argument_preprocessor`` key is optional. - ``verb``: This is the name of the verb, and is how the command knows which verb implementation to match to a verb on the command line. - ``description``: This is used by the argument parsing to describe the verb in ``--help``. - ``prepare_arguments``: This function gets called to allow the verb to setup it's own argparse options. This function should always take one parameter which is the :py:class:`argparse.ArgumentParser` for this verb, to which arguments can be added. It can optionally take a second parameter which are the current command line arguments. This is not always needed, but can be useful in some cases. This function should always return the parser. - ``argument_preprocessor``: This function is optional, but allows your verb an opportunity to process the raw arguments before they are passed to argparse's ``parse_args`` function. This can be useful when argparse is not capable of processing the options correctly. - ``main``: This is the implementation of the verb, it gets called last and is passed the parsed arguments. The return type of this function is used for ``sys.exit``, a return type of ``None`` is interpreted as ``0``. Here is an invented example of ``main``, ``prepare_arguments``, and ``argument_preprocessor``: .. code-block:: python def prepare_arguments(parser): parser.add_argument('--some-argument', action='store_true', default=False) return parser def argument_preprocessor(args): extras = {} if '-strange-argument' in args: args.remove('-strange-argument') extras['strange_argument'] = True return args, extras def main(options): print('--some-argument:', options.some_argument) print('-strange-argument:', options.strange_argument) if options.strange_argument: return 1 return 0 The above example is simply to illustrate the signature of these functions and how they might be used. Verb Pattern API ^^^^^^^^^^^^^^^^ .. automodule:: osrf_pycommon.cli_utils.verb_pattern :members: osrf_pycommon-2.1.4/docs/conf.py000066400000000000000000000205601447070040300166410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # osrf_pycommon documentation build configuration file, created by # sphinx-quickstart on Thu May 8 16:45:32 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. 
import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.viewcode', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'osrf_pycommon' copyright = u'2014, Open Source Robotics Foundation' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.0' # The full version, including alpha/beta/rc tags. release = '0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
#html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'osrf_pycommondoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'osrf_pycommon.tex', u'osrf\\_pycommon Documentation', u'William Woodall', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'osrf_pycommon', u'osrf_pycommon Documentation', [u'William Woodall'], 1) ] # If true, show URL addresses after external links. 
#man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'osrf_pycommon', u'osrf_pycommon Documentation', u'William Woodall', 'osrf_pycommon', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} osrf_pycommon-2.1.4/docs/index.rst000066400000000000000000000062071447070040300172050ustar00rootroot00000000000000``osrf_pycommon`` ================= ``osrf_pycommon`` is a python package which contains commonly used Python boilerplate code and patterns. Things like ansi terminal coloring, capturing colored output from programs using subprocess, or even a simple logging system which provides some nice functionality over the built-in Python logging system. The functionality provided here should be generic enough to be reused in arbitrary scenarios and should avoid bringing in dependencies which are not part of the standard Python library. Where possible Windows, Linux, and macOS should be supported, and where it cannot it should be gracefully degrading. Code should be pure Python 3. Contents: .. toctree:: :maxdepth: 2 cli_utils process_utils terminal_color terminal_utils Installing from Source ---------------------- Given that you have a copy of the source code, you can install ``osrf_pycommon`` like this: .. code-block:: bash $ python setup.py install .. note:: If you are installing to a system Python you may need to use ``sudo``. If you do not want to install ``osrf_pycommon`` into your system Python, or you don't have access to ``sudo``, then you can use a `virtualenv `_. Hacking ------- Because ``osrf_pycommon`` uses `setuptools `_ you can (and should) use the `develop `_ feature: .. code-block:: bash $ python setup.py develop .. note:: If you are developing against the system Python, you may need ``sudo``. This will "install" ``osrf_pycommon`` to your Python path, but rather than copying the source files, it will instead place a marker file in the ``PYTHONPATH`` redirecting Python to your source directory. This allows you to use it as if it were installed but where changes to the source code take immediate affect. When you are done with develop mode you can (and should) undo it like this: .. code-block:: bash $ python setup.py develop -u .. note:: If you are developing against the system Python, you may need ``sudo``. That will "uninstall" the hooks into the ``PYTHONPATH`` which point to your source directory, but you should be wary that sometimes console scripts do not get removed from the bin folder. Testing ------- In order to run the tests you will need to install `flake8 `_. Once you have installed those, then run ``unittest``: .. code-block:: bash $ python3 -m unittest discover -v tests Building the Documentation -------------------------- In order to build the docs you will need to first install `Sphinx `_. 
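Any reasonably recent Sphinx release should work; as a minimal sketch (assuming a standard Python environment with ``pip`` available, adjust the command for your setup), it can be installed like this:

.. code-block:: bash

   $ # Install or upgrade Sphinx for the current user
   $ python3 -m pip install --user -U Sphinx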
You can build the documentation by invoking the Sphinx provided make target in the ``docs`` folder: .. code-block:: bash $ # In the docs folder $ make html $ open _build/html/index.html Sometimes Sphinx does not pickup on changes to modules in packages which utilize the ``__all__`` mechanism, so on repeat builds you may need to clean the docs first: .. code-block:: bash $ # In the docs folder $ make clean $ make html $ open _build/html/index.html osrf_pycommon-2.1.4/docs/make.bat000066400000000000000000000150731447070040300167520ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\osrf_pycommon.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\osrf_pycommon.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end osrf_pycommon-2.1.4/docs/process_utils.rst000066400000000000000000000105271447070040300207740ustar00rootroot00000000000000The ``process_utils`` Module ============================= This module provides functions for doing process management. 
These are the main sections of this module: - `Asynchronous Process Utilities`_ - `Synchronous Process Utilities`_ - `Utility Functions`_ Asynchronous Process Utilities ------------------------------ There is a function and class which can be used together with your custom `asyncio `_ run loop. The :py:func:`osrf_pycommon.process_utils.async_execute_process` function is a `coroutine `_ which allows you to run a process and get the output back bit by bit in real-time, either with stdout and stderr separated or combined. This function also allows you to emulate the terminal using a pty simply by toggling a flag in the parameters. Along side this coroutine is a `Protocol `_ class, :py:class:`osrf_pycommon.process_utils.AsyncSubprocessProtocol`, from which you can inherit in order to customize how the yielded output is handled. Because this coroutine is built on the ``asyncio`` framework's subprocess functions, it is portable and should behave the same on all major OS's. (including on Windows where an IOCP implementation is used) .. autofunction:: osrf_pycommon.process_utils.async_execute_process .. autoclass:: osrf_pycommon.process_utils.AsyncSubprocessProtocol :members: In addtion to these functions, there is a utility function for getting the correct ``asyncio`` event loop: .. autofunction:: osrf_pycommon.process_utils.get_loop Treatment of File Descriptors ----------------------------- Like Python 3.4's ``subprocess.Popen`` (and newer versions), all of the ``process_utils`` functions do not close `inheritable ` file descriptors before starting subprocesses. This is equivalent to passing ``close_fds=False`` to ``subprocess.Popen`` on all Python versions. For historical context, in Python 3.2, the ``subprocess.Popen`` default for the ``close_fds`` option changed from ``False`` to ``True`` so that file descriptors opened by the parent process were closed before spawning the child process. In Python 3.4, `PEP 0446 `_ additionally made it so even when ``close_fds=False`` file descriptors which are `non-inheritable `_ are still closed before spawning the subprocess. If you want to be able to pass file descriptors to subprocesses in Python 3.4 or higher, you will need to make sure they are `inheritable `. Synchronous Process Utilities ----------------------------- For synchronous execution and output capture of subprocess, there are two functions: - :py:func:`osrf_pycommon.process_utils.execute_process` - :py:func:`osrf_pycommon.process_utils.execute_process_split` These functions are not yet using the ``asyncio`` framework as a back-end and therefore on Windows will not stream the data from the subprocess as it does on Unix machines. Instead data will not be yielded until the subprocess is finished and all output is buffered (the normal warnings about long running programs with lots of output apply). The streaming of output does not work on Windows because on Windows the :py:func:`select.select` method only works on sockets and not file-like objects which are used with subprocess pipes. ``asyncio`` implements Windows subprocess support by implementing a Proactor event loop based on Window's IOCP API. One future option will be to implement this synchronous style method using IOCP in this module, but another option is to just make synchronous the asynchronous calls, but there are issues with that as well. In the mean time, if you need streaming of output in both Windows and Unix, use the asynchronous calls. .. 
autofunction:: osrf_pycommon.process_utils.execute_process Availability: Unix (streaming), Windows (blocking) .. autofunction:: osrf_pycommon.process_utils.execute_process_split Availability: Unix (streaming), Windows (blocking) Utility Functions ----------------- Currently there is only one utility function, a Python implementation of the ``which`` shell command. .. autofunction:: osrf_pycommon.process_utils.which osrf_pycommon-2.1.4/docs/terminal_color.rst000066400000000000000000000001701447070040300211000ustar00rootroot00000000000000The ``terminal_color`` Module ============================= .. automodule:: osrf_pycommon.terminal_color :members: osrf_pycommon-2.1.4/docs/terminal_utils.rst000066400000000000000000000001701447070040300211220ustar00rootroot00000000000000The ``terminal_utils`` Module ============================= .. automodule:: osrf_pycommon.terminal_utils :members: osrf_pycommon-2.1.4/osrf_pycommon/000077500000000000000000000000001447070040300173015ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/__init__.py000066400000000000000000000000001447070040300214000ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/cli_utils/000077500000000000000000000000001447070040300212705ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/cli_utils/__init__.py000066400000000000000000000000001447070040300233670ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/cli_utils/common.py000066400000000000000000000141661447070040300231420ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Commonly used, CLI related functions.""" import re def extract_jobs_flags(arguments): """Extracts make job flags from a list of other make flags, i.e. -j8 -l8 The input arguments are given as a string separated by whitespace. Make job flags are matched and removed from the arguments, and the Make job flags and what is left over from the input arguments are returned. If no job flags are encountered, then an empty string is returned as the first element of the returned tuple. Examples: .. 
code-block:: python >> extract_jobs_flags('-j8 -l8') ('-j8 -l8', '') >> extract_jobs_flags('-j8 ') ('-j8', ' ') >> extract_jobs_flags('target -j8 -l8 --some-option') ('-j8 -l8', 'target --some-option') >> extract_jobs_flags('target --some-option') ('', 'target --some-option') :param str arguments: string of space separated arguments which may or may not contain make job flags :returns: tuple of make jobs flags as a space separated string and leftover arguments as a space separated string :rtype: tuple """ regex = ( r'(?:^|\s)(-?(?:j|l)(?:\s*[0-9]+|\s|$))' r'|' r'(?:^|\s)((?:--)?(?:jobs|load-average)(?:(?:=|\s+)[0-9]+|(?:\s|$)))' ) matches = [] leftover = '' last_match_end = 0 for match in re.finditer(regex, arguments) or []: matches.append(match.groups()[0] or match.groups()[1]) leftover += arguments[last_match_end:match.start()] last_match_end = match.end() leftover += arguments[last_match_end:] return ' '.join([m.strip() for m in matches]), leftover def extract_argument_group(args, delimiting_option): """Extract a group of arguments from a list of arguments using a delimiter. Here is an example: .. code-block:: python >>> extract_argument_group(['foo', '--args', 'bar', '--baz'], '--args') (['foo'], ['bar', '--baz']) The group can always be ended using the double hyphen ``--``. In order to pass a double hyphen as arguments, use three hyphens ``---``. Any set of hyphens encountered after the delimiter, and up to ``--``, which have three or more hyphens and are isolated, will be captured and reduced by one hyphen. For example: .. code-block:: python >> extract_argument_group(['foo', '--args', 'bar', '--baz', '---', '--', '--foo-option'], '--args') (['foo', '--foo-option'], ['bar', '--baz', '--']) In the result the ``--`` comes from the ``---`` in the input. The ``--args`` and the corresponding ``--`` are removed entirely. The delimiter and ``--`` terminator combination can also happen multiple times, in which case the bodies of arguments are combined and returned in the order they appeared. For example: .. code-block:: python >> extract_argument_group(['foo', '--args', 'ping', '--', 'bar', '--args', 'pong', '--', 'baz', '--args', '--'], '--args') (['foo', 'bar', 'baz'], ['ping', 'pong']) Note: ``--`` cannot be used as the ``delimiting_option``. :param list args: list of strings which are ordered arguments. :param str delimiting_option: option which denotes where to split the args. :returns: tuple of arguments before and after the delimiter. :rtype: tuple :raises: ValueError if the delimiting_option is ``--``. 
""" if delimiting_option == '--': raise ValueError("Cannot use '--' as the delimiter") if delimiting_option not in args: return args, [] trimmed_args = args extracted_args = [] # Loop through all arguments extracting groups of arguments while True: try: next_delimiter = trimmed_args.index(delimiting_option) except ValueError: # No delimiter's left in the arguments, stop looking break # Capture and remove args after the delimiter tail = trimmed_args[next_delimiter + 1:] trimmed_args = trimmed_args[:next_delimiter] # Look for a terminator, '--' next_terminator = None try: next_terminator = tail.index('--') except ValueError: pass if next_terminator is None: # No terminator, put all args in extracted_args and stop looking extracted_args.extend(tail) break else: # Terminator found, put args up, but not including terminator # in extracted_args extracted_args.extend(tail[:next_terminator]) # And put arguments after the terminator back in trimmed_args # then continue looking for additional delimiters trimmed_args.extend(tail[next_terminator + 1:]) # Iterate through extracted args and shorted tokens with 3+ -'s only for i, token in enumerate(extracted_args): # '--' should have been removed from extracted_args in the above loop assert token != '--', "this shouldn't happen" # Skip single hyphens if token == '-': continue # Check for non-hyphen characters if [c for c in token if c != '-']: # contains something other than -, continue continue # Must be only hyphens with more than two, Shorted by one - extracted_args[i] = token[1:] return trimmed_args, extracted_args osrf_pycommon-2.1.4/osrf_pycommon/cli_utils/verb_pattern.py000066400000000000000000000174351447070040300243470ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """API for implementing commands and verbs which used the verb pattern.""" import sys import inspect try: import importlib.metadata as importlib_metadata except ModuleNotFoundError: import importlib_metadata def call_prepare_arguments(func, parser, sysargs=None): """Call a prepare_arguments function with the correct number of parameters. The ``prepare_arguments`` function of a verb can either take one parameter, ``parser``, or two parameters ``parser`` and ``args``, where ``args`` are the current arguments being processed. :param func: Callable ``prepare_arguments`` function. :type func: Callable :param parser: parser which is always passed to the function :type parser: :py:class:`argparse.ArgumentParser` :param sysargs: arguments to optionally pass to the function, if needed :type sysargs: list :returns: return value of function or the parser if the function returns None. 
:rtype: :py:class:`argparse.ArgumentParser` :raises: ValueError if a function with the wrong number of parameters is given """ func_args = [parser] # If the provided function takes two arguments and args were given # also give the args to the function # Remove the following if condition and keep else condition once Xenial is # dropped if sys.version_info[0] < 3: arguments, _, _, defaults = inspect.getargspec(func) else: arguments, _, _, defaults, _, _, _ = inspect.getfullargspec(func) if arguments[0] == 'self': del arguments[0] if defaults: arguments = arguments[:-len(defaults)] if len(arguments) not in [1, 2]: # Remove the following if condition once Xenial is dropped if sys.version_info[0] < 3: raise ValueError("Given function '{0}' must have one or two " "parameters (excluding self), but got '{1}' " "parameters: '{2}'" .format(func.__name__, len(arguments), ', '.join(inspect.getargspec(func)[0]))) raise ValueError("Given function '{0}' must have one or two " "parameters (excluding self), but got '{1}' " "parameters: '{2}'" .format(func.__name__, len(arguments), ', '.join(inspect.getfullargspec(func)[0]))) if len(arguments) == 2: func_args.append(sysargs or []) return func(*func_args) or parser def create_subparsers(parser, cmd_name, verbs, group, sysargs, title=None): """Creates argparse subparsers for each verb which can be discovered. Using the ``verbs`` parameter, the available verbs are iterated through. For each verb a subparser is created for it using the ``parser`` parameter. The ``cmd_name`` is used to fill the title and description of the ``add_subparsers`` function call. The ``group`` parameter is used with each verb to load the verb's ``description``, ``prepare_arguments`` function, and the verb's ``argument_preprocessors`` if available. Each verb's ``prepare_arguments`` function is called, allowing them to add arguments. Finally a list of ``argument_preprocessors`` functions and verb subparsers are returned, one for each verb. :param parser: parser for this command :type parser: :py:class:`argparse.ArgumentParser` :param str cmd_name: name of the command to which the verbs are being added :param list verbs: list of verbs (by name as a string) :param str group: name of the ``entry_point`` group for the verbs :param list sysargs: list of system arguments :param str title: optional custom title for the command :returns: tuple of argument_preprocessors and verb subparsers :rtype: tuple """ metavar = '[' + ' | '.join(verbs) + ']' subparser = parser.add_subparsers( title=title or '{0} command'.format(cmd_name), metavar=metavar, description='Call `{0} {1} -h` for help on a each verb.'.format( cmd_name, metavar), dest='verb' ) argument_preprocessors = {} verb_subparsers = {} for verb in verbs: desc = load_verb_description(verb, group) cmd_parser = subparser.add_parser( desc['verb'], description=desc['description']) cmd_parser = call_prepare_arguments( desc['prepare_arguments'], cmd_parser, sysargs, ) cmd_parser.set_defaults(main=desc['main']) if 'argument_preprocessor' in desc: argument_preprocessors[verb] = desc['argument_preprocessor'] else: argument_preprocessors[verb] = default_argument_preprocessor verb_subparsers[verb] = cmd_parser return argument_preprocessors, verb_subparsers def default_argument_preprocessor(args): """Return unmodified args and an empty dict for extras""" extras = {} return args, extras def list_verbs(group): """List verbs available for a given ``entry_point`` group. 
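For example, a command implemented with this verb pattern might discover its verbs with this function and register them with :py:func:`create_subparsers` roughly like this (a minimal sketch; the ``mytool`` command name and the ``'mytool.verbs'`` entry point group are hypothetical):

.. code-block:: python

    import argparse
    import sys

    from osrf_pycommon.cli_utils.verb_pattern import create_subparsers
    from osrf_pycommon.cli_utils.verb_pattern import list_verbs

    def main(sysargs=None):
        sysargs = sys.argv[1:] if sysargs is None else sysargs
        parser = argparse.ArgumentParser(description='mytool')
        verbs = list_verbs('mytool.verbs')
        create_subparsers(parser, 'mytool', verbs, 'mytool.verbs', sysargs)
        args = parser.parse_args(sysargs)
        # Calling convention of the selected verb's main is an assumption here
        return args.main(args)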
:param str group: ``entry_point`` group name for the verbs to list :returns: list of verb names for the given ``entry_point`` group :rtype: list of str """ verbs = [] entry_points = importlib_metadata.entry_points() if hasattr(entry_points, 'select'): groups = entry_points.select(group=group) else: groups = entry_points.get(group, []) for entry_point in groups: verbs.append(entry_point.name) return verbs def load_verb_description(verb_name, group): """Load description of a verb in a given group by name. :param str verb_name: name of the verb to load, as a string :param str group: ``entry_point`` group name which the verb is in :returns: verb description :rtype: dict """ entry_points = importlib_metadata.entry_points() if hasattr(entry_points, 'select'): groups = entry_points.select(group=group) else: groups = entry_points.get(group, []) for entry_point in groups: if entry_point.name == verb_name: return entry_point.load() def split_arguments_by_verb(arguments): """Split arguments by verb. Given a list of arguments (list of strings), the verb, the pre verb arguments, and the post verb arguments are returned. For example: .. code-block:: python >>> args = ['--command-arg1', 'verb', '--verb-arg1', '--verb-arg2'] >>> split_arguments_by_verb(args) ('verb', ['--command-arg1'], ['--verb-arg1', '--verb-arg2']) :param list arguments: list of system arguments :returns: the verb (str), pre verb args (list), and post verb args (list) :rtype: tuple """ verb = None pre_verb_args = [] post_verb_args = [] for index, arg in enumerate(arguments): # If the arg does not start with a `-` then it is a positional argument # The first positional argument must be the verb if not arg.startswith('-'): verb = arg post_verb_args = arguments[index + 1:] break # Otherwise it is a pre-verb option pre_verb_args.append(arg) return verb, pre_verb_args, post_verb_args osrf_pycommon-2.1.4/osrf_pycommon/process_utils/000077500000000000000000000000001447070040300221775ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/process_utils/__init__.py000066400000000000000000000023051447070040300243100ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module provides functions for doing process management. The documentation for this module has a custom layout in process_utils.rst. """ from .async_execute_process import async_execute_process from .async_execute_process import asyncio from .async_execute_process import AsyncSubprocessProtocol from .async_execute_process import get_loop from .impl import execute_process from .impl import execute_process_split from .impl import which __all__ = [ 'async_execute_process', 'asyncio', 'AsyncSubprocessProtocol', 'get_loop', 'execute_process', 'execute_process_split', 'which', ] osrf_pycommon-2.1.4/osrf_pycommon/process_utils/async_execute_process.py000066400000000000000000000214401447070040300271470ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import sys from .async_execute_process_asyncio import async_execute_process from .async_execute_process_asyncio import get_loop from .async_execute_process_asyncio import asyncio __all__ = [ 'async_execute_process', 'AsyncSubprocessProtocol', 'get_loop', ] async_execute_process.__doc__ = """ Coroutine to execute a subprocess and yield the output back asynchronously. This function is meant to be used with the Python :py:mod:`asyncio` module, which is available in Python 3.5 or greater. Here is an example of how to use this function: .. code-block:: python import asyncio from osrf_pycommon.process_utils import async_execute_process from osrf_pycommon.process_utils import AsyncSubprocessProtocol from osrf_pycommon.process_utils import get_loop async def setup(): transport, protocol = await async_execute_process( AsyncSubprocessProtocol, ['ls', '/usr']) returncode = await protocol.complete return returncode retcode = get_loop().run_until_complete(setup()) get_loop().close() Tthe first argument is the default :py:class:`AsyncSubprocessProtocol` protocol class, which simply prints output from stdout to stdout and output from stderr to stderr. If you want to capture and do something with the output or write to the stdin, then you need to subclass from the :py:class:`AsyncSubprocessProtocol` class, and override the ``on_stdout_received``, ``on_stderr_received``, and ``on_process_exited`` functions. See the documentation for the :py:class:`AsyncSubprocessProtocol` class for more details, but here is an example which uses asyncio from Python 3.5: .. code-block:: python import asyncio from osrf_pycommon.process_utils import async_execute_process from osrf_pycommon.process_utils import AsyncSubprocessProtocol from osrf_pycommon.process_utils import get_loop class MyProtocol(AsyncSubprocessProtocol): def __init__(self, file_name, **kwargs): self.fh = open(file_name, 'w') AsyncSubprocessProtocol.__init__(self, **kwargs) def on_stdout_received(self, data): # Data has line endings intact, but is bytes in Python 3 self.fh.write(data.decode('utf-8')) def on_stderr_received(self, data): self.fh.write(data.decode('utf-8')) def on_process_exited(self, returncode): self.fh.write("Exited with return code: {0}".format(returncode)) self.fh.close() async def log_command_to_file(cmd, file_name): def create_protocol(**kwargs): return MyProtocol(file_name, **kwargs) transport, protocol = await async_execute_process( create_protocol, cmd) returncode = await protocol.complete return returncode get_loop().run_until_complete( log_command_to_file(['ls', '/'], '/tmp/out.txt')) get_loop().close() See the :py:class:`subprocess.Popen` class for more details on some of the parameters to this function like ``cwd``, ``env``, and ``shell``. See the :py:func:`osrf_pycommon.process_utils.execute_process` function for more details on the ``emulate_tty`` parameter. 
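Since this is a coroutine, several subprocesses can also be run concurrently on the same event loop, for example with :py:func:`asyncio.gather`; the following is only a minimal sketch, not one of the original examples:

.. code-block:: python

    import asyncio

    from osrf_pycommon.process_utils import async_execute_process
    from osrf_pycommon.process_utils import AsyncSubprocessProtocol
    from osrf_pycommon.process_utils import get_loop

    async def run(cmd):
        transport, protocol = await async_execute_process(
            AsyncSubprocessProtocol, cmd)
        return await protocol.complete

    async def run_all():
        return await asyncio.gather(run(['ls', '/usr']), run(['ls', '/tmp']))

    retcodes = get_loop().run_until_complete(run_all())
    get_loop().close()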
:param protocol_class: Protocol class which handles subprocess callbacks :type protocol_class: :py:class:`AsyncSubprocessProtocol` or a subclass :param list cmd: list of arguments where the executable is the first item :param str cwd: directory in which to run the command :param dict env: a dictionary of environment variable names to values :param bool shell: if True, the ``cmd`` variable is interpreted by a the shell :param bool emulate_tty: if True, pty's are passed to the subprocess for stdout and stderr, see :py:func:`osrf_pycommon.process_utils.execute_process`. :param bool stderr_to_stdout: if True, stderr is directed to stdout, so they are not captured separately. """ class AsyncSubprocessProtocol(asyncio.SubprocessProtocol): """ Protocol to subclass to get events from :py:func:`async_execute_process`. When subclassing this Protocol class, you should override these functions: .. code-block:: python def on_stdout_received(self, data): # ... def on_stderr_received(self, data): # ... def on_process_exited(self, returncode): # ... By default these functions just print the data received from stdout and stderr and does nothing when the process exits. Data received by the ``on_stdout_received`` and ``on_stderr_received`` functions is always in ``bytes``. Therefore, it may be necessary to call ``.decode()`` on the data before printing to the screen. Additionally, the data received will not be stripped of new lines, so take that into consideration when printing the result. You can also override these less commonly used functions: .. code-block:: python def on_stdout_open(self): # ... def on_stdout_close(self, exc): # ... def on_stderr_open(self): # ... def on_stderr_close(self, exc): # ... These functions are called when stdout/stderr are opened and closed, and can be useful when using pty's for example. The ``exc`` parameter of the ``*_close`` functions is None unless there was an exception. In addition to the overridable functions this class has a few useful public attributes. The ``stdin`` attribute is a reference to the PipeProto which follows the :py:class:`asyncio.WriteTransport` interface. The ``stdout`` and ``stderr`` attributes also reference their PipeProto. The ``complete`` attribute is a :py:class:`asyncio.Future` which is set to complete when the process exits and its result is the return code. The ``complete`` attribute can be used like this: .. code-block:: python import asyncio from osrf_pycommon.process_utils import async_execute_process from osrf_pycommon.process_utils import AsyncSubprocessProtocol from osrf_pycommon.process_utils import get_loop async def setup(): transport, protocol = await async_execute_process( AsyncSubprocessProtocol, ['ls', '-G', '/usr']) retcode = await protocol.complete print("Exited with", retcode) # This will block until the protocol.complete Future is done. 
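# (protocol.complete is an asyncio.Future which is resolved with the return code by process_exited() when the subprocess exits.)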
get_loop().run_until_complete(setup()) get_loop().close() """ def __init__(self, stdin=None, stdout=None, stderr=None): self.stdin = stdin self.stdout = stdout self.stderr = stderr self.complete = asyncio.Future() asyncio.SubprocessProtocol.__init__(self) def connection_made(self, transport): self.transport = transport if self.stdin is None: self.stdin = self.transport.get_pipe_transport(0) if self.stdout is None: self.stdout = self.transport.get_pipe_transport(1) if self.stderr is None: self.stderr = self.transport.get_pipe_transport(2) def pipe_data_received(self, fd, data): # This function is only called when pty's are not being used stdout = self.stdout if not isinstance(stdout, int): stdout = 1 if fd == stdout: if hasattr(self, 'on_stdout_received'): self.on_stdout_received(data) else: assert fd == 2 if hasattr(self, 'on_stderr_received'): self.on_stderr_received(data) def _on_stdout_received(self, data): # print(data.__repr__()) print(data.decode(), end='') def _on_stderr_received(self, data): # print(data.__repr__(), file=sys.stderr) print(data.decode(), end='', file=sys.stderr) def process_exited(self): retcode = self.transport.get_returncode() self.complete.set_result(retcode) self.on_process_exited(retcode) def on_process_exited(self, returncode): # print("Exited with", returncode) pass get_loop.__doc__ = """\ This function will return the proper event loop for the subprocess async calls. On Unix this just returns :py:func:`asyncio.get_event_loop`, but on Windows it will set and return a :py:class:`asyncio.ProactorEventLoop` instead. """ osrf_pycommon-2.1.4/osrf_pycommon/process_utils/async_execute_process_asyncio/000077500000000000000000000000001447070040300303215ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/process_utils/async_execute_process_asyncio/__init__.py000066400000000000000000000020351447070040300324320ustar00rootroot00000000000000# Copyright 2016 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. try: try: import asyncio except ImportError: pass else: from .impl import async_execute_process from .impl import get_loop __all__ = [ 'async_execute_process', 'asyncio', 'get_loop', ] except SyntaxError: pass """ This exists as a package rather than a module so that it can be excluded from install in the setup.py when installing for Python2. """ osrf_pycommon-2.1.4/osrf_pycommon/process_utils/async_execute_process_asyncio/impl.py000066400000000000000000000116561447070040300316450ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. import asyncio import os try: import pty has_pty = True except ImportError: has_pty = False from ..get_loop_impl import get_loop_impl def get_loop(): return get_loop_impl(asyncio) async def _async_execute_process_nopty( protocol_class, cmd, cwd, env, shell, stderr_to_stdout=True ): loop = get_loop() stderr = asyncio.subprocess.PIPE if stderr_to_stdout is True: stderr = asyncio.subprocess.STDOUT # Start the subprocess if shell is True: transport, protocol = await loop.subprocess_shell( protocol_class, " ".join(cmd), cwd=cwd, env=env, stderr=stderr, close_fds=False) else: transport, protocol = await loop.subprocess_exec( protocol_class, *cmd, cwd=cwd, env=env, stderr=stderr, close_fds=False) return transport, protocol if has_pty: # If pty is available, use it to emulate the tty async def _async_execute_process_pty( protocol_class, cmd, cwd, env, shell, stderr_to_stdout=True ): loop = get_loop() # Create the PTY's stdout_master, stdout_slave = pty.openpty() if stderr_to_stdout: stderr_master, stderr_slave = stdout_master, stdout_slave else: stderr_master, stderr_slave = pty.openpty() def protocol_factory(): return protocol_class( stdin=None, stdout=stdout_master, stderr=stderr_master ) # Start the subprocess if shell is True: transport, protocol = await loop.subprocess_shell( protocol_factory, " ".join(cmd), cwd=cwd, env=env, stdout=stdout_slave, stderr=stderr_slave, close_fds=False) else: transport, protocol = await loop.subprocess_exec( protocol_factory, *cmd, cwd=cwd, env=env, stdout=stdout_slave, stderr=stderr_slave, close_fds=False) # Close our copies of the slaves, # the child's copy of the slave remains open until it terminates os.close(stdout_slave) if not stderr_to_stdout: os.close(stderr_slave) # Create Protocol classes class PtyStdoutProtocol(asyncio.Protocol): def connection_made(self, transport): if hasattr(protocol, 'on_stdout_open'): protocol.on_stdout_open() def data_received(self, data): if hasattr(protocol, 'on_stdout_received'): protocol.on_stdout_received(data) def connection_lost(self, exc): if hasattr(protocol, 'on_stdout_close'): protocol.on_stdout_close(exc) class PtyStderrProtocol(asyncio.Protocol): def connection_made(self, transport): if hasattr(protocol, 'on_stderr_open'): protocol.on_stderr_open() def data_received(self, data): if hasattr(protocol, 'on_stderr_received'): protocol.on_stderr_received(data) def connection_lost(self, exc): if hasattr(protocol, 'on_stderr_close'): protocol.on_stderr_close(exc) # Add the pty's to the read loop # Also store the transport, protocol tuple for each call to # connect_read_pipe, to prevent the destruction of the protocol # class instance, otherwise no data is received.
protocol.stdout_tuple = await loop.connect_read_pipe( PtyStdoutProtocol, os.fdopen(stdout_master, 'rb', 0)) if not stderr_to_stdout: protocol.stderr_tuple = await loop.connect_read_pipe( PtyStderrProtocol, os.fdopen(stderr_master, 'rb', 0)) # Return the protocol and transport return transport, protocol else: _async_execute_process_pty = _async_execute_process_nopty async def async_execute_process( protocol_class, cmd=None, cwd=None, env=None, shell=False, emulate_tty=False, stderr_to_stdout=True ): if emulate_tty: transport, protocol = await _async_execute_process_pty( protocol_class, cmd, cwd, env, shell, stderr_to_stdout) else: transport, protocol = await _async_execute_process_nopty( protocol_class, cmd, cwd, env, shell, stderr_to_stdout) return transport, protocol osrf_pycommon-2.1.4/osrf_pycommon/process_utils/execute_process_nopty.py000066400000000000000000000130411447070040300272010ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import errno import os import select from subprocess import PIPE from subprocess import Popen from subprocess import STDOUT import sys _is_linux = sys.platform.lower().startswith('linux') _is_windows = sys.platform.lower().startswith('win') def _process_incoming_lines(incoming, left_over): # This function takes the new data, the left over data from last time # and returns a list of complete lines (separated by sep) as well as # any sep trailing data for the next iteration # This function takes and returns bytes only combined = (left_over + incoming) lines = combined.splitlines(True) if not lines: return None, left_over # Use splitlines because it is magic # comparing against os.linesep is not sufficient if lines[-1].splitlines() != [lines[-1]]: data = b''.join(lines) left_over = b'' else: data = b''.join(lines[:-1]) left_over = lines[-1] return data, left_over def _close_fds(fds_to_close): # This function is used to close (if not already closed) any fds used for s in fds_to_close: if s is None: continue try: os.close(s) except OSError as exc: # This could raise "OSError: [Errno 9] Bad file descriptor" # If it has already been closed, but that's ok if "Bad file descriptor" not in "{0}".format(exc): raise def _yield_data(p, fds, left_overs, linesep, fds_to_close=None): # This function uses select and subprocess.Popen.poll to collect out # from a subprocess until it has finished, yielding it as it goes fds_to_close = [] if fds_to_close is None else fds_to_close def yield_to_stream(data, stream): if stream == fds[0]: return data, None, None else: return None, data, None try: while p.poll() is None: # If Windows if _is_windows: for stream in fds: # This will not produce the best results, but at least # it will function on Windows. A True IOCP implementation # would be required to get streaming from Windows streams. 
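# Note: readline() does not return until a full line (or EOF) is available, which is why output on Windows is effectively blocking rather than streamed.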
data = stream.readline() if data: yield yield_to_stream(data, stream) continue # Otherwise Unix try: rlist, wlist, xlist = select.select(fds, [], []) except select.error as exc: # Ignore EINTR try: errnum = exc.errno except AttributeError: errnum = exc[0] if errnum == errno.EINTR: continue raise for stream in rlist: left_over = left_overs[stream] fileno = getattr(stream, 'fileno', lambda: stream)() try: incoming = os.read(fileno, 1024) except OSError as exc: # On Linux, when using a pty, in order to get select # to return when the subprocess finishes, os.close # must be called on the slave pty fd after forking # the subprocess with popen. On some versions of # the Linux kernel this causes an Errno 5 OSError, # "Input/output error". Therefore, I am explicitly # catching and passing on this error. In my testing # this error does not occur repeatedly (it does not # become a busy wait). See: # http://stackoverflow.com/a/12207447/671658 if _is_linux and "Input/output error" in "{0}".format(exc): continue raise if not incoming: # In this case, EOF has been reached, see docs for os.read if left_over: yield yield_to_stream(left_over, stream) continue data, left_over = _process_incoming_lines(incoming, left_over) left_overs[stream] = left_over yield yield_to_stream(data, stream) # Done yield None, None, p.returncode finally: # Make sure we don't leak file descriptors _close_fds(fds_to_close) def _execute_process_nopty(cmd, cwd, env, shell, stderr_to_stdout=True): if stderr_to_stdout: p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT, cwd=cwd, env=env, shell=shell, close_fds=False) else: p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=cwd, env=env, shell=shell, close_fds=False) # Left over data from read which isn't a complete line yet left_overs = {p.stdout: b'', p.stderr: b''} fds = list(filter(None, [p.stdout, p.stderr])) return _yield_data(p, fds, left_overs, os.linesep) osrf_pycommon-2.1.4/osrf_pycommon/process_utils/execute_process_pty.py000066400000000000000000000057561447070040300266620ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
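# --- Illustrative usage sketch (not part of the original module) -----------
# The pty-based implementation below is normally reached through the public
# API by passing ``emulate_tty=True`` to
# ``osrf_pycommon.process_utils.execute_process``. The helper below only
# demonstrates that usage; its name and exact behavior are hypothetical.
def _example_capture_colored_output(cmd):
    # Imported locally so this example does not alter the module's imports.
    from osrf_pycommon.process_utils import execute_process
    lines = []
    returncode = None
    try:
        output = execute_process(cmd, emulate_tty=True)
    except OSError:
        # Fall back when no pty can be allocated (see execute_process docs).
        output = execute_process(cmd, emulate_tty=False)
    for line in output:
        if isinstance(line, int):
            returncode = line
            continue
        lines.append(line if isinstance(line, str) else line.decode('utf-8'))
    return ''.join(lines), returncode
# ----------------------------------------------------------------------------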
import os try: import pty except ImportError: # to support --cover-inclusive on Windows if os.name not in ['nt']: raise from subprocess import Popen from subprocess import STDOUT import time from .execute_process_nopty import _close_fds from .execute_process_nopty import _yield_data def _execute_process_pty(cmd, cwd, env, shell, stderr_to_stdout=True): stdout_master, stdout_slave = None, None stderr_master, stderr_slave = None, None fds_to_close = [stdout_master, stdout_slave, stderr_master, stderr_slave] try: stdout_master, stdout_slave = pty.openpty() if stderr_to_stdout: stderr_master, stderr_slave = stdout_master, stdout_slave else: stderr_master, stderr_slave = pty.openpty() p = None while p is None: try: p = Popen( cmd, stdin=stdout_slave, stdout=stderr_slave, stderr=STDOUT, cwd=cwd, env=env, shell=shell, close_fds=False) except OSError as exc: # This can happen if a file you are trying to execute is being # written to simultaneously on Linux # (doesn't appear to happen on OS X) # It seems like the best strategy is to just try again later # Worst case is that the file eventually gets deleted, then a # different OSError would occur. if 'Text file busy' in '{0}'.format(exc): # This is a transient error, try again shortly time.sleep(0.01) continue raise # This causes the below select to exit when the subprocess closes. # On Linux, this sometimes causes Errno 5 OSError's when os.read # is called from within _yield_data, so on Linux _yield_data # catches and passes on that particular OSError. os.close(stdout_slave) if not stderr_to_stdout: os.close(stderr_slave) left_overs = {stdout_master: b'', stderr_master: b''} fds = [stdout_master] if stderr_master != stdout_master: fds.append(stderr_master) finally: # Make sure we don't leak file descriptors _close_fds(fds_to_close) # The linesep with pty's always seems to be "\r\n", even on OS X return _yield_data(p, fds, left_overs, "\r\n", fds_to_close) osrf_pycommon-2.1.4/osrf_pycommon/process_utils/get_loop_impl.py000066400000000000000000000047771447070040300254210ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import threading import warnings _thread_local = threading.local() def get_loop_impl(asyncio): # See the note in # https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.get_event_loop # noqa # In short, between Python 3.10.0 and 3.10.8, this unconditionally raises a # DeprecationWarning. But after 3.10.8, it only raises the warning if # ther eis no current loop set in the policy. Since we are setting a loop # in the policy, this warning is spurious, and will go away once we get # away from Python 3.10.6 (verified with Python 3.11.3). 
with warnings.catch_warnings(): warnings.filterwarnings( 'ignore', 'There is no current event loop', DeprecationWarning) global _thread_local if getattr(_thread_local, 'loop_has_been_setup', False): return asyncio.get_event_loop() # Setup this thread's loop and return it if os.name == 'nt': try: loop = asyncio.get_event_loop() if not isinstance(loop, asyncio.ProactorEventLoop): # Before replacing the existing loop, explicitly # close it to prevent an implicit close during # garbage collection, which may or may not be a # problem depending on the loop implementation. loop.close() loop = asyncio.ProactorEventLoop() asyncio.set_event_loop(loop) except (RuntimeError, AssertionError): loop = asyncio.ProactorEventLoop() asyncio.set_event_loop(loop) else: try: loop = asyncio.get_event_loop() except (RuntimeError, AssertionError): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) _thread_local.loop_has_been_setup = True return loop osrf_pycommon-2.1.4/osrf_pycommon/process_utils/impl.py000066400000000000000000000304501447070040300235140ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys from .execute_process_nopty import _execute_process_nopty try: from .execute_process_pty import _execute_process_pty except ImportError: # pty doesn't work on Windows, it will fail to import # so fallback to non pty implementation _execute_process_pty = None def execute_process(cmd, cwd=None, env=None, shell=False, emulate_tty=False): """Executes a command with arguments and returns output line by line. All arguments, except ``emulate_tty``, are passed directly to :py:class:`subprocess.Popen`. ``execute_process`` returns a generator which yields the output, line by line, until the subprocess finishes at which point the return code is yielded. This is an example of how this function should be used: .. code-block:: python from __future__ import print_function from osrf_pycommon.process_utils import execute_process cmd = ['ls', '-G'] for line in execute_process(cmd, cwd='/usr'): if isinstance(line, int): # This is a return code, the command has exited print("'{0}' exited with: {1}".format(' '.join(cmd), line)) continue # break would also be appropriate here # In Python 3, it will be a bytes array which needs to be decoded if not isinstance(line, str): line = line.decode('utf-8') # Then print it to the screen print(line, end='') ``stdout`` and ``stderr`` are always captured together and returned line by line through the returned generator. New line characters are preserved in the output, so if re-printing the data take care to use ``end=''`` or first ``rstrip`` the output lines. When ``emulate_tty`` is used on Unix systems, commands will identify that they are on a tty and should output color to the screen as if you were running it on the terminal, and therefore there should not be any need to pass arguments like ``-c color.ui=always`` to commands like ``git``. 
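For example, capturing the colorized output of ``git status`` might look like this (a minimal sketch, not one of the original examples):

.. code-block:: python

    from osrf_pycommon.process_utils import execute_process

    cmd = ['git', 'status']
    output = b''
    returncode = None
    for line in execute_process(cmd, emulate_tty=True):
        if isinstance(line, int):
            returncode = line
            break
        output += line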
Additionally, programs might also behave differently when ``emulate_tty`` is being used, for example, Python will default to unbuffered output when it detects a tty. ``emulate_tty`` works by using pseudo-terminals on Unix machines, and so if you are running this command many times in parallel (like hundreds of times) then you may get one of a few different :py:exc:`OSError`'s. For example, "OSError: [Errno 24] Too many open files: '/dev/ttyp0'" or "OSError: out of pty devices". You should also be aware that you share pty devices with the rest of the system, so even if you are not using a lot, it is possible to get this error. You can catch this error before getting data from the generator, so when using ``emulate_tty`` you might want to do something like this: .. code-block:: python from __future__ import print_function from osrf_pycommon.process_utils import execute_process cmd = ['ls', '-G', '/usr'] try: output = execute_process(cmd, emulate_tty=True) except OSError: output = execute_process(cmd, emulate_tty=False) for line in output: if isinstance(line, int): print("'{0}' exited with: {1}".format(' '.join(cmd), line)) continue # In Python 3, it will be a bytes array which needs to be decoded if not isinstance(line, str): line = line.decode('utf-8') print(line, end='') This way if a pty cannot be opened in order to emulate the tty then you can try again without emulation, and any other :py:exc:`OSError` should raise again with ``emulate_tty`` set to ``False``. Obviously, you only want to do this if emulating the tty is non-critical to your processing, like when you are using it to capture color. Any color information that the command outputs as ANSI escape sequences is captured by this command. That way you can print the output to the screen and preserve the color formatting. If you do not want color to be in the output, then try setting ``emulate_tty`` to ``False``, but that does not guarantee that there is no color in the output, instead it will only cause called processes to identify that they are not being run in a terminal. Most well behaved programs will not output color if they detect that they are not being executed in a terminal, but you shouldn't rely on that. If you want to ensure there is no color in the output from an executed process, then use this function: :py:func:`osrf_pycommon.terminal_color.remove_ansi_escape_sequences` Exceptions can be raised by functions called by the implementation, for example, :py:class:`subprocess.Popen` can raise an :py:exc:`OSError` when the given command is not found. If you want to check for the existence of an executable on the path, see: :py:func:`which`. However, this function itself does not raise any special exceptions. :param list cmd: list of strings with the first item being a command and subsequent items being any arguments to that command; passed directly to :py:class:`subprocess.Popen`. :param str cwd: path in which to run the command, defaults to None which means :py:func:`os.getcwd` is used; passed directly to :py:class:`subprocess.Popen`. :param dict env: environment dictionary to use for executing the command, default is None which uses the :py:obj:`os.environ` environment; passed directly to :py:class:`subprocess.Popen`. :param bool shell: If True the system shell is used to evaluate the command, default is False; passed directly to :py:class:`subprocess.Popen`. :param bool emulate_tty: If True attempts to use a pty to convince subprocesses that they are being run in a terminal.
Typically this is useful for capturing colorized output from commands. This does not work on Windows (no pty's), so it is considered False even when True. Defaults to False. :returns: a generator which yields output from the command line by line :rtype: generator which yields strings """ exp_func = _execute_process_nopty if emulate_tty and _execute_process_pty is not None: exp_func = _execute_process_pty for out, err, ret in exp_func(cmd, cwd, env, shell, stderr_to_stdout=True): if ret is None: yield out continue yield ret def execute_process_split( cmd, cwd=None, env=None, shell=False, emulate_tty=False ): """:py:func:`execute_process`, except ``stderr`` is returned separately. Instead of yielding output line by line until yielding a return code, this function always a triplet of ``stdout``, ``stderr``, and return code. Each time only one of the three will not be None. Once you receive a non-None return code (type will be int) there will be no more ``stdout`` or ``stderr``. Therefore you can use the command like this: .. code-block:: python from __future__ import print_function import sys from osrf_pycommon.process_utils import execute_process_split cmd = ['time', 'ls', '-G'] for out, err, ret in execute_process_split(cmd, cwd='/usr'): # In Python 3, it will be a bytes array which needs to be decoded out = out.decode('utf-8') if out is not None else None err = err.decode('utf-8') if err is not None else None if ret is not None: # This is a return code, the command has exited print("'{0}' exited with: {1}".format(' '.join(cmd), ret)) break if out is not None: print(out, end='') if err is not None: print(err, end='', file=sys.stderr) When using this, it is possible that the ``stdout`` and ``stderr`` data can be returned in a different order than what would happen on the terminal. This is due to the fact that the subprocess is given different buffers for ``stdout`` and ``stderr`` and so there is a race condition on the subprocess writing to the different buffers and this command reading the buffers. This can be avoided in most scenarios by using ``emulate_tty``, because of the use of ``pty``'s, though the ordering can still not be guaranteed and the number of ``pty``'s is finite as explained in the documentation for :py:func:`execute_process`. For situations where output ordering between ``stdout`` and ``stderr`` are critical, they should not be returned separately and instead should share one buffer, and so :py:func:`execute_process` should be used. For all other parameters and documentation see: :py:func:`execute_process` """ exp_func = _execute_process_nopty if emulate_tty and _execute_process_pty is not None: exp_func = _execute_process_pty return exp_func(cmd, cwd, env, shell, stderr_to_stdout=False) try: from shutil import which as _which except ImportError: _which = None def _which_backport(cmd, mode=os.F_OK | os.X_OK, path=None): # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. def _access_check(fn, mode): return (os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)) # If we're given a path with a directory part, look it up directly rather # than referring to PATH directories. This includes checking relative # to the current directory, e.g. 
./script if os.path.dirname(cmd): if _access_check(cmd, mode): return cmd return None if path is None: path = os.environ.get("PATH", os.defpath) if not path: return None path = path.split(os.pathsep) if sys.platform == "win32": # The current directory takes precedence on Windows. if os.curdir not in path: path.insert(0, os.curdir) # PATHEXT is necessary to check on Windows. pathext = os.environ.get("PATHEXT", "").split(os.pathsep) # See if the given file matches any of the expected path extensions. # This will allow us to short circuit when given "python.exe". # If it does match, only test that one, otherwise we have to try # others. if any(cmd.lower().endswith(ext.lower()) for ext in pathext): files = [cmd] else: files = [cmd + ext for ext in pathext] else: # On other platforms you don't have things like PATHEXT to tell you # what file suffixes are executable, so just pass on cmd as-is. files = [cmd] seen = set() for directory in path: normdir = os.path.normcase(directory) if normdir not in seen: seen.add(normdir) for thefile in files: name = os.path.join(directory, thefile) if _access_check(name, mode): return name return None def which(cmd, mode=os.F_OK | os.X_OK, path=None, **kwargs): """Given a command, mode, and a PATH string, return the path which conforms to the given mode on the PATH, or None if there is no such file. `mode` defaults to ``os.F_OK | os.X_OK``. `path` defaults to the result of ``os.environ.get("PATH")``, or can be overridden with a custom search path. Backported from :py:func:`shutil.which` (``_), available in Python 3.3. """ kwargs.update({'mode': mode, 'path': path}) global _which if _which is not None: return _which(cmd, **kwargs) return _which_backport(cmd, **kwargs) osrf_pycommon-2.1.4/osrf_pycommon/terminal_color/000077500000000000000000000000001447070040300223125ustar00rootroot00000000000000osrf_pycommon-2.1.4/osrf_pycommon/terminal_color/__init__.py000066400000000000000000000166741447070040300244410ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This module provides tools for colorizing terminal output. This module defines the ansi escape sequences used for colorizing the output from terminal programs in Linux. You can access the ansi escape sequences using the :py:func:`ansi` function: .. code-block:: python >>> from osrf_pycommon.terminal_color import ansi >>> print(["This is ", ansi('red'), "red", ansi('reset'), "."]) ['This is ', '\\x1b[31m', 'red', '\\x1b[0m', '.'] You can also use :py:func:`format_color` to do in-line substitution of keys wrapped in ``@{}`` markers for their ansi escape sequences: .. 
code-block:: python >>> from osrf_pycommon.terminal_color import format_color >>> print(format_color("This is @{bf}blue@{reset}.").split()) ['This', 'is', '\\x1b[34mblue\\x1b[0m.'] This is a list of all of the available substitutions: +-------------------+--------------+--------------+ | Long Form | Shorter | Value | +===================+==============+==============+ | ``@{blackf}`` | ``@{kf}`` | ``\\033[30m`` | +-------------------+--------------+--------------+ | ``@{redf}`` | ``@{rf}`` | ``\\033[31m`` | +-------------------+--------------+--------------+ | ``@{greenf}`` | ``@{gf}`` | ``\\033[32m`` | +-------------------+--------------+--------------+ | ``@{yellowf}`` | ``@{yf}`` | ``\\033[33m`` | +-------------------+--------------+--------------+ | ``@{bluef}`` | ``@{bf}`` | ``\\033[34m`` | +-------------------+--------------+--------------+ | ``@{purplef}`` | ``@{pf}`` | ``\\033[35m`` | +-------------------+--------------+--------------+ | ``@{cyanf}`` | ``@{cf}`` | ``\\033[36m`` | +-------------------+--------------+--------------+ | ``@{whitef}`` | ``@{wf}`` | ``\\033[37m`` | +-------------------+--------------+--------------+ | ``@{blackb}`` | ``@{kb}`` | ``\\033[40m`` | +-------------------+--------------+--------------+ | ``@{redb}`` | ``@{rb}`` | ``\\033[41m`` | +-------------------+--------------+--------------+ | ``@{greenb}`` | ``@{gb}`` | ``\\033[42m`` | +-------------------+--------------+--------------+ | ``@{yellowb}`` | ``@{yb}`` | ``\\033[43m`` | +-------------------+--------------+--------------+ | ``@{blueb}`` | ``@{bb}`` | ``\\033[44m`` | +-------------------+--------------+--------------+ | ``@{purpleb}`` | ``@{pb}`` | ``\\033[45m`` | +-------------------+--------------+--------------+ | ``@{cyanb}`` | ``@{cb}`` | ``\\033[46m`` | +-------------------+--------------+--------------+ | ``@{whiteb}`` | ``@{wb}`` | ``\\033[47m`` | +-------------------+--------------+--------------+ | ``@{escape}`` | | ``\\033`` | +-------------------+--------------+--------------+ | ``@{reset}`` | ``@|`` | ``\\033[0m`` | +-------------------+--------------+--------------+ | ``@{boldon}`` | ``@!`` | ``\\033[1m`` | +-------------------+--------------+--------------+ | ``@{italicson}`` | ``@/`` | ``\\033[3m`` | +-------------------+--------------+--------------+ | ``@{ulon}`` | ``@_`` | ``\\033[4m`` | +-------------------+--------------+--------------+ | ``@{invon}`` | | ``\\033[7m`` | +-------------------+--------------+--------------+ | ``@{boldoff}`` | | ``\\033[22m`` | +-------------------+--------------+--------------+ | ``@{italicsoff}`` | | ``\\033[23m`` | +-------------------+--------------+--------------+ | ``@{uloff}`` | | ``\\033[24m`` | +-------------------+--------------+--------------+ | ``@{invoff}`` | | ``\\033[27m`` | +-------------------+--------------+--------------+ These substitution's values come from the ANSI color escape sequences, see: http://en.wikipedia.org/wiki/ANSI_escape_code Also for any of the keys which have a trailing ``f``, you can safely drop the trailing ``f`` and get the same thing. For example, ``format_color("@{redf}")`` and ``format_color("@{red}")`` are functionally equivalent. Also, many of the substitutions have shorten forms for convenience, such that ``@{redf}``, ``@{rf}``, ``@{red}``, and ``@{r}`` are all the same. Note that a trailing ``b`` is always required when specifying a background. Some of the most common non-color sequences have ``{}``'less versions. For example, ``@{boldon}``'s shorter form is ``@!``. 
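For example, several annotations can be combined and later stripped again with :py:func:`remove_ansi_escape_sequences` (a minimal sketch):

.. code-block:: python

    >>> from osrf_pycommon.terminal_color import format_color
    >>> from osrf_pycommon.terminal_color import remove_ansi_escape_sequences
    >>> colored = format_color("@!@{rf}error:@| something went wrong")
    >>> remove_ansi_escape_sequences(colored)
    'error: something went wrong'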
By default, the substitutions (and calls to :py:func:`ansi`) resolve to escape sequences, but if you call :py:func:`disable_ansi_color_substitution_globally` then they will resolve to empty strings. This allows you to always use the substitution strings and disable them globally when desired. On Windows the substitutions are always resolved to empty strings as the ansi escape sequences do not work on Windows. Instead strings annotated with ``@{}`` style substitutions or raw ``\\x1b[xxm`` style ansi escape sequences must be passed to :py:func:`print_color` in order for colors to be displayed on windows. Also the :py:func:`print_ansi_color_win32` function can be used on strings which only contain ansi escape sequences. .. note:: There are existing Python modules like `colorama `_ which provide ansi colorization on multiple platforms, so a valid question is: "why write this module?". The reason for writing this module is to provide the color annotation of strings and functions for removing or replacing ansi escape sequences which are not provided by modules like colorama. This module could have depended on colorama for colorization on Windows, but colorama works by replacing the built-in ``sys.stdout`` and ``sys.stderr``, which we did not want and it has extra functionality that we do not need. So, instead of depending on colorama, the Windows color printing code was used as the inspiration for the Windows color printing in the ``windows.py`` module in this ``terminal_color`` package. The colorama license was placed in the header of that file and the colorama license is compatible with this package's license. """ from .ansi_re import remove_ansi_escape_senquences from .ansi_re import remove_ansi_escape_sequences from .ansi_re import split_by_ansi_escape_sequence from .impl import ansi from .impl import disable_ansi_color_substitution_globally from .impl import enable_ansi_color_substitution_globally from .impl import format_color from .impl import get_ansi_dict from .impl import print_ansi_color_win32 from .impl import print_color from .impl import sanitize from .impl import test_colors __all__ = [ 'ansi', 'disable_ansi_color_substitution_globally', 'enable_ansi_color_substitution_globally', 'format_color', 'get_ansi_dict', 'print_ansi_color_win32', 'print_color', 'remove_ansi_escape_senquences', 'remove_ansi_escape_sequences', 'sanitize', 'split_by_ansi_escape_sequence', 'test_colors', ] osrf_pycommon-2.1.4/osrf_pycommon/terminal_color/ansi_re.py000066400000000000000000000032201447070040300243010ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Provides regular expression functions for matching ansi escape sequences.""" import re _ansi_re = re.compile(r'\033\[\d{1,2}[m]') _ansi_re_group = re.compile(r'(\033\[\d{1,2}[m])') def split_by_ansi_escape_sequence(string, include_delimiters=False): """ Splits a string into a list using any ansi escape sequence as a delimiter. 
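For example (a minimal sketch of the expected behavior):

.. code-block:: python

    >>> split_by_ansi_escape_sequence('\\x1b[31mred\\x1b[0m plain')
    ['', 'red', ' plain']
    >>> split_by_ansi_escape_sequence('\\x1b[31mred\\x1b[0m plain', include_delimiters=True)
    ['', '\\x1b[31m', 'red', '\\x1b[0m', ' plain']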
:param string: string to be split :type string: str :param include_delimiters: If True include matched escape sequences in the list (default: False) :type include_delimiters: bool :returns: list of strings, split from original string by escape sequences :rtype: list """ global _ansi_re, _ansi_re_group if include_delimiters: return _ansi_re_group.split(string) return _ansi_re.split(string) def remove_ansi_escape_sequences(string): """ Removes any ansi escape sequences found in the given string and returns it. """ global _ansi_re return _ansi_re.sub('', string) # Backwards compatibility remove_ansi_escape_senquences = remove_ansi_escape_sequences osrf_pycommon-2.1.4/osrf_pycommon/terminal_color/impl.py000066400000000000000000000266301447070040300236340ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import os import string _is_windows = (os.name in ['nt']) if _is_windows: from .windows import _print_ansi_color_win32 _ansi = { # Escape sequence start 'escape': '\x1b', # Reset 'reset': '\x1b[0m', '|': '\x1b[0m', 'atbar': '@|', # Bold on 'boldon': '\x1b[1m', '!': '\x1b[1m', 'atexclamation': '@!', # Bold off 'boldoff': '\x1b[22m', # Italics on 'italicson': '\x1b[3m', '/': '\x1b[3m', 'atfwdslash': '@/', # Intalics off 'italicsoff': '\x1b[23m', # Underline on 'ulon': '\x1b[4m', '_': '\x1b[4m', 'atunderscore': '@_', # Underline off 'uloff': '\x1b[24m', # Invert foreground/background on/off 'invon': '\x1b[7m', 'invoff': '\x1b[27m', # Black foreground 'k': '\x1b[30m', 'kf': '\x1b[30m', 'black': '\x1b[30m', 'blackf': '\x1b[30m', # Black background 'kb': '\x1b[40m', 'blackb': '\x1b[40m', # Blue foreground 'b': '\x1b[34m', 'bf': '\x1b[34m', 'blue': '\x1b[34m', 'bluef': '\x1b[34m', # Blue background 'bb': '\x1b[44m', 'blueb': '\x1b[44m', # Cyan foreground 'c': '\x1b[36m', 'cf': '\x1b[36m', 'cyan': '\x1b[36m', 'cyanf': '\x1b[36m', # Cyan background 'cb': '\x1b[46m', 'cyanb': '\x1b[46m', # Green foreground 'g': '\x1b[32m', 'gf': '\x1b[32m', 'green': '\x1b[32m', 'greenf': '\x1b[32m', # Green background 'gb': '\x1b[42m', 'greenb': '\x1b[42m', # Purple (magenta) foreground 'p': '\x1b[35m', 'pf': '\x1b[35m', 'purple': '\x1b[35m', 'purplef': '\x1b[35m', # Purple (magenta) background 'pb': '\x1b[45m', 'purpleb': '\x1b[45m', # Red foreground 'r': '\x1b[31m', 'rf': '\x1b[31m', 'red': '\x1b[31m', 'redf': '\x1b[31m', # Red background 'rb': '\x1b[41m', 'redb': '\x1b[41m', # White foreground 'w': '\x1b[37m', 'wf': '\x1b[37m', 'white': '\x1b[37m', 'whitef': '\x1b[37m', # White background 'wb': '\x1b[47m', 'whiteb': '\x1b[47m', # Yellow foreground 'y': '\x1b[33m', 'yf': '\x1b[33m', 'yellow': '\x1b[33m', 'yellowf': '\x1b[33m', # Yellow background 'yb': '\x1b[43m', 'yellowb': '\x1b[43m', } # Set all values to empty string _null_ansi = dict([(k, '') for k, v in _ansi.items()]) # Except format preservers used by sanitize _null_ansi.update({ 'atexclamation': '@!', 'atfwdslash': '@/', 'atunderscore': '@_', 'atbar': '@|', 
}) # Enable by default _enabled = True def ansi(key): """Returns the escape sequence for a given ansi color key.""" global _ansi, _null_ansi, _enabled return (_ansi if _enabled else _null_ansi)[key] def get_ansi_dict(): """Returns a copy of the dictionary of keys and ansi escape sequences.""" global _ansi return dict(_ansi) def enable_ansi_color_substitution_globally(): """ Causes :py:func:`format_color` to replace color annotations with ansi esacpe sequences. It also affects :py:func:`ansi`. This is the case by default, so there is no need to call this everytime. If you have previously caused all substitutions to evaluate to an empty string by calling :py:func:`disable_ansi_color_substitution_globally`, then you can restore the escape sequences for substitutions by calling this function. """ global _enabled _enabled = True def disable_ansi_color_substitution_globally(): """ Causes :py:func:`format_color` to replace color annotations with empty strings. It also affects :py:func:`ansi`. This is not the case by default, so if you want to make all substitutions given to either function mentioned above return empty strings then call this function. The default behavior can be restored by calling :py:func:`enable_ansi_color_substitution_globally`. """ global _enabled _enabled = False def format_color(msg): """ Replaces color annotations with ansi escape sequences. See this module's documentation for the list of available substitutions. If :py:func:`disable_ansi_color_substitution_globally` has been called then all color annotations will be replaced by empty strings. Also, on Windows all color annotations will be replaced with empty strings. If you want colorization on Windows, you must pass annotated strings to :py:func:`print_color`. :param str msg: string message to be colorized :returns: colorized string :rtype: str """ global _ansi, _null_ansi, _enabled ansi_dict = _null_ansi if not _enabled or _is_windows else _ansi return _format_color(msg, ansi_dict) def _format_color(msg, ansi_dict): msg = msg.replace('@!', '@{boldon}') msg = msg.replace('@/', '@{italicson}') msg = msg.replace('@_', '@{ulon}') msg = msg.replace('@|', '@{reset}') class ColorTemplate(string.Template): delimiter = '@' return ColorTemplate(msg).substitute(ansi_dict) def print_ansi_color_win32(*args, **kwargs): """ Prints color string containing ansi escape sequences to console in Windows. If called on a non-Windows system, a :py:exc:`NotImplementedError` occurs. Does not respect :py:func:`disable_ansi_color_substitution_globally`. Does not substitute color annotations like ``@{r}`` or ``@!``, the string must already contain the ``\\033[1m`` style ansi escape sequences. Works by splitting each argument up by ansi escape sequence, printing the text between the sequences, and doing the corresponding win32 action for each ansi sequence encountered. """ if not _is_windows: raise NotImplementedError( "print_ansi_color_win32() is not implemented for this system") return _print_ansi_color_win32(*args, **kwargs) def print_color(*args, **kwargs): """ Colorizes and prints with an implicit ansi reset at the end Calls :py:func:`format_color` on each positional argument and then sends all positional and keyword arguments to :py:obj:`print`. If the ``end`` keyword argument is not present then the default end value ``ansi('reset') + '\\n'`` is used and passed to :py:obj:`print`. :py:obj:`os.linesep` is used to determine the actual value for ``\\n``. 
Therefore, if you use the ``end`` keyword argument be sure to include an ansi reset escape sequence if necessary. On Windows the substituted arguments and keyword arguments are passed to :py:func:`print_ansi_color_win32` instead of just :py:obj:`print`. """ global _ansi, _null_ansi, _enabled # If no end given, use reset + new line if 'end' not in kwargs: kwargs['end'] = '{0}{1}'.format(ansi('reset'), os.linesep) args = [_format_color(a, _ansi if _enabled else _null_ansi) for a in args] # If windows, pass to win32 print color function if _enabled and _is_windows: return print_ansi_color_win32(*args, **kwargs) return print(*args, **kwargs) def sanitize(msg): """ Sanitizes the given string to prevent :py:func:`format_color` from substituting content. For example, when the string ``'Email: {user}@{org}'`` is passed to :py:func:`format_color` the ``@{org}`` will be incorrectly recognized as a colorization annotation and it will fail to substitute with a :py:exc:`KeyError`: ``org``. In order to prevent this, you can first "sanitize" the string, add color annotations, and then pass the whole string to :py:func:`format_color`. If you give this function the string ``'Email: {user}@{org}'``, then it will return ``'Email: {{user}}@@{{org}}'``. Then if you pass that to :py:func:`format_color` it will return ``'Email: {user}@{org}'``. In this way :py:func:`format_color` is the reverse of this function and so it is safe to call this function on any incoming data if it will eventually be passed to :py:func:`format_color`. In addition to expanding ``{`` => ``{{``, ``}`` => ``}}``, and ``@`` => ``@@``, this function will also replace any instances of ``@!``, ``@/``, ``@_``, and ``@|`` with ``@{atexclamation}``, ``@{atfwdslash}``, ``@{atunderscore}``, and ``@{atbar}`` respectively. And then there are corresponding keys in the ansi dict to convert them back. For example, if you pass the string ``'|@ Notice @|'`` to this function it will return ``'|@@ Notice @{atbar}'``. And since ``ansi('atbar')`` always returns ``@|``, even when :py:func:`disable_ansi_color_substitution_globally` has been called, the result of passing that string to :py:func:`format_color` will be ``'|@ Notice @|'`` again. There are two main strategies for constructing strings which use both the Python :py:func:`str.format` function and the colorization annotations. One way is to just build each piece and concatenate the result: .. code-block:: python print_color("@{r}", "{error}".format(error=error_str)) # Or using print (remember to include an ansi reset) print(format_color("@{r}" + "{error}".format(error=error_str) + "@|")) Another way is to use this function on the format string, concatenate to the annotations, pass the whole string to :py:func:`format_color`, and then format the whole thing: .. code-block:: python print(format_color("@{r}" + sanitize("{error}") + "@|") .format(error=error_str)) However, the most common use for this function is to sanitize incoming strings which may have unknown content: .. code-block:: python def my_func(user_content): print_color("@{y}" + sanitize(user_content)) This function is not intended to be used on strings with color annotations. 
:param str msg: string message to be sanitized :returns: sanitized string :rtype: str """ msg = msg.replace('@', '@@') msg = msg.replace('{', '{{') msg = msg.replace('}', '}}') # Above line `msg = msg.replace('@', '@@')` will have converted @* to @@* msg = msg.replace('@@!', '@{atexclamation}') msg = msg.replace('@@/', '@{atfwdslash}') msg = msg.replace('@@_', '@{atunderscore}') msg = msg.replace('@@|', '@{atbar}') return msg def test_colors(file=None): """Prints a color testing block using :py:func:`print_color`""" print_color("| Normal | @!Bold Normal", file=file) print_color("| @{kf}Black @|| @!@{kf}Bold Black", file=file) print_color("| @{rf}Red @|| @!@{rf}Bold Red", file=file) print_color("| @{gf}Green @|| @!@{gf}Bold Green", file=file) print_color("| @{yf}Yellow @|| @!@{yf}Bold Yellow", file=file) print_color("| @{bf}Blue @|| @!@{bf}Bold Blue", file=file) print_color("| @{pf}Purple @|| @!@{pf}Bold Purple", file=file) print_color("| @{cf}Cyan @|| @!@{cf}Bold Cyan", file=file) print_color("| @{wf}White @|| @!@{wf}Bold White", file=file) osrf_pycommon-2.1.4/osrf_pycommon/terminal_color/windows.py000066400000000000000000000232771447070040300243670ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Some of the code in this module is taken from the colorama project: # # https://github.com/tartley/colorama # # Which is licensed under the standard 3-clause new BSD license, here is a copy: # # Copyright (c) 2010 Jonathan Hartley # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # * Neither the name of the copyright holders, nor those of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # End copy.
from __future__ import print_function import ctypes try: import ctypes.wintypes except ValueError: pass import os import sys from .ansi_re import split_by_ansi_escape_sequence # From winbase.h STDOUT = -11 STDERR = -12 NORMAL = 0x00 BRIGHT = 0x08 def _print_ansi_color_win32(*args, **kwargs): global STDOUT, STDERR # Validate the kwargs for kwarg in kwargs: if kwarg not in ['sep', 'end', 'file']: raise TypeError( "'{0}' is an invalid keyword argument for this function" .format(kwarg)) sep = kwargs.get('sep', ' ') end = kwargs.get('end', os.linesep) file = kwargs.get('file', sys.stdout) file = sys.stdout if file is None else file # For each argument, split it into more arguments on ansi escape # sequences, replacing them with (cmd, arg) tuples for Windows # For example: # # `\033[31mred\033[0m` => `[(_win_foreground, 4), 'red', (_win_reset, )]` args = [token for arg in args for token in _tokenize_ansi_string_for_win32(arg) + [sep]] # There is always a trailing sep, strip it args = args[:-1] # Figure out where the data is getting written to try: fn = file.fileno() except AttributeError: fn = None if fn == 1: # stdout handle = STDOUT elif fn == 2: # stderr handle = STDERR else: # something else, which cannot be colored handle = None if handle is None: # Cannot colorize, strip colorization tuples and pass on to print args = [a for a in args if not isinstance(a, tuple)] return print(*args, sep=sep, end=end, file=file) default_attrs = GetConsoleScreenBufferInfo(handle).wAttributes # Print tokenized arguments attrs = _print_args(args, handle, default_attrs, default_attrs, file) # Print ending end = _tokenize_ansi_string_for_win32(end) _print_args(end, handle, attrs, default_attrs, file) def _print_args(args, handle, attrs, default_attrs, file): for i, arg in enumerate(args): if isinstance(arg, tuple): # Do the proper thing on the windows console assert len(arg) > 0, arg if arg[0] == _win_reset: attrs = arg[0](handle, default_attrs) elif arg[0] == _win_style and len(arg) > 1 and arg[1] is None: # Means the style is not supported on Windows continue else: attrs = arg[0](*(arg[1:] + (handle, attrs))) else: print(arg, end='', file=file) return attrs def _unpack_attrs(attrs): return [attrs & 7, (attrs >> 4) & 7, attrs & BRIGHT] def _pack_attrs(foreground, background, style): return foreground + (background * 16) + style def _win_reset(handle, attrs): SetConsoleTextAttribute(handle, attrs) return attrs def _win_style(style, handle, attrs): attrs_list = _unpack_attrs(attrs) attrs_list[2] = style attrs = _pack_attrs(*attrs_list) SetConsoleTextAttribute(handle, attrs) return attrs def _win_foreground(foreground, handle, attrs): attrs_list = _unpack_attrs(attrs) attrs_list[0] = foreground attrs = _pack_attrs(*attrs_list) SetConsoleTextAttribute(handle, attrs) return attrs def _win_background(background, handle, attrs): attrs_list = _unpack_attrs(attrs) attrs_list[1] = background attrs = _pack_attrs(*attrs_list) SetConsoleTextAttribute(handle, attrs) return attrs _ansi_to_win32 = { '\x1b[0m': (_win_reset, ), # reset '\x1b[1m': (_win_style, BRIGHT), # boldon, see wincon.h '\x1b[22m': (_win_style, NORMAL), # boldoff, see wincon.h '\x1b[3m': (_win_style, None), # italicson, nothing on Windows '\x1b[23m': (_win_style, None), # italicsoff, nothing on Windows '\x1b[4m': (_win_style, None), # ulon, nothing on Windows '\x1b[24m': (_win_style, None), # uloff, nothing on Windows '\x1b[7m': (_win_style, None), # invon, nothing on Windows '\x1b[27m': (_win_style, None), # invoff, nothing on Windows '\x1b[30m':
(_win_foreground, 0), # blackf '\x1b[40m': (_win_background, 0), # blackb '\x1b[34m': (_win_foreground, 1), # bluef '\x1b[44m': (_win_background, 1), # blueb '\x1b[32m': (_win_foreground, 2), # greenf '\x1b[42m': (_win_background, 2), # greenb '\x1b[36m': (_win_foreground, 3), # cyanf '\x1b[46m': (_win_background, 3), # cyanb '\x1b[31m': (_win_foreground, 4), # redf '\x1b[41m': (_win_background, 4), # redb '\x1b[35m': (_win_foreground, 5), # purplef '\x1b[45m': (_win_background, 5), # purpleb '\x1b[33m': (_win_foreground, 6), # yellowf '\x1b[43m': (_win_background, 6), # yellowb '\x1b[37m': (_win_foreground, 7), # whitef '\x1b[47m': (_win_background, 7), # whiteb } def _tokenize_ansi_string_for_win32(msg): global _ansi_to_win32 tokens = filter(None, split_by_ansi_escape_sequence(msg, True)) tokens = [_ansi_to_win32.get(t, t) for t in tokens] return tokens try: ctypes.WinDLL ctypes.wintypes except AttributeError: pass else: windll = ctypes.LibraryLoader(ctypes.WinDLL) # Replication of types and defines from winbase.h class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): _fields_ = [ ("dwSize", ctypes.wintypes._COORD), ("dwCursorPosition", ctypes.wintypes._COORD), ("wAttributes", ctypes.wintypes.WORD), ("srWindow", ctypes.wintypes.SMALL_RECT), ("dwMaximumWindowSize", ctypes.wintypes._COORD), ] def __str__(self): return ( '({0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10})' .format( self.dwSize.Y, self.dwSize.X, self.dwCursorPosition.Y, self.dwCursorPosition.X, self.wAttributes, self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right, self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X) ) _GetStdHandle = windll.kernel32.GetStdHandle _GetStdHandle.argtypes = [ ctypes.wintypes.DWORD, ] _GetStdHandle.restype = ctypes.wintypes.HANDLE _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo _GetConsoleScreenBufferInfo.argtypes = [ ctypes.wintypes.HANDLE, ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO), ] _GetConsoleScreenBufferInfo.restype = ctypes.wintypes.BOOL _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute _SetConsoleTextAttribute.argtypes = [ ctypes.wintypes.HANDLE, ctypes.wintypes.WORD, ] _SetConsoleTextAttribute.restype = ctypes.wintypes.BOOL handles = { STDOUT: _GetStdHandle(STDOUT), STDERR: _GetStdHandle(STDERR), } def GetConsoleScreenBufferInfo(stream_id=STDOUT): global handles, CONSOLE_SCREEN_BUFFER_INFO, _GetConsoleScreenBufferInfo handle = handles[stream_id] csbi = CONSOLE_SCREEN_BUFFER_INFO() _GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)) return csbi def SetConsoleTextAttribute(stream_id, attrs): global handles, _SetConsoleTextAttribute handle = handles[stream_id] return _SetConsoleTextAttribute(handle, attrs) osrf_pycommon-2.1.4/osrf_pycommon/terminal_utils.py000066400000000000000000000066741447070040300227150ustar00rootroot00000000000000# Copyright 2014 Open Source Robotics Foundation, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This module has a miscellaneous set of functions for working with terminals.
You can use the :py:func:`get_terminal_dimensions` to get the width and height of the terminal as a tuple. You can also use the :py:func:`is_tty` function to determine if a given object is a tty. """ import os import struct import subprocess __all__ = ['GetTerminalDimensionsError', 'get_terminal_dimensions', 'is_tty'] class GetTerminalDimensionsError(Exception): """Raised when the terminal dimensions cannot be determined.""" pass def _get_terminal_dimensions_windows(): try: from ctypes import create_string_buffer from ctypes import windll except ImportError as exc: raise GetTerminalDimensionsError("Failed to get dimensions: {0}" .format(exc)) STDOUT = -11 h = windll.kernel32.GetStdHandle(STDOUT) buffer_info = create_string_buffer(22) if not windll.kernel32.GetConsoleScreenBufferInfo(h, buffer_info): raise GetTerminalDimensionsError( "Call to windll.kernel32.GetConsoleScreenBufferInfo failed") try: (_, _, _, _, _, left, top, right, bottom, _, _) \ = struct.unpack("hhhhHhhhhhh", buffer_info.raw) except struct.error as exc: raise GetTerminalDimensionsError("Failed to unpack data: {0}" .format(exc)) width = right - left + 1 height = bottom - top + 1 return width, height def _get_terminal_dimensions_unix(): # This function uses `tput` and should work on any Unix system # See: http://en.wikipedia.org/wiki/Tput try: width = subprocess.check_output(['tput', 'cols']) width = int(width.strip()) except (subprocess.CalledProcessError, ValueError) as exc: raise GetTerminalDimensionsError("Failed to get width: {0}" .format(exc)) try: height = subprocess.check_output(['tput', 'lines']) height = int(height.strip()) except (subprocess.CalledProcessError, ValueError) as exc: raise GetTerminalDimensionsError("Failed to get height: {0}" .format(exc)) return width, height def get_terminal_dimensions(): """Returns the width and height of the terminal. :returns: width and height in that order as a tuple :rtype: tuple :raises: GetTerminalDimensionsError when the terminal dimensions cannot be determined """ if os.name in ['nt']: return _get_terminal_dimensions_windows() return _get_terminal_dimensions_unix() def is_tty(stream): """Returns True if the given stream is a tty, else False :param stream: object to be checked for being a tty :returns: True if the given object is a tty, otherwise False :rtype: bool """ return hasattr(stream, 'isatty') and stream.isatty() osrf_pycommon-2.1.4/package.xml000066400000000000000000000012111447070040300165170ustar00rootroot00000000000000 osrf_pycommon 2.1.4 Commonly needed Python modules, used by Python software developed at OSRF. 
William Woodall Apache License 2.0 William Woodall python3-importlib-metadata ament_python osrf_pycommon-2.1.4/resource/000077500000000000000000000000001447070040300162365ustar00rootroot00000000000000osrf_pycommon-2.1.4/resource/osrf_pycommon000066400000000000000000000000001447070040300210410ustar00rootroot00000000000000osrf_pycommon-2.1.4/setup.py000066400000000000000000000024371447070040300161270ustar00rootroot00000000000000from setuptools import find_packages from setuptools import setup install_requires = [ 'importlib-metadata;python_version<"3.8"', ] package_excludes = ['tests*', 'docs*'] packages = find_packages(exclude=package_excludes) package_name = 'osrf_pycommon' setup( name=package_name, version='2.1.4', packages=packages, data_files=[ ('share/' + package_name, ['package.xml']), ('share/ament_index/resource_index/packages', ['resource/' + package_name]), ], install_requires=install_requires, extras_require={ 'test': [ 'flake8', 'flake8_import_order', 'pytest', ], }, python_requires='>=3.5', zip_safe=True, author='William Woodall', author_email='william@osrfoundation.org', maintainer='William Woodall', maintainer_email='william@openrobotics.org', url='http://osrf-pycommon.readthedocs.org/', keywords=['osrf', 'utilities'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python', ], description="Commonly needed Python modules, " "used by Python software developed at OSRF", license='Apache 2.0', test_suite='tests', ) osrf_pycommon-2.1.4/stdeb.cfg000066400000000000000000000002601447070040300161670ustar00rootroot00000000000000[DEFAULT] Depends3: python3-setuptools, python3-importlib-metadata Conflicts3: python-osrf-pycommon Suite3: focal jammy bullseye bookworm X-Python3-Version: >= 3.8 No-Python2: osrf_pycommon-2.1.4/tests/000077500000000000000000000000001447070040300155515ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/__init__.py000066400000000000000000000000001447070040300176500ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/test_code_format.py000066400000000000000000000017261447070040300214520ustar00rootroot00000000000000import os import subprocess import sys def test_flake8(): """Test source code for pyFlakes and PEP8 conformance""" this_dir = os.path.dirname(os.path.abspath(__file__)) source_dir = os.path.join(this_dir, '..', 'osrf_pycommon') cmd = [sys.executable, '-m', 'flake8', source_dir, '--count'] # if flake8_import_order is installed, set the style to google try: import flake8_import_order # noqa cmd.extend(['--import-order-style=google']) except ImportError: pass # ignore error codes from plugins this package doesn't comply with cmd.extend(['--ignore=C,D,Q,I']) # work around for https://gitlab.com/pycqa/flake8/issues/179 cmd.extend(['--jobs', '1']) p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdout, stderr = p.communicate() print(stdout) assert p.returncode == 0, \ "Command '{0}' returned non-zero exit code '{1}'".format(' '.join(cmd), p.returncode) 
osrf_pycommon-2.1.4/tests/unit/000077500000000000000000000000001447070040300165305ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/__init__.py000066400000000000000000000000001447070040300206270ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_cli_utils/000077500000000000000000000000001447070040300215565ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_cli_utils/__init__.py000066400000000000000000000000001447070040300236550ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_cli_utils/test_common.py000066400000000000000000000066611447070040300244700ustar00rootroot00000000000000from osrf_pycommon.cli_utils import common def test_extract_jobs_flags(): extract_jobs_flags = common.extract_jobs_flags valid_mflags = [ '-j8 -l8', 'j8 ', '-j', 'j', '-l8', 'l8', '-l', 'l', '-j18', ' -j8 l9', '-j1 -l1', '--jobs=8', '--jobs 8', '--jobs', '--load-average', '--load-average=8', '--load-average 8', '--jobs=8 -l9' ] results = [ '-j8 -l8', 'j8', '-j', 'j', '-l8', 'l8', '-l', 'l', '-j18', '-j8 l9', '-j1 -l1', '--jobs=8', '--jobs 8', '--jobs', '--load-average', '--load-average=8', '--load-average 8', '--jobs=8 -l9' ] leftover_results = [ '', ' ', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '' ] for mflag, result in zip(valid_mflags, zip(results, leftover_results)): result, leftover_result = result matches, leftovers = extract_jobs_flags(mflag) print('--') print("input: '{0}'".format(mflag)) print("matched: '{0}'".format(matches)) print("expected: '{0}'".format(result)) assert matches == result, "should match '{0}'".format(mflag) assert leftovers == leftover_result, "expected leftovers '{0}': '{1}'"\ .format(leftovers) invalid_mflags = ['', '--jobs= 8', '--jobs8'] for mflag in invalid_mflags: matches, leftovers = extract_jobs_flags(mflag) assert matches == '', "should not match '{0}'".format(mflag) assert leftovers == mflag, "'{0}' should be in leftovers: '{1}'"\ .format(mflag, leftovers) mixed_flags = [ 'target -j8 -l8 --other-option' ] results = [ ('-j8 -l8', 'target --other-option') ] for args, result in zip(mixed_flags, results): expected_matches, expected_leftovers = result matches, leftovers = extract_jobs_flags(args) assert matches == expected_matches, "should have matched '{0}'"\ .format(expected_matches) assert leftovers == expected_leftovers, "should have left '{0}' '{1}'"\ .format(expected_leftovers, leftovers) def test_extract_argument_group(): extract_argument_group = common.extract_argument_group # Example 1 from docstring args = ['foo', '--args', 'bar', '--baz'] expected = (['foo'], ['bar', '--baz']) results = extract_argument_group(args, '--args') assert expected == results, (args, expected, results) # Example 2 from docstring args = ['foo', '--args', 'bar', '--baz', '---', '--', '--foo-option'] expected = (['foo', '--foo-option'], ['bar', '--baz', '--']) results = extract_argument_group(args, '--args') assert expected == results, (args, expected, results) # Example 3 from docstring args = ['foo', '--args', 'ping', '--', 'bar', '--args', 'pong', '--', 'baz', '--args', '--'] expected = (['foo', 'bar', 'baz'], ['ping', 'pong']) results = extract_argument_group(args, '--args') assert expected == results, (args, expected, results) # Example with delimiter but no arguments args = ['foo', '--args'] expected = (['foo'], []) results = extract_argument_group(args, '--args') assert expected == results, (args, expected, results) # Example with no delimiter args = ['foo', 'bar'] expected = (['foo', 'bar'], []) results = 
extract_argument_group(args, '--args') assert expected == results, (args, expected, results) osrf_pycommon-2.1.4/tests/unit/test_cli_utils/test_verb_pattern.py000066400000000000000000000100601447070040300256570ustar00rootroot00000000000000import sys import unittest from osrf_pycommon.cli_utils import verb_pattern called = None class TestCliUtilsVerbPattern(unittest.TestCase): def test_call_prepare_arguments(self): global called cpa = verb_pattern.call_prepare_arguments # Try with basic, one parameter called = False def fake_prepare_arguments(parser): global called called = True if called: pass return parser r = cpa(fake_prepare_arguments, None) self.assertTrue(called) self.assertIsNone(r) # Try with args called = False def fake_prepare_arguments(parser, args): global called called = True if called: pass return parser r = cpa(fake_prepare_arguments, None) self.assertTrue(called) self.assertIsNone(r) # Try with self called = False class Foo: def fake_prepare_arguments(self, parser, args): global called called = True if called: pass return parser f = Foo() r = cpa(f.fake_prepare_arguments, None) self.assertTrue(called) self.assertIsNone(r) # Try with more than needed called = False class Foo: def fake_prepare_arguments(self, parser, args, extra): global called called = True if called: pass return parser f = Foo() # Remove the following if condition and keep else condition once # Xenial is dropped if sys.version_info[0] < 3: with self.assertRaisesRegexp(ValueError, 'one or two parameters'): r = cpa(f.fake_prepare_arguments, None) else: with self.assertRaisesRegex(ValueError, 'one or two parameters'): r = cpa(f.fake_prepare_arguments, None) # Try with less than needed called = False class Foo: def fake_prepare_arguments(self): global called called = True if called: pass return 'Should not get here' f = Foo() # Remove the following if condition and keep else condition once # Xenial is dropped if sys.version_info[0] < 3: with self.assertRaisesRegexp(ValueError, 'one or two parameters'): r = cpa(f.fake_prepare_arguments, None) else: with self.assertRaisesRegex(ValueError, 'one or two parameters'): r = cpa(f.fake_prepare_arguments, None) # Try with additional optional argument called = False class Foo: def fake_prepare_arguments(self, parser, args, optional=None): global called called = True if called: pass return parser f = Foo() r = cpa(f.fake_prepare_arguments, None) self.assertTrue(called) self.assertIsNone(r) def test_split_arguments_by_verb(self): args = ['--cmd-arg1', 'verb', '--verb-arg1', '--verb-arg2'] expected = ('verb', ['--cmd-arg1'], ['--verb-arg1', '--verb-arg2']) self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected) args = ['verb', '--verb-arg1', '--verb-arg2'] expected = ('verb', [], ['--verb-arg1', '--verb-arg2']) self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected) args = ['--cmd-arg1', 'verb'] expected = ('verb', ['--cmd-arg1'], []) self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected) args = ['verb'] expected = ('verb', [], []) self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected) args = ['--cmd-arg1'] expected = (None, ['--cmd-arg1'], []) self.assertEqual(verb_pattern.split_arguments_by_verb(args), expected) 
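# Illustrative sketch, not part of the upstream test suite: the splitting
# exercised by the tests above is what a command entry point typically does
# before dispatching to a verb. Everything here other than
# split_arguments_by_verb is a hypothetical example.
def _example_main(argv):
    from osrf_pycommon.cli_utils.verb_pattern import split_arguments_by_verb
    verb, cmd_args, verb_args = split_arguments_by_verb(argv)
    # e.g. argv == ['--debug', 'build', '--jobs', '4'] yields
    # verb == 'build', cmd_args == ['--debug'], verb_args == ['--jobs', '4']
    hypothetical_verb_handlers = {'build': lambda args: 0}
    if verb not in hypothetical_verb_handlers:
        return 1
    return hypothetical_verb_handlers[verb](verb_args)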
osrf_pycommon-2.1.4/tests/unit/test_process_utils/000077500000000000000000000000001447070040300224655ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/__init__.py000066400000000000000000000000001447070040300245640ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/000077500000000000000000000000001447070040300243365ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/execute_process/000077500000000000000000000000001447070040300275365ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/execute_process/stdout_stderr_ordering.py000077500000000000000000000001611447070040300347070ustar00rootroot00000000000000from __future__ import print_function import sys print("out 1") print("err 1", file=sys.stderr) print("out 2") osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/000077500000000000000000000000001447070040300264615ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/bin/000077500000000000000000000000001447070040300272315ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/bin/bin_only.exe000077500000000000000000000000001447070040300315360ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/bin/exc1.exe000077500000000000000000000000001447070040300305650ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/bin/exc2.exe000077500000000000000000000000001447070040300305660ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/000077500000000000000000000000001447070040300272725ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/bin/000077500000000000000000000000001447070040300300425ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/bin/exc1.exe000077500000000000000000000000001447070040300313760ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/bin/exc2.exe000077500000000000000000000000001447070040300313770ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/local/000077500000000000000000000000001447070040300303645ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/local/bin/000077500000000000000000000000001447070040300311345ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/fixtures/impl_which/usr/local/bin/exc2000066400000000000000000000000001447070040300317060ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_process_utils/impl_aep_asyncio.py000066400000000000000000000006621447070040300263560ustar00rootroot00000000000000from osrf_pycommon.process_utils.async_execute_process import async_execute_process from osrf_pycommon.process_utils import get_loop from .impl_aep_protocol import create_protocol loop = get_loop() async def run(cmd, **kwargs): transport, protocol = await async_execute_process( create_protocol(), cmd, **kwargs) retcode = await protocol.complete return protocol.stdout_buffer, protocol.stderr_buffer, retcode osrf_pycommon-2.1.4/tests/unit/test_process_utils/impl_aep_protocol.py000066400000000000000000000007731447070040300265550ustar00rootroot00000000000000from osrf_pycommon.process_utils import 
AsyncSubprocessProtocol def create_protocol(): class CustomProtocol(AsyncSubprocessProtocol): def __init__(self, **kwargs): self.stdout_buffer = b"" self.stderr_buffer = b"" AsyncSubprocessProtocol.__init__(self, **kwargs) def on_stdout_received(self, data): self.stdout_buffer += data def on_stderr_received(self, data): self.stderr_buffer += data return CustomProtocol osrf_pycommon-2.1.4/tests/unit/test_process_utils/test_async_execute_process.py000066400000000000000000000100011447070040300304630ustar00rootroot00000000000000import atexit import os import sys import unittest from .impl_aep_asyncio import run from .impl_aep_asyncio import loop this_dir = os.path.dirname(os.path.abspath(__file__)) test_script = os.path.join( this_dir, 'fixtures', 'execute_process', 'stdout_stderr_ordering.py') test_script_quoted = '"%s"' % test_script if ' ' in test_script else test_script python = sys.executable # This atexit handler ensures the loop is closed after all tests were run. @atexit.register def close_loop(): if not loop.is_closed(): loop.close() class TestProcessUtilsAsyncExecuteProcess(unittest.TestCase): def test_async_execute_process_no_emulation_shell_false_combined(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script])) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stdout) self.assertIn('out 2', stdout) self.assertEqual('', stderr) self.assertEqual(0, retcode) def test_async_execute_process_no_emulation_shell_true_combined(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script_quoted], shell=True)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stdout) self.assertIn('out 2', stdout) self.assertEqual('', stderr) self.assertEqual(0, retcode) def test_async_execute_process_no_emulation_shell_false(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script], stderr_to_stdout=False)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stderr) self.assertIn('out 2', stdout) self.assertEqual(0, retcode) def test_async_execute_process_no_emulation_shell_true(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script_quoted], stderr_to_stdout=False, shell=True)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stderr) self.assertIn('out 2', stdout) self.assertEqual(0, retcode) def test_async_execute_process_with_emulation_shell_false_combined(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script], emulate_tty=True)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stdout) self.assertIn('out 2', stdout) self.assertEqual('', stderr) self.assertEqual(0, retcode) def test_async_execute_process_with_emulation_shell_true_combined(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script_quoted], emulate_tty=True, shell=True)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stdout) self.assertIn('out 2', stdout) self.assertEqual('', stderr) self.assertEqual(0, retcode) def test_async_execute_process_with_emulation_shell_false(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script], emulate_tty=True, stderr_to_stdout=False)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', 
stdout) self.assertIn('err 1', stderr) self.assertIn('out 2', stdout) self.assertEqual(0, retcode) def test_async_execute_process_with_emulation_shell_true(self): stdout, stderr, retcode = loop.run_until_complete(run( [python, test_script_quoted], emulate_tty=True, stderr_to_stdout=False, shell=True)) stdout, stderr = stdout.decode(), stderr.decode() self.assertIn('out 1', stdout) self.assertIn('err 1', stderr) self.assertIn('out 2', stdout) self.assertEqual(0, retcode) osrf_pycommon-2.1.4/tests/unit/test_process_utils/test_execute_process_nopty.py000066400000000000000000000102251447070040300305270ustar00rootroot00000000000000import os import sys import unittest from osrf_pycommon.process_utils import execute_process_nopty this_dir = os.path.dirname(os.path.abspath(__file__)) test_script = os.path.join( this_dir, 'fixtures', 'execute_process', 'stdout_stderr_ordering.py') python = sys.executable file_nl = b""" """ nl = os.linesep.encode() class TestProcessUtilsExecuteNoPty(unittest.TestCase): def test__process_incomming_lines(self): pil = execute_process_nopty._process_incoming_lines # Test with no left overs and no new incoming left_overs = b'' incoming = b'' self.assertEqual((None, left_overs), pil(incoming, left_overs)) # Test with left overs, but no new incoming left_overs = b'something' incoming = b'' self.assertEqual((b'', left_overs), pil(incoming, left_overs)) # Test with no left overs, but new incoming left_overs = b'' incoming = nl.join([b'one', b'two']) self.assertEqual((b'one' + nl, b'two'), pil(incoming, left_overs)) # Test with left overs and new incoming with prefixed nl left_overs = b'something' incoming = (nl + b'else') expected = (b'something' + nl, b'else') self.assertEqual(expected, pil(incoming, left_overs)) def test__execute_process_nopty_combined_unbuffered(self): exc_nopty = execute_process_nopty._execute_process_nopty # Test ordering with stdout and stderr combined and Python unbuffered cmd = [python, "-u", test_script] result = b"" for out, err, ret in exc_nopty(cmd, None, None, False, True): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = b"""\ out 1 err 1 out 2 """ expected = expected.replace(file_nl, nl) expected = sorted(expected.splitlines(True)) result = sorted(result.splitlines(True)) self.assertEqual(expected, result) def test__execute_process_nopty_unbuffered(self): exc_nopty = execute_process_nopty._execute_process_nopty # Test ordering with stdout and stderr combined and Python unbuffered cmd = [python, "-u", test_script] result = b"" for out, err, ret in exc_nopty(cmd, None, None, False, False): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = b"""\ out 1 err 1 out 2 """ expected = expected.replace(file_nl, nl) expected = sorted(expected.splitlines(True)) result = sorted(result.splitlines(True)) self.assertEqual(expected, result) def test__execute_process_nopty_combined(self): exc_nopty = execute_process_nopty._execute_process_nopty # Test ordering with stdout and stderr combined cmd = [python, test_script] result = b"" for out, err, ret in exc_nopty(cmd, None, None, False, True): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = b"""\ out 1 err 1 out 2 """ expected = expected.replace(file_nl, nl) expected = sorted(expected.splitlines(True)) result = sorted(result.splitlines(True)) self.assertEqual(expected, result) def test__execute_process_nopty(self): exc_nopty = 
execute_process_nopty._execute_process_nopty # Test ordering with stdout and stderr separate cmd = [python, test_script] result = b"" for out, err, ret in exc_nopty(cmd, None, None, False, False): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = b"""\ out 1 err 1 out 2 """ expected = expected.replace(file_nl, nl) expected = sorted(expected.splitlines(True)) result = sorted(result.splitlines(True)) self.assertEqual(expected, result) osrf_pycommon-2.1.4/tests/unit/test_process_utils/test_execute_process_pty.py000066400000000000000000000061651447070040300302020ustar00rootroot00000000000000import os import sys import unittest this_dir = os.path.dirname(os.path.abspath(__file__)) test_script = os.path.join( this_dir, 'fixtures', 'execute_process', 'stdout_stderr_ordering.py') python = sys.executable def convert_file_linesep_with_pty_linesep(string): return string.replace(b""" """, b"\r\n") @unittest.skipIf(sys.platform.startswith("win"), "Windows not supported") class TestProcessUtilsExecuteNoPty(unittest.TestCase): def test__execute_process_pty_combined_unbuffered(self): from osrf_pycommon.process_utils import execute_process_pty exc_pty = execute_process_pty._execute_process_pty # Test ordering with stdout and stderr combined and Python unbuffered cmd = [python, "-u", test_script] result = b"" for out, err, ret in exc_pty(cmd, None, None, False, True): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = convert_file_linesep_with_pty_linesep(b"""\ out 1 err 1 out 2 """) self.assertEqual(expected, result) def test__execute_process_pty_unbuffered(self): from osrf_pycommon.process_utils import execute_process_pty exc_pty = execute_process_pty._execute_process_pty # Test ordering with stdout and stderr combined and Python unbuffered cmd = [python, "-u", test_script] result = b"" for out, err, ret in exc_pty(cmd, None, None, False, False): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = convert_file_linesep_with_pty_linesep(b"""\ out 1 err 1 out 2 """) self.assertEqual(expected, result) def test__execute_process_pty_combined(self): from osrf_pycommon.process_utils import execute_process_pty exc_pty = execute_process_pty._execute_process_pty # Test ordering with stdout and stderr combined cmd = [python, test_script] result = b"" for out, err, ret in exc_pty(cmd, None, None, False, True): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = convert_file_linesep_with_pty_linesep(b"""\ out 1 err 1 out 2 """) self.assertEqual(expected, result) def test__execute_process_pty(self): from osrf_pycommon.process_utils import execute_process_pty exc_pty = execute_process_pty._execute_process_pty # Test ordering with stdout and stderr separate cmd = [python, test_script] result = b"" for out, err, ret in exc_pty(cmd, None, None, False, False): if out is not None: result += out if err is not None: result += err if ret is not None: break expected = convert_file_linesep_with_pty_linesep(b"""\ out 1 err 1 out 2 """) self.assertEqual(expected, result) osrf_pycommon-2.1.4/tests/unit/test_process_utils/test_impl.py000066400000000000000000000041641447070040300250440ustar00rootroot00000000000000from __future__ import unicode_literals import os import unittest from osrf_pycommon.process_utils import impl this_dir = os.path.dirname(os.path.abspath(__file__)) class TestProcessUtilsImpl(unittest.TestCase): def 
test_which(self): which = impl.which j = os.path.join prefix = j(this_dir, 'fixtures', 'impl_which') paths = os.pathsep.join([ j(prefix, 'usr', 'local', 'bin'), j(prefix, 'usr', 'bin'), j(prefix, 'bin'), ]) # bin_only exists +x in bin only expected = j(prefix, 'bin', 'bin_only.exe') self.assertEqual(expected, which('bin_only.exe', path=paths)) self.assertEqual(expected, which(expected, path=paths)) # exc1 exists +x in bin and usr/bin expected = j(prefix, 'usr', 'bin', 'exc1.exe') self.assertEqual(expected, which('exc1.exe', path=paths)) self.assertEqual(expected, which(expected, path=paths)) # exc2 exists +x in bin and usr/bin, but -x in usr/local/bin expected = j(prefix, 'usr', 'bin', 'exc2.exe') self.assertEqual(expected, which('exc2.exe', path=paths)) self.assertEqual(expected, which(expected, path=paths)) # Same as above, with PATH orig_path = os.environ['PATH'] try: os.environ['PATH'] = paths # bin_only exists +x in bin only expected = j(prefix, 'bin', 'bin_only.exe') self.assertEqual(expected, which('bin_only.exe')) self.assertEqual(expected, which(expected)) # exc1 exists +x in bin and usr/bin expected = j(prefix, 'usr', 'bin', 'exc1.exe') self.assertEqual(expected, which('exc1.exe')) self.assertEqual(expected, which(expected)) # exc2 exists +x in bin and usr/bin, but -x in usr/local/bin expected = j(prefix, 'usr', 'bin', 'exc2.exe') self.assertEqual(expected, which('exc2.exe')) self.assertEqual(expected, which(expected)) finally: os.environ['PATH'] = orig_path which(str("exc1.exe"), path=paths) # Make sure unicode/str works osrf_pycommon-2.1.4/tests/unit/test_terminal_color/000077500000000000000000000000001447070040300226005ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_terminal_color/__init__.py000066400000000000000000000000001447070040300246770ustar00rootroot00000000000000osrf_pycommon-2.1.4/tests/unit/test_terminal_color/test_ansi_re.py000066400000000000000000000021621447070040300256320ustar00rootroot00000000000000import unittest from osrf_pycommon.terminal_color import ansi_re class TestTerminalColorAnsiRe(unittest.TestCase): test_str = "\x1b[31mred \033[1mbold red \x1b[0mnormal \x1b[41mred bg" def test_split_by_ansi_escape_sequence(self): split_ansi = ansi_re.split_by_ansi_escape_sequence expected = [ "", "\x1b[31m", "red ", "\x1b[1m", "bold red ", "\x1b[0m", "normal ", "\x1b[41m", "red bg" ] self.assertEqual(expected, split_ansi(self.test_str, True)) expected = ["", "red ", "bold red ", "normal ", "red bg"] self.assertEqual(expected, split_ansi(self.test_str, False)) def test_remove_ansi_escape_sequences(self): remove_ansi = ansi_re.remove_ansi_escape_sequences expected = "red bold red normal red bg" self.assertEqual(expected, remove_ansi(self.test_str)) def test_remove_ansi_escape_sequences_false_positives(self): remove_ansi = ansi_re.remove_ansi_escape_sequences false_positive = "Should not match: \1xb[1234m \033[m \1xb[3Om" self.assertEqual(false_positive, remove_ansi(false_positive)) osrf_pycommon-2.1.4/tests/unit/test_terminal_color/test_impl.py000066400000000000000000000054711447070040300251610ustar00rootroot00000000000000import unittest from osrf_pycommon.terminal_color import impl assert impl._enabled is True class TestTerminalColorImpl(unittest.TestCase): test_format_str = "@{r}red @!bold red @|normal @{rb}red bg" test_str = "\x1b[31mred \033[1mbold red \x1b[0mnormal \x1b[41mred bg" def test_ansi(self): ansi = impl.ansi self.assertEqual('\x1b[0m', ansi('reset')) impl.disable_ansi_color_substitution_globally() self.assertNotEqual('\x1b[0m', 
ansi('reset')) self.assertEqual('@|', ansi('atbar')) impl.enable_ansi_color_substitution_globally() self.assertEqual('\x1b[0m', ansi('reset')) def test_get_ansi_dict(self): get_ansi_dict = impl.get_ansi_dict ansi_dict = get_ansi_dict() self.assertNotEqual({}, ansi_dict) self.assertEqual('\x1b[0m', ansi_dict['reset']) impl.disable_ansi_color_substitution_globally() ansi_dict = get_ansi_dict() self.assertEqual('\x1b[0m', ansi_dict['reset']) self.assertEqual('@|', ansi_dict['atbar']) self.assertNotEqual({}, ansi_dict) impl.enable_ansi_color_substitution_globally() ansi_dict = get_ansi_dict() self.assertNotEqual({}, ansi_dict) self.assertEqual('\x1b[0m', ansi_dict['reset']) def test_enable_and_disable_ansi_color_substitution_globally(self): enable = impl.enable_ansi_color_substitution_globally disable = impl.disable_ansi_color_substitution_globally is_windows = impl._is_windows enabled = impl._enabled try: impl._is_windows = False impl._enabled = True enable() self.assertEqual('\x1b[0m', impl.ansi('reset')) self.assertEqual('\x1b[0m', impl.format_color('@|')) disable() self.assertEqual('', impl.ansi('reset')) self.assertEqual('', impl.format_color('@|')) enable() self.assertEqual('\x1b[0m', impl.ansi('reset')) self.assertEqual('\x1b[0m', impl.format_color('@|')) finally: impl._is_windows = is_windows impl._enabled = enabled def test_format_color(self): is_windows = impl._is_windows try: impl._is_windows = False format_color = impl.format_color self.assertEqual(self.test_str, format_color(self.test_format_str)) sanitized_str = "|@@ Notice @{atbar}" self.assertEqual("|@ Notice @|", format_color(sanitized_str)) finally: impl._is_windows = is_windows def test_sanitize(self): sanitize = impl.sanitize test_str = "Email: {email}@{org}" self.assertEqual("Email: {{email}}@@{{org}}", sanitize(test_str)) test_str = "|@ Notice @|" self.assertEqual("|@@ Notice @{atbar}", sanitize(test_str)) osrf_pycommon-2.1.4/tests/unit/test_terminal_utils.py000066400000000000000000000011371447070040300231760ustar00rootroot00000000000000import unittest from unittest import mock from osrf_pycommon.terminal_utils import is_tty class TestTerminalUtils(unittest.TestCase): def test_is_tty(self): mock_stream = object() self.assertFalse(is_tty(mock_stream)) mock_stream = mock.MagicMock() mock_stream.isatty = mock.MagicMock(return_value=None) self.assertFalse(is_tty(mock_stream)) mock_stream.isatty = mock.MagicMock(return_value=False) self.assertFalse(is_tty(mock_stream)) mock_stream.isatty = mock.MagicMock(return_value=True) self.assertTrue(is_tty(mock_stream))
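# Illustrative sketch, not part of the upstream test suite: is_tty is
# typically used to decide whether colorized output is appropriate, for
# example stripping escape sequences when stdout is redirected to a file.
# The function name _example_emit is a hypothetical example; the imports
# mirror the modules exercised by the tests above.
def _example_emit(message, stream=None):
    import sys
    from osrf_pycommon.terminal_color.ansi_re import (
        remove_ansi_escape_sequences)
    from osrf_pycommon.terminal_color.impl import format_color, sanitize
    from osrf_pycommon.terminal_utils import is_tty
    stream = sys.stdout if stream is None else stream
    # Sanitize the incoming text, then wrap it in green and a reset
    colored = format_color("@{gf}" + sanitize(message) + "@|")
    if not is_tty(stream):
        # Not a terminal: drop the ansi escape sequences entirely
        colored = remove_ansi_escape_sequences(colored)
    print(colored, file=stream)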