././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/0000755000175100001710000000000000000000000015152 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.coveragerc0000644000175100001710000000006200000000000017271 0ustar00runnerdocker[run] omit = .tox/* [report] show_missing = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.editorconfig0000644000175100001710000000030300000000000017623 0ustar00runnerdockerroot = true [*] charset = utf-8 indent_style = tab indent_size = 4 insert_final_newline = true end_of_line = lf [*.py] indent_style = space [*.{yml,yaml}] indent_style = space indent_size = 2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.flake80000644000175100001710000000050600000000000016326 0ustar00runnerdocker[flake8] max-line-length = 88 # jaraco/skeleton#34 max-complexity = 10 extend-ignore = # Black creates whitespace before colon E203 exclude = # Exclude the entire top-level __init__.py file since its only purpose is # to expose the version string and to handle Python 2/3 compatibility. importlib_resources/__init__.py ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.gitattributes0000644000175100001710000000003300000000000020041 0ustar00runnerdocker*.file binary *.zip binary ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1614883644.538228 importlib_resources-5.1.2/.github/0000755000175100001710000000000000000000000016512 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1614883644.550228 importlib_resources-5.1.2/.github/workflows/0000755000175100001710000000000000000000000020547 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.github/workflows/automerge.yml0000644000175100001710000000076100000000000023266 0ustar00runnerdockername: automerge on: pull_request: types: - labeled - unlabeled - synchronize - opened - edited - ready_for_review - reopened - unlocked pull_request_review: types: - submitted check_suite: types: - completed status: {} jobs: automerge: runs-on: ubuntu-latest steps: - name: automerge uses: "pascalgn/automerge-action@v0.12.0" env: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.github/workflows/main.yml0000644000175100001710000000262300000000000022221 0ustar00runnerdockername: tests on: [push, pull_request] jobs: test: strategy: matrix: python: [3.6, 3.8, 3.9] platform: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.platform }} steps: - uses: actions/checkout@v2 - name: Setup Python uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - name: Install tox run: | python -m pip install tox - name: Run tests run: tox diffcov: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: fetch-depth: 0 - name: Setup Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install tox run: | python -m pip install tox - name: Evaluate coverage run: tox env: TOXENV: diffcov release: needs: test if: github.event_name == 'push' && contains(github.ref, 
'refs/tags/') runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Setup Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install tox run: | python -m pip install tox - name: Release run: tox -e release env: TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.gitignore0000644000175100001710000000222300000000000017141 0ustar00runnerdocker# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # dotenv .env # virtualenv .venv venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ /diffcov.html ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.pre-commit-config.yaml0000644000175100001710000000025700000000000021437 0ustar00runnerdockerrepos: - repo: https://github.com/psf/black rev: 20.8b1 hooks: - id: black - repo: https://github.com/asottile/blacken-docs rev: v1.9.1 hooks: - id: blacken-docs ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/.readthedocs.yml0000644000175100001710000000011700000000000020237 0ustar00runnerdockerversion: 2 python: install: - path: . extra_requirements: - docs ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/CHANGES.rst0000644000175100001710000001461700000000000016765 0ustar00runnerdockerv5.1.2 ====== * Re-release with changes from 5.0.4. v5.0.4 ====== * Fixed non-hermetic test in test_reader, revealed by GH-24670. v5.1.1 ====== * Re-release with changes from 5.0.3. v5.0.3 ====== * Simplified DegenerateFiles.Path. v5.0.2 ====== * #214: Added ``_adapters`` module to ensure that degenerate ``files`` behavior can be made available for legacy loaders whose resource readers don't implement it. Fixes issue where backport compatibility module was masking this fallback behavior only to discover the defect when applying changes to CPython. v5.1.0 ====== * Added ``simple`` module implementing adapters from a low-level resource reader interface to a ``TraversableResources`` interface. Closes #90. v5.0.1 ====== * Remove pyinstaller hook for hidden 'trees' module. v5.0.0 ====== * Removed ``importlib_resources.trees``, deprecated since 1.3.0. v4.1.1 ====== * Fixed badges in README. v4.1.0 ====== * #209: Adopt `jaraco/skeleton `_. * Cleaned up some straggling Python 2 compatibility code. 
* Refreshed test zip files without .pyc and .pyo files. v4.0.0 ====== * #108: Drop support for Python 2.7. Now requires Python 3.6+. v3.3.1 ====== * Minor cleanup. v3.3.0 ====== * #107: Drop support for Python 3.5. Now requires Python 2.7 or 3.6+. v3.2.1 ====== * #200: Minor fixes and improved tests for namespace package support. v3.2.0 ====== * #68: Resources in PEP 420 Namespace packages are now supported. v3.1.1 ====== * bpo-41490: ``contents`` is now also more aggressive about consuming any iterator from the ``Reader``. v3.1.0 ====== * #110 and bpo-41490: ``path`` method is more aggressive about releasing handles to zipfile objects early, enabling use-cases like ``certifi`` to leave the context open but delete the underlying zip file. v3.0.0 ====== * Package no longer exposes ``importlib_resources.__version__``. Users that wish to inspect the version of ``importlib_resources`` should instead invoke ``.version('importlib_resources')`` from ``importlib-metadata`` ( `stdlib `_ or `backport `_) directly. This change eliminates the dependency on ``importlib_metadata``. Closes #100. * Package now always includes its data. Closes #93. * Declare hidden imports for PyInstaller. Closes #101. v2.0.1 ====== * Select pathlib and contextlib imports based on Python version and avoid pulling in deprecated [pathlib](https://pypi.org/project/pathlib). Closes #97. v2.0.0 ====== * Loaders are no longer expected to implement the ``abc.TraversableResources`` interface, but are instead expected to return ``TraversableResources`` from their ``get_resource_reader`` method. v1.5.0 ====== * Traversable is now a Protocol instead of an Abstract Base Class (Python 2.7 and Python 3.8+). * Traversable objects now require a ``.name`` property. v1.4.0 ====== * #79: Temporary files created will now reflect the filename of their origin. v1.3.1 ====== * For improved compatibility, ``importlib_resources.trees`` is now imported implicitly. Closes #88. v1.3.0 ====== * Add extensibility support for non-standard loaders to supply ``Traversable`` resources. Introduces a new abstract base class ``abc.TraversableResources`` that supersedes (but implements for compatibility) ``abc.ResourceReader``. Any loader that implements (implicitly or explicitly) the ``TraversableResources.files`` method will be capable of supplying resources with subdirectory support. Closes #77. * Preferred way to access ``as_file`` is now from top-level module. ``importlib_resources.trees.as_file`` is deprecated and discouraged. Closes #86. * Moved ``Traversable`` abc to ``abc`` module. Closes #87. v1.2.0 ====== * Traversable now requires an ``open`` method. Closes #81. * Fixed error on ``Python 3.5.{0,3}``. Closes #83. * Updated packaging to resolve version from package metadata. Closes #82. v1.1.0 ====== * Add support for retrieving resources from subdirectories of packages through the new ``files()`` function, which returns a ``Traversable`` object with ``joinpath`` and ``read_*`` interfaces matching those of ``pathlib.Path`` objects. This new function supersedes all of the previous functionality as it provides a more general-purpose access to a package's resources. With this function, subdirectories are supported (Closes #58). The documentation has been updated to reflect that this function is now the preferred interface for loading package resources. It does not, however, support resources from arbitrary loaders. 
It currently only supports resources from file system path and zipfile packages (a consequence of the ResourceReader interface only operating on Python packages). 1.0.2 ===== * Fix ``setup_requires`` and ``install_requires`` metadata in ``setup.cfg``. Given by Anthony Sottile. 1.0.1 ===== * Update Trove classifiers. Closes #63 1.0 === * Backport fix for test isolation from Python 3.8/3.7. Closes #61 0.8 === * Strip ``importlib_resources.__version__``. Closes #56 * Fix a metadata problem with older setuptools. Closes #57 * Add an ``__all__`` to ``importlib_resources``. Closes #59 0.7 === * Fix ``setup.cfg`` metadata bug. Closes #55 0.6 === * Move everything from ``pyproject.toml`` to ``setup.cfg``, with the added benefit of fixing the PyPI metadata. Closes #54 * Turn off mypy's ``strict_optional`` setting for now. 0.5 === * Resynchronize with Python 3.7; changes the return type of ``contents()`` to be an ``Iterable``. Closes #52 0.4 === * Correctly find resources in subpackages inside a zip file. Closes #51 0.3 === * The API, implementation, and documentation is synchronized with the Python 3.7 standard library. Closes #47 * When run under Python 3.7 this API shadows the stdlib versions. Closes #50 0.2 === * **Backward incompatible change**. Split the ``open()`` and ``read()`` calls into separate binary and text versions, i.e. ``open_binary()``, ``open_text()``, ``read_binary()``, and ``read_text()``. Closes #41 * Fix a bug where unrelated resources could be returned from ``contents()``. Closes #44 * Correctly prevent namespace packages from containing resources. Closes #20 0.1 === * Initial release. .. Local Variables: mode: change-log-mode indent-tabs-mode: nil sentence-end-double-space: t fill-column: 78 coding: utf-8 End: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/LICENSE0000644000175100001710000000107000000000000016155 0ustar00runnerdockerCopyright 2017-2019 Brett Cannon, Barry Warsaw Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/PKG-INFO0000644000175100001710000000442200000000000016251 0ustar00runnerdockerMetadata-Version: 2.1 Name: importlib_resources Version: 5.1.2 Summary: Read resources from Python packages Home-page: https://github.com/python/importlib_resources Author: Barry Warsaw Author-email: barry@python.org License: Apache2 Project-URL: Documentation, https://importlib-resources.readthedocs.io/ Description: .. image:: https://img.shields.io/pypi/v/importlib_resources.svg :target: `PyPI link`_ .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg :target: `PyPI link`_ .. _PyPI link: https://pypi.org/project/importlib_resources .. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 :alt: tests .. 
image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black :alt: Code style: Black .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest ``importlib_resources`` is a backport of Python standard library `importlib.resources `_ module for older Pythons. Users of Python 3.9 and beyond should use the standard library module, since for these versions, ``importlib_resources`` just delegates to that module. The key goal of this module is to replace parts of `pkg_resources `_ with a solution in Python's stdlib that relies on well-defined APIs. This makes reading resources included in packages easier, with more stable and consistent semantics. Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Requires-Python: >=3.6 Provides-Extra: testing Provides-Extra: docs ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/README.rst0000644000175100001710000000257100000000000016646 0ustar00runnerdocker.. image:: https://img.shields.io/pypi/v/importlib_resources.svg :target: `PyPI link`_ .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg :target: `PyPI link`_ .. _PyPI link: https://pypi.org/project/importlib_resources .. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 :alt: tests .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black :alt: Code style: Black .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest ``importlib_resources`` is a backport of Python standard library `importlib.resources `_ module for older Pythons. Users of Python 3.9 and beyond should use the standard library module, since for these versions, ``importlib_resources`` just delegates to that module. The key goal of this module is to replace parts of `pkg_resources `_ with a solution in Python's stdlib that relies on well-defined APIs. This makes reading resources included in packages easier, with more stable and consistent semantics. 
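
For most uses, the ``files()`` API is all you need. A minimal usage sketch
(the package and resource names below are hypothetical)::

    import importlib_resources

    # Locate a data file bundled inside an importable package and read it,
    # whether the package lives on the file system or inside a zip file.
    ref = importlib_resources.files('my_package').joinpath('data.txt')
    text = ref.read_text(encoding='utf-8')
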
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/codecov.yml0000644000175100001710000000006700000000000017322 0ustar00runnerdockercodecov: token: 5eb1bc45-1b7f-43e6-8bc1-f2b02833dba9 ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1614883644.550228 importlib_resources-5.1.2/docs/0000755000175100001710000000000000000000000016102 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/docs/conf.py0000644000175100001710000000161500000000000017404 0ustar00runnerdocker#!/usr/bin/env python3 # -*- coding: utf-8 -*- extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker'] master_doc = "index" link_files = { '../CHANGES.rst': dict( using=dict(GH='https://github.com'), replace=[ dict( pattern=r'(Issue #|\B#)(?P\d+)', url='{package_url}/issues/{issue}', ), dict( pattern=r'(?m:^((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n)', with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', ), dict( pattern=r'PEP[- ](?P\d+)', url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/', ), dict( pattern=r'(Python #|bpo-)(?P\d+)', url='http://bugs.python.org/issue{python}', ), ], ), } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/docs/history.rst0000644000175100001710000000012100000000000020327 0ustar00runnerdocker:tocdepth: 2 .. _changes: History ******* .. include:: ../CHANGES (links).rst ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/docs/index.rst0000644000175100001710000000324200000000000017744 0ustar00runnerdockerWelcome to |project| documentation! =================================== ``importlib_resources`` is a library which provides for access to *resources* in Python packages. It provides functionality similar to ``pkg_resources`` `Basic Resource Access`_ API, but without all of the overhead and performance problems of ``pkg_resources``. In our terminology, a *resource* is a file tree that is located within an importable `Python package`_. Resources can live on the file system or in a zip file, with support for other loader_ classes that implement the appropriate API for reading resources. ``importlib_resources`` is a backport of Python 3.9's standard library `importlib.resources`_ module for Python 2.7, and 3.5 through 3.8. Users of Python 3.9 and beyond are encouraged to use the standard library module. Developers looking for detailed API descriptions should refer to the Python 3.9 standard library documentation. The documentation here includes a general :ref:`usage ` guide and a :ref:`migration ` guide for projects that want to adopt ``importlib_resources`` instead of ``pkg_resources``. .. toctree:: :maxdepth: 2 :caption: Contents: using.rst migration.rst history.rst Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` .. _`importlib.resources`: https://docs.python.org/3.7/library/importlib.html#module-importlib.resources .. _`Basic Resource Access`: http://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access .. _`Python package`: https://docs.python.org/3/reference/import.html#packages .. 
_loader: https://docs.python.org/3/reference/import.html#finders-and-loaders ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/docs/migration.rst0000644000175100001710000001341500000000000020631 0ustar00runnerdocker.. _migration: ================= Migration guide ================= The following guide will help you migrate common ``pkg_resources`` APIs to ``importlib_resources``. Only a small number of the most common APIs are supported by ``importlib_resources``, so projects that use other features (e.g. entry points) will have to find other solutions. ``importlib_resources`` primarily supports the following `basic resource access`_ APIs: * ``pkg_resources.resource_filename()`` * ``pkg_resources.resource_stream()`` * ``pkg_resources.resource_string()`` * ``pkg_resources.resource_listdir()`` * ``pkg_resources.resource_isdir()`` Note that although the steps below provide a drop-in replacement for the above methods, for many use-cases, a better approach is to use the ``Traversable`` path from ``files()`` directly. pkg_resources.resource_filename() ================================= ``resource_filename()`` is one of the more interesting APIs because it guarantees that the return value names a file on the file system. This means that if the resource is in a zip file, ``pkg_resources`` will extract the file and return the name of the temporary file it created. The problem is that ``pkg_resources`` also *implicitly* cleans up this temporary file, without control over its lifetime by the programmer. ``importlib_resources`` takes a different approach. Its equivalent API is the ``files()`` function, which returns a Traversable object implementing a subset of the :py:class:`pathlib.Path` interface suitable for reading the contents and provides a wrapper for creating a temporary file on the system in a context whose lifetime is managed by the user. Note though that if the resource is *already* on the file system, ``importlib_resources`` still returns a context manager, but nothing needs to get cleaned up. Here's an example from ``pkg_resources``:: path = pkg_resources.resource_filename('my.package', 'resource.dat') The best way to convert this is with the following idiom:: ref = importlib_resources.files('my.package') / 'resource.dat' with importlib_resources.as_file(ref) as path: # Do something with path. After the with-statement exits, any # temporary file created will be immediately cleaned up. That's all fine if you only need the file temporarily, but what if you need it to stick around for a while? One way of doing this is to use an :py:class:`contextlib.ExitStack` instance and manage the resource explicitly:: from contextlib import ExitStack file_manager = ExitStack() ref = importlib_resources.files('my.package') / 'resource.dat' path = file_manager.enter_context( importlib_resources.as_file(ref)) Now ``path`` will continue to exist until you explicitly call ``file_manager.close()``. What if you want the file to exist until the process exits, or you can't pass ``file_manager`` around in your code? Use an :py:mod:`atexit` handler:: import atexit file_manager = ExitStack() atexit.register(file_manager.close) ref = importlib_resources.files('my.package') / 'resource.dat' path = file_manager.enter_context( importlib_resources.as_file(ref)) Assuming your Python interpreter exits gracefully, the temporary file will be cleaned up when Python exits. 
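
If you need this "keep it until exit" behavior in several places, the pattern
above can be wrapped in a small helper. The sketch below is illustrative only;
the helper name and the module-level ``ExitStack`` are conventions of this
example, not part of the ``importlib_resources`` API::

    import atexit
    from contextlib import ExitStack

    import importlib_resources

    _file_manager = ExitStack()          # closed automatically at interpreter exit
    atexit.register(_file_manager.close)

    def persistent_path(package, resource):
        # Return a real file system path for *resource* that stays valid until
        # the interpreter exits; any temporary copy is removed at that point.
        ref = importlib_resources.files(package) / resource
        return _file_manager.enter_context(importlib_resources.as_file(ref))
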
pkg_resources.resource_stream()
===============================

``pkg_resources.resource_stream()`` returns a readable file-like object
opened in binary mode. When you read from the returned file-like object,
you get bytes. E.g.::

    with pkg_resources.resource_stream('my.package', 'resource.dat') as fp:
        my_bytes = fp.read()

The equivalent code in ``importlib_resources`` is pretty straightforward::

    ref = importlib_resources.files('my.package').joinpath('resource.dat')
    with ref.open('rb') as fp:
        my_bytes = fp.read()


pkg_resources.resource_string()
===============================

In Python 2, ``pkg_resources.resource_string()`` returns the contents of a
resource as a ``str``. In Python 3, this function is a misnomer; it actually
returns the contents of the named resource as ``bytes``. That's why the
following example is often written for clarity as::

    from pkg_resources import resource_string as resource_bytes
    contents = resource_bytes('my.package', 'resource.dat')

This can be easily rewritten like so::

    ref = importlib_resources.files('my.package').joinpath('resource.dat')
    contents = ref.read_bytes()


pkg_resources.resource_listdir()
================================

This function lists the entries in the package, both files and directories,
but it does not recurse into subdirectories, e.g.::

    for entry in pkg_resources.resource_listdir('my.package', 'subpackage'):
        print(entry)

This is easily rewritten using the following idiom::

    for entry in importlib_resources.files('my.package.subpackage').iterdir():
        print(entry.name)

Note:

* ``Traversable.iterdir()`` returns *all* the entries in the subpackage,
  i.e. both resources (files) and non-resources (directories).
* ``Traversable.iterdir()`` returns additional traversable objects, which,
  if they are directories, can also be iterated over (recursively).
* ``Traversable.iterdir()``, like ``pathlib.Path``, returns an iterator,
  not a concrete sequence.
* The order in which the elements are returned is undefined.


pkg_resources.resource_isdir()
==============================

You can ask ``pkg_resources`` to tell you whether a particular resource inside
a package is a directory or not::

    if pkg_resources.resource_isdir('my.package', 'resource'):
        print('A directory')

The ``importlib_resources`` equivalent is straightforward::

    if importlib_resources.files('my.package').joinpath('resource').is_dir():
        print('A directory')


.. _`basic resource access`: http://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

importlib_resources-5.1.2/docs/using.rst

.. _using:

===========================
 Using importlib_resources
===========================

``importlib_resources`` is a library that leverages Python's import system to
provide access to *resources* within *packages*. Given that this library is
built on top of the import system, it is highly efficient and easy to use.
This library's philosophy is that, if you can import a package, you can access
resources within that package. Resources can be opened or read, in either
binary or text mode.

What exactly do we mean by "a resource"? It's easiest to think about the
metaphor of files and directories on the file system, though it's important to
keep in mind that this is just a metaphor. Resources and packages **do not**
have to exist as physical files and directories on the file system.
If you have a file system layout such as:: data/ __init__.py one/ __init__.py resource1.txt resources1/ resource1.1.txt two/ __init__.py resource2.txt then the directories are ``data``, ``data/one``, and ``data/two``. Each of these are also Python packages by virtue of the fact that they all contain ``__init__.py`` files [#fn1]_. That means that in Python, all of these import statements work:: import data import data.one from data import two Each import statement gives you a Python *module* corresponding to the ``__init__.py`` file in each of the respective directories. These modules are packages since packages are just special module instances that have an additional attribute, namely a ``__path__`` [#fn2]_. In this analogy then, resources are just files or directories contained in a package directory, so ``data/one/resource1.txt`` and ``data/two/resource2.txt`` are both resources, as are the ``__init__.py`` files in all the directories. Resources are always accessed relative to the package that they live in. ``resource1.txt`` and ``resources1/resource1.1.txt`` are resources within the ``data.one`` package, and ``two/resource2.txt`` is a resource within the ``data`` package. Example ======= Let's say you are writing an email parsing library and in your test suite you have a sample email message in a file called ``message.eml``. You would like to access the contents of this file for your tests, so you put this in your project under the ``email/tests/data/message.eml`` path. Let's say your unit tests live in ``email/tests/test_email.py``. Your test could read the data file by doing something like:: data_dir = os.path.join(os.path.dirname(__file__), 'tests', 'data') data_path = os.path.join(data_dir, 'message.eml') with open(data_path, encoding='utf-8') as fp: eml = fp.read() But there's a problem with this! The use of ``__file__`` doesn't work if your package lives inside a zip file, since in that case this code does not live on the file system. You could use the `pkg_resources API`_ like so:: # In Python 3, resource_string() actually returns bytes! from pkg_resources import resource_string as resource_bytes eml = resource_bytes('email.tests.data', 'message.eml').decode('utf-8') This requires you to make Python packages of both ``email/tests`` and ``email/tests/data``, by placing an empty ``__init__.py`` files in each of those directories. The problem with the ``pkg_resources`` approach is that, depending on the packages in your environment, ``pkg_resources`` can be expensive just to import. This behavior can have a serious negative impact on things like command line startup time for Python implement commands. ``importlib_resources`` solves this performance challenge by being built entirely on the back of the stdlib :py:mod:`importlib`. By taking advantage of all the efficiencies in Python's import system, and the fact that it's built into Python, using ``importlib_resources`` can be much more performant. The equivalent code using ``importlib_resources`` would look like:: from importlib_resources import files # Reads contents with UTF-8 encoding and returns str. eml = files('email.tests.data').joinpath('message.eml').read_text() Packages or package names ========================= All of the ``importlib_resources`` APIs take a *package* as their first parameter, but this can either be a package name (as a ``str``) or an actual module object, though the module *must* be a package [#fn3]_. If a string is passed in, it must name an importable Python package, and this is first imported. 
Thus the above example could also be written as:: import email.tests.data eml = files(email.tests.data).joinpath('message.eml').read_text() File system or zip file ======================= In general you never have to worry whether your package is on the file system or in a zip file, as the ``importlib_resources`` APIs hide those details from you. Sometimes though, you need a path to an actual file on the file system. For example, some SSL APIs require a certificate file to be specified by a real file system path, and C's ``dlopen()`` function also requires a real file system path. To support this, ``importlib_resources`` provides an API that will extract the resource from a zip file to a temporary file, and return the file system path to this temporary file as a :py:class:`pathlib.Path` object. In order to properly clean up this temporary file, what's actually returned is a context manager that you can use in a ``with``-statement:: from importlib_resources import files, as_file source = files(email.tests.data).joinpath('message.eml') with as_file(source) as eml: third_party_api_requiring_file_system_path(eml) You can use all the standard :py:mod:`contextlib` APIs to manage this context manager. .. attention:: There is an odd interaction with Python 3.4, 3.5, and 3.6 regarding adding zip or wheel file paths to ``sys.path``. Due to limitations in `zipimport `_, which can't be changed without breaking backward compatibility, you **must** use an absolute path to the zip/wheel file. If you use a relative path, you will not be able to find resources inside these zip files. E.g.: **No**:: sys.path.append('relative/path/to/foo.whl') files('foo') # This will fail! **Yes**:: sys.path.append(os.path.abspath('relative/path/to/foo.whl')) files('foo') Both relative and absolute paths work for Python 3.7 and newer. Extending ========= Starting with Python 3.9 and ``importlib_resources`` 2.0, this package provides an interface for non-standard loaders, such as those used by executable bundlers, to supply resources. These loaders should supply a ``get_resource_reader`` method, which is passed a module name and should return a ``TraversableResources`` instance. .. rubric:: Footnotes .. [#fn1] We're ignoring `PEP 420 `_ style namespace packages, since ``importlib_resources`` does not support resources within namespace packages. Also, the example assumes that the parent directory containing ``data/`` is on ``sys.path``. .. [#fn2] As of `PEP 451 `_ this information is also available on the module's ``__spec__.submodule_search_locations`` attribute, which will not be ``None`` for packages. .. [#fn3] Specifically, this means that in Python 2, the module object must have an ``__path__`` attribute, while in Python 3, the module's ``__spec__.submodule_search_locations`` must not be ``None``. Otherwise a ``TypeError`` is raised. .. _`pkg_resources API`: http://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access .. _`loader`: https://docs.python.org/3/reference/import.html#finders-and-loaders .. 
_`ResourceReader`: https://docs.python.org/3.7/library/importlib.html#importlib.abc.ResourceReader ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1614883644.550228 importlib_resources-5.1.2/importlib_resources/0000755000175100001710000000000000000000000021245 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/__init__.py0000644000175100001710000000103400000000000023354 0ustar00runnerdocker"""Read resources contained within a package.""" from ._common import ( as_file, files, ) from importlib_resources._py3 import ( Package, Resource, contents, is_resource, open_binary, open_text, path, read_binary, read_text, ) from importlib_resources.abc import ResourceReader __all__ = [ 'Package', 'Resource', 'ResourceReader', 'as_file', 'contents', 'files', 'is_resource', 'open_binary', 'open_text', 'path', 'read_binary', 'read_text', ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/_adapters.py0000644000175100001710000000350600000000000023565 0ustar00runnerdockerfrom contextlib import suppress from . import abc class SpecLoaderAdapter: """ Adapt a package spec to adapt the underlying loader. """ def __init__(self, spec, adapter=lambda spec: spec.loader): self.spec = spec self.loader = adapter(spec) def __getattr__(self, name): return getattr(self.spec, name) class TraversableResourcesLoader: """ Adapt a loader to provide TraversableResources. """ def __init__(self, spec): self.spec = spec def get_resource_reader(self, name): return DegenerateFiles(self.spec)._native() class DegenerateFiles: """ Adapter for an existing or non-existant resource reader to provide a degenerate .files(). """ class Path(abc.Traversable): def iterdir(self): return iter(()) def is_dir(self): return False is_file = exists = is_dir # type: ignore def joinpath(self, other): return DegenerateFiles.Path() def name(self): return '' def open(self): raise ValueError() def __init__(self, spec): self.spec = spec @property def _reader(self): with suppress(AttributeError): return self.spec.loader.get_resource_reader(self.spec.name) def _native(self): """ Return the native reader if it supports files(). """ reader = self._reader return reader if hasattr(reader, 'files') else self def __getattr__(self, attr): return getattr(self._reader, attr) def files(self): return DegenerateFiles.Path() def wrap_spec(package): """ Construct a package spec with traversable compatibility on the spec/loader/reader. """ return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/_common.py0000644000175100001710000000573600000000000023261 0ustar00runnerdockerimport os import pathlib import tempfile import functools import contextlib import types import importlib from typing import Union, Any, Optional from .abc import ResourceReader from ._compat import wrap_spec Package = Union[types.ModuleType, str] def files(package): """ Get a Traversable resource from a package """ return from_package(get_package(package)) def normalize_path(path): # type: (Any) -> str """Normalize a path by ensuring it is a string. If the resulting string contains path separators, an exception is raised. 
""" str_path = str(path) parent, file_name = os.path.split(str_path) if parent: raise ValueError('{!r} must be only a file name'.format(path)) return file_name def get_resource_reader(package): # type: (types.ModuleType) -> Optional[ResourceReader] """ Return the package's loader if it's a ResourceReader. """ # We can't use # a issubclass() check here because apparently abc.'s __subclasscheck__() # hook wants to create a weak reference to the object, but # zipimport.zipimporter does not support weak references, resulting in a # TypeError. That seems terrible. spec = package.__spec__ reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore if reader is None: return None return reader(spec.name) # type: ignore def resolve(cand): # type: (Package) -> types.ModuleType return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand) def get_package(package): # type: (Package) -> types.ModuleType """Take a package name or module object and return the module. Raise an exception if the resolved module is not a package. """ resolved = resolve(package) if wrap_spec(resolved).submodule_search_locations is None: raise TypeError('{!r} is not a package'.format(package)) return resolved def from_package(package): """ Return a Traversable object for the given package. """ spec = wrap_spec(package) reader = spec.loader.get_resource_reader(spec.name) return reader.files() @contextlib.contextmanager def _tempfile(reader, suffix=''): # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' # blocks due to the need to close the temporary file to work on Windows # properly. fd, raw_path = tempfile.mkstemp(suffix=suffix) try: os.write(fd, reader()) os.close(fd) del reader yield pathlib.Path(raw_path) finally: try: os.remove(raw_path) except FileNotFoundError: pass @functools.singledispatch def as_file(path): """ Given a Traversable object, return that object as a path on the local file system in a context manager. """ return _tempfile(path.read_bytes, suffix=path.name) @as_file.register(pathlib.Path) @contextlib.contextmanager def _(path): """ Degenerate behavior for pathlib.Path objects. """ yield path ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/_compat.py0000644000175100001710000000433100000000000023242 0ustar00runnerdocker# flake8: noqa import abc import sys import pathlib from contextlib import suppress try: from zipfile import Path as ZipPath # type: ignore except ImportError: from zipp import Path as ZipPath # type: ignore try: from typing import runtime_checkable # type: ignore except ImportError: def runtime_checkable(cls): # type: ignore return cls try: from typing import Protocol # type: ignore except ImportError: Protocol = abc.ABC # type: ignore class TraversableResourcesLoader: """ Adapt loaders to provide TraversableResources and other compatibility. """ def __init__(self, spec): self.spec = spec @property def path(self): return self.spec.origin def get_resource_reader(self, name): from . 
import readers, _adapters def _zip_reader(spec): with suppress(AttributeError): return readers.ZipReader(spec.loader, spec.name) def _namespace_reader(spec): with suppress(AttributeError, ValueError): return readers.NamespaceReader(spec.submodule_search_locations) def _available_reader(spec): with suppress(AttributeError): return spec.loader.get_resource_reader(spec.name) def _native_reader(spec): reader = _available_reader(spec) return reader if hasattr(reader, 'files') else None def _file_reader(spec): if pathlib.Path(self.path).exists(): return readers.FileReader(self) return ( # native reader if it supplies 'files' _native_reader(self.spec) or # local ZipReader if a zip module _zip_reader(self.spec) or # local NamespaceReader if a namespace module _namespace_reader(self.spec) or # local FileReader _file_reader(self.spec) or _adapters.DegenerateFiles(self.spec) ) def wrap_spec(package): """ Construct a package spec with traversable compatibility on the spec/loader/reader. """ from . import _adapters return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/_py3.py0000644000175100001710000001250400000000000022473 0ustar00runnerdockerimport os import io from . import _common from contextlib import suppress from importlib.abc import ResourceLoader from importlib.machinery import ModuleSpec from io import BytesIO, TextIOWrapper from pathlib import Path from types import ModuleType from typing import ContextManager, Iterable, Union from typing import cast from typing.io import BinaryIO, TextIO from collections.abc import Sequence from functools import singledispatch Package = Union[str, ModuleType] Resource = Union[str, os.PathLike] def open_binary(package: Package, resource: Resource) -> BinaryIO: """Return a file-like object opened for binary reading of the resource.""" resource = _common.normalize_path(resource) package = _common.get_package(package) reader = _common.get_resource_reader(package) if reader is not None: return reader.open_resource(resource) spec = cast(ModuleSpec, package.__spec__) # Using pathlib doesn't work well here due to the lack of 'strict' # argument for pathlib.Path.resolve() prior to Python 3.6. if spec.submodule_search_locations is not None: paths = spec.submodule_search_locations elif spec.origin is not None: paths = [os.path.dirname(os.path.abspath(spec.origin))] for package_path in paths: full_path = os.path.join(package_path, resource) try: return open(full_path, mode='rb') except OSError: # Just assume the loader is a resource loader; all the relevant # importlib.machinery loaders are and an AttributeError for # get_data() will make it clear what is needed from the loader. 
loader = cast(ResourceLoader, spec.loader) data = None if hasattr(spec.loader, 'get_data'): with suppress(OSError): data = loader.get_data(full_path) if data is not None: return BytesIO(data) raise FileNotFoundError( '{!r} resource not found in {!r}'.format(resource, spec.name) ) def open_text( package: Package, resource: Resource, encoding: str = 'utf-8', errors: str = 'strict', ) -> TextIO: """Return a file-like object opened for text reading of the resource.""" return TextIOWrapper( open_binary(package, resource), encoding=encoding, errors=errors ) def read_binary(package: Package, resource: Resource) -> bytes: """Return the binary contents of the resource.""" with open_binary(package, resource) as fp: return fp.read() def read_text( package: Package, resource: Resource, encoding: str = 'utf-8', errors: str = 'strict', ) -> str: """Return the decoded string of the resource. The decoding-related arguments have the same semantics as those of bytes.decode(). """ with open_text(package, resource, encoding, errors) as fp: return fp.read() def path( package: Package, resource: Resource, ) -> 'ContextManager[Path]': """A context manager providing a file path object to the resource. If the resource does not already exist on its own on the file system, a temporary file will be created. If the file was created, the file will be deleted upon exiting the context manager (no exception is raised if the file was deleted prior to the context manager exiting). """ reader = _common.get_resource_reader(_common.get_package(package)) return ( _path_from_reader(reader, _common.normalize_path(resource)) if reader else _common.as_file( _common.files(package).joinpath(_common.normalize_path(resource)) ) ) def _path_from_reader(reader, resource): return _path_from_resource_path(reader, resource) or _path_from_open_resource( reader, resource ) def _path_from_resource_path(reader, resource): with suppress(FileNotFoundError): return Path(reader.resource_path(resource)) def _path_from_open_resource(reader, resource): saved = io.BytesIO(reader.open_resource(resource).read()) return _common._tempfile(saved.read, suffix=resource) def is_resource(package: Package, name: str) -> bool: """True if `name` is a resource inside `package`. Directories are *not* resources. """ package = _common.get_package(package) _common.normalize_path(name) reader = _common.get_resource_reader(package) if reader is not None: return reader.is_resource(name) package_contents = set(contents(package)) if name not in package_contents: return False return (_common.from_package(package) / name).is_file() def contents(package: Package) -> Iterable[str]: """Return an iterable of entries in `package`. Note that not all entries are resources. Specifically, directories are not considered resources. Use `is_resource()` on each entry returned here to check if it is a resource or not. 
""" package = _common.get_package(package) reader = _common.get_resource_reader(package) if reader is not None: return _ensure_sequence(reader.contents()) transversable = _common.from_package(package) if transversable.is_dir(): return list(item.name for item in transversable.iterdir()) return [] @singledispatch def _ensure_sequence(iterable): return list(iterable) @_ensure_sequence.register(Sequence) def _(iterable): return iterable ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/abc.py0000644000175100001710000000745000000000000022352 0ustar00runnerdockerimport abc from typing import BinaryIO, Iterable, Text from ._compat import runtime_checkable, Protocol class ResourceReader(metaclass=abc.ABCMeta): """Abstract base class for loaders to provide resource reading support.""" @abc.abstractmethod def open_resource(self, resource: Text) -> BinaryIO: """Return an opened, file-like object for binary reading. The 'resource' argument is expected to represent only a file name. If the resource cannot be found, FileNotFoundError is raised. """ # This deliberately raises FileNotFoundError instead of # NotImplementedError so that if this method is accidentally called, # it'll still do the right thing. raise FileNotFoundError @abc.abstractmethod def resource_path(self, resource: Text) -> Text: """Return the file system path to the specified resource. The 'resource' argument is expected to represent only a file name. If the resource does not exist on the file system, raise FileNotFoundError. """ # This deliberately raises FileNotFoundError instead of # NotImplementedError so that if this method is accidentally called, # it'll still do the right thing. raise FileNotFoundError @abc.abstractmethod def is_resource(self, path: Text) -> bool: """Return True if the named 'path' is a resource. Files are resources, directories are not. """ raise FileNotFoundError @abc.abstractmethod def contents(self) -> Iterable[str]: """Return an iterable of entries in `package`.""" raise FileNotFoundError @runtime_checkable class Traversable(Protocol): """ An object with a subset of pathlib.Path methods suitable for traversing directories and opening files. """ @abc.abstractmethod def iterdir(self): """ Yield Traversable objects in self """ def read_bytes(self): """ Read contents of self as bytes """ with self.open('rb') as strm: return strm.read() def read_text(self, encoding=None): """ Read contents of self as text """ with self.open(encoding=encoding) as strm: return strm.read() @abc.abstractmethod def is_dir(self) -> bool: """ Return True if self is a dir """ @abc.abstractmethod def is_file(self) -> bool: """ Return True if self is a file """ @abc.abstractmethod def joinpath(self, child): """ Return Traversable child in self """ def __truediv__(self, child): """ Return Traversable child in self """ return self.joinpath(child) @abc.abstractmethod def open(self, mode='r', *args, **kwargs): """ mode may be 'r' or 'rb' to open as text or binary. Return a handle suitable for reading (same as pathlib.Path.open). When opening as text, accepts encoding parameters such as those accepted by io.TextIOWrapper. """ @abc.abstractproperty def name(self) -> str: """ The base name of this object without any parent references. """ class TraversableResources(ResourceReader): """ The required interface for providing traversable resources. 
""" @abc.abstractmethod def files(self): """Return a Traversable object for the loaded package.""" def open_resource(self, resource): return self.files().joinpath(resource).open('rb') def resource_path(self, resource): raise FileNotFoundError(resource) def is_resource(self, path): return self.files().joinpath(path).is_file() def contents(self): return (item.name for item in self.files().iterdir()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/py.typed0000644000175100001710000000000000000000000022732 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/readers.py0000644000175100001710000000703000000000000023244 0ustar00runnerdockerimport collections import pathlib from . import abc from ._compat import ZipPath def remove_duplicates(items): return iter(collections.OrderedDict.fromkeys(items)) class FileReader(abc.TraversableResources): def __init__(self, loader): self.path = pathlib.Path(loader.path).parent def resource_path(self, resource): """ Return the file system path to prevent `resources.path()` from creating a temporary copy. """ return str(self.path.joinpath(resource)) def files(self): return self.path class ZipReader(abc.TraversableResources): def __init__(self, loader, module): _, _, name = module.rpartition('.') self.prefix = loader.prefix.replace('\\', '/') + name + '/' self.archive = loader.archive def open_resource(self, resource): try: return super().open_resource(resource) except KeyError as exc: raise FileNotFoundError(exc.args[0]) def is_resource(self, path): # workaround for `zipfile.Path.is_file` returning true # for non-existent paths. target = self.files().joinpath(path) return target.is_file() and target.exists() def files(self): return ZipPath(self.archive, self.prefix) class MultiplexedPath(abc.Traversable): """ Given a series of Traversable objects, implement a merged version of the interface across all objects. Useful for namespace packages which may be multihomed at a single name. 
""" def __init__(self, *paths): self._paths = list(map(pathlib.Path, remove_duplicates(paths))) if not self._paths: message = 'MultiplexedPath must contain at least one path' raise FileNotFoundError(message) if not all(path.is_dir() for path in self._paths): raise NotADirectoryError('MultiplexedPath only supports directories') def iterdir(self): visited = [] for path in self._paths: for file in path.iterdir(): if file.name in visited: continue visited.append(file.name) yield file def read_bytes(self): raise FileNotFoundError(f'{self} is not a file') def read_text(self, *args, **kwargs): raise FileNotFoundError(f'{self} is not a file') def is_dir(self): return True def is_file(self): return False def joinpath(self, child): # first try to find child in current paths for file in self.iterdir(): if file.name == child: return file # if it does not exist, construct it with the first path return self._paths[0] / child __truediv__ = joinpath def open(self, *args, **kwargs): raise FileNotFoundError('{} is not a file'.format(self)) def name(self): return self._paths[0].name def __repr__(self): return 'MultiplexedPath({})'.format( ', '.join("'{}'".format(path) for path in self._paths) ) class NamespaceReader(abc.TraversableResources): def __init__(self, namespace_path): if 'NamespacePath' not in str(namespace_path): raise ValueError('Invalid path') self.path = MultiplexedPath(*list(namespace_path)) def resource_path(self, resource): """ Return the file system path to prevent `resources.path()` from creating a temporary copy. """ return str(self.path.joinpath(resource)) def files(self): return self.path ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/simple.py0000644000175100001710000000542400000000000023115 0ustar00runnerdocker""" Interface adapters for low-level readers. """ import abc import io import itertools from typing import BinaryIO, List from .abc import Traversable, TraversableResources class SimpleReader(abc.ABC): """ The minimum, low-level interface required from a resource provider. """ @abc.abstractproperty def package(self): # type: () -> str """ The name of the package for which this reader loads resources. """ @abc.abstractmethod def children(self): # type: () -> List['SimpleReader'] """ Obtain an iterable of SimpleReader for available child containers (e.g. directories). """ @abc.abstractmethod def resources(self): # type: () -> List[str] """ Obtain available named resources for this virtual package. """ @abc.abstractmethod def open_binary(self, resource): # type: (str) -> BinaryIO """ Obtain a File-like for a named resource. """ @property def name(self): return self.package.split('.')[-1] class ResourceHandle(Traversable): """ Handle to a named resource in a ResourceReader. """ def __init__(self, parent, name): # type: (ResourceContainer, str) -> None self.parent = parent self.name = name # type: ignore def is_file(self): return True def is_dir(self): return False def open(self, mode='r', *args, **kwargs): stream = self.parent.reader.open_binary(self.name) if 'b' not in mode: stream = io.TextIOWrapper(*args, **kwargs) return stream def joinpath(self, name): raise RuntimeError("Cannot traverse into a resource") class ResourceContainer(Traversable): """ Traversable container for a package's resources via its reader. 
""" def __init__(self, reader): # type: (SimpleReader) -> None self.reader = reader def is_dir(self): return True def is_file(self): return False def iterdir(self): files = (ResourceHandle(self, name) for name in self.reader.resources) dirs = map(ResourceContainer, self.reader.children()) return itertools.chain(files, dirs) def open(self, *args, **kwargs): raise IsADirectoryError() def joinpath(self, name): return next( traversable for traversable in self.iterdir() if traversable.name == name ) class TraversableReader(TraversableResources, SimpleReader): """ A TraversableResources based on SimpleReader. Resource providers may derive from this class to provide the TraversableResources interface by supplying the SimpleReader interface. """ def files(self): return ResourceContainer(self) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/0000755000175100001710000000000000000000000022407 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/__init__.py0000644000175100001710000000000000000000000024506 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/_compat.py0000644000175100001710000000066300000000000024410 0ustar00runnerdockerimport os try: from test.support import import_helper # type: ignore except ImportError: # Python 3.9 and earlier class import_helper: # type: ignore from test.support import modules_setup, modules_cleanup try: # Python 3.10 from test.support.os_helper import unlink except ImportError: from test.support import unlink as _unlink def unlink(target): return _unlink(os.fspath(target)) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/data01/0000755000175100001710000000000000000000000023461 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data01/__init__.py0000644000175100001710000000000000000000000025560 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data01/binary.file0000644000175100001710000000000400000000000025600 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/data01/subdirectory/0000755000175100001710000000000000000000000026177 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data01/subdirectory/__init__.py0000644000175100001710000000000000000000000030276 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data01/subdirectory/binary.file0000644000175100001710000000000400000000000030316 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data01/utf-16.file0000644000175100001710000000005400000000000025343 0ustar00runnerdockerÿþHello, UTF-16 world! 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data01/utf-8.file0000644000175100001710000000002400000000000025261 0ustar00runnerdockerHello, UTF-8 world! ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/data02/0000755000175100001710000000000000000000000023462 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data02/__init__.py0000644000175100001710000000000000000000000025561 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/data02/one/0000755000175100001710000000000000000000000024243 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data02/one/__init__.py0000644000175100001710000000000000000000000026342 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data02/one/resource1.txt0000644000175100001710000000001500000000000026710 0ustar00runnerdockerone resource ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/data02/two/0000755000175100001710000000000000000000000024273 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data02/two/__init__.py0000644000175100001710000000000000000000000026372 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/data02/two/resource2.txt0000644000175100001710000000001500000000000026741 0ustar00runnerdockertwo resource ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/namespacedata01/0000755000175100001710000000000000000000000025336 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/namespacedata01/binary.file0000644000175100001710000000000400000000000027455 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/namespacedata01/utf-16.file0000644000175100001710000000005400000000000027220 0ustar00runnerdockerÿþHello, UTF-16 world! ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/namespacedata01/utf-8.file0000644000175100001710000000002400000000000027136 0ustar00runnerdockerHello, UTF-8 world! ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/test_files.py0000644000175100001710000000176500000000000025133 0ustar00runnerdockerimport typing import unittest import importlib_resources as resources from importlib_resources.abc import Traversable from . import data01 from . 
import util class FilesTests: def test_read_bytes(self): files = resources.files(self.data) actual = files.joinpath('utf-8.file').read_bytes() assert actual == b'Hello, UTF-8 world!\n' def test_read_text(self): files = resources.files(self.data) actual = files.joinpath('utf-8.file').read_text() assert actual == 'Hello, UTF-8 world!\n' @unittest.skipUnless( hasattr(typing, 'runtime_checkable'), "Only suitable when typing supports runtime_checkable", ) def test_traversable(self): assert isinstance(resources.files(self.data), Traversable) class OpenDiskTests(FilesTests, unittest.TestCase): def setUp(self): self.data = data01 class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): pass if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/test_open.py0000644000175100001710000000446700000000000024774 0ustar00runnerdockerimport unittest import importlib_resources as resources from . import data01 from . import util class CommonBinaryTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): with resources.open_binary(package, path): pass class CommonTextTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): with resources.open_text(package, path): pass class OpenTests: def test_open_binary(self): with resources.open_binary(self.data, 'utf-8.file') as fp: result = fp.read() self.assertEqual(result, b'Hello, UTF-8 world!\n') def test_open_text_default_encoding(self): with resources.open_text(self.data, 'utf-8.file') as fp: result = fp.read() self.assertEqual(result, 'Hello, UTF-8 world!\n') def test_open_text_given_encoding(self): with resources.open_text(self.data, 'utf-16.file', 'utf-16', 'strict') as fp: result = fp.read() self.assertEqual(result, 'Hello, UTF-16 world!\n') def test_open_text_with_errors(self): # Raises UnicodeError without the 'errors' argument. with resources.open_text(self.data, 'utf-16.file', 'utf-8', 'strict') as fp: self.assertRaises(UnicodeError, fp.read) with resources.open_text(self.data, 'utf-16.file', 'utf-8', 'ignore') as fp: result = fp.read() self.assertEqual( result, 'H\x00e\x00l\x00l\x00o\x00,\x00 ' '\x00U\x00T\x00F\x00-\x001\x006\x00 ' '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', ) def test_open_binary_FileNotFoundError(self): self.assertRaises( FileNotFoundError, resources.open_binary, self.data, 'does-not-exist' ) def test_open_text_FileNotFoundError(self): self.assertRaises( FileNotFoundError, resources.open_text, self.data, 'does-not-exist' ) class OpenDiskTests(OpenTests, unittest.TestCase): def setUp(self): self.data = data01 class OpenDiskNamespaceTests(OpenTests, unittest.TestCase): def setUp(self): from . import namespacedata01 self.data = namespacedata01 class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): pass if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/test_path.py0000644000175100001710000000362300000000000024760 0ustar00runnerdockerimport io import unittest import importlib_resources as resources from . import data01 from . import util class CommonTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): with resources.path(package, path): pass class PathTests: def test_reading(self): # Path should be readable. # Test also implicitly verifies the returned object is a pathlib.Path # instance. 
with resources.path(self.data, 'utf-8.file') as path: self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) # pathlib.Path.read_text() was introduced in Python 3.5. with path.open('r', encoding='utf-8') as file: text = file.read() self.assertEqual('Hello, UTF-8 world!\n', text) class PathDiskTests(PathTests, unittest.TestCase): data = data01 def test_natural_path(self): """ Guarantee the internal implementation detail that file-system-backed resources do not get the tempdir treatment. """ with resources.path(self.data, 'utf-8.file') as path: assert 'data' in str(path) class PathMemoryTests(PathTests, unittest.TestCase): def setUp(self): file = io.BytesIO(b'Hello, UTF-8 world!\n') self.addCleanup(file.close) self.data = util.create_package( file=file, path=FileNotFoundError("package exists only in memory") ) self.data.__spec__.origin = None self.data.__spec__.has_location = False class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): def test_remove_in_context_manager(self): # It is not an error if the file that was temporarily stashed on the # file system is removed inside the `with` stanza. with resources.path(self.data, 'utf-8.file') as path: path.unlink() if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/test_read.py0000644000175100001710000000372400000000000024741 0ustar00runnerdockerimport unittest import importlib_resources as resources from . import data01 from . import util from importlib import import_module class CommonBinaryTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): resources.read_binary(package, path) class CommonTextTests(util.CommonTests, unittest.TestCase): def execute(self, package, path): resources.read_text(package, path) class ReadTests: def test_read_binary(self): result = resources.read_binary(self.data, 'binary.file') self.assertEqual(result, b'\0\1\2\3') def test_read_text_default_encoding(self): result = resources.read_text(self.data, 'utf-8.file') self.assertEqual(result, 'Hello, UTF-8 world!\n') def test_read_text_given_encoding(self): result = resources.read_text(self.data, 'utf-16.file', encoding='utf-16') self.assertEqual(result, 'Hello, UTF-16 world!\n') def test_read_text_with_errors(self): # Raises UnicodeError without the 'errors' argument. 
self.assertRaises(UnicodeError, resources.read_text, self.data, 'utf-16.file') result = resources.read_text(self.data, 'utf-16.file', errors='ignore') self.assertEqual( result, 'H\x00e\x00l\x00l\x00o\x00,\x00 ' '\x00U\x00T\x00F\x00-\x001\x006\x00 ' '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', ) class ReadDiskTests(ReadTests, unittest.TestCase): data = data01 class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): def test_read_submodule_resource(self): submodule = import_module('ziptestdata.subdirectory') result = resources.read_binary(submodule, 'binary.file') self.assertEqual(result, b'\0\1\2\3') def test_read_submodule_resource_by_name(self): result = resources.read_binary('ziptestdata.subdirectory', 'binary.file') self.assertEqual(result, b'\0\1\2\3') if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/test_reader.py0000644000175100001710000001012600000000000025262 0ustar00runnerdockerimport os.path import sys import pathlib import unittest from importlib import import_module from importlib_resources.readers import MultiplexedPath, NamespaceReader class MultiplexedPathTest(unittest.TestCase): @classmethod def setUpClass(cls): path = pathlib.Path(__file__).parent / 'namespacedata01' cls.folder = str(path) def test_init_no_paths(self): with self.assertRaises(FileNotFoundError): MultiplexedPath() def test_init_file(self): with self.assertRaises(NotADirectoryError): MultiplexedPath(os.path.join(self.folder, 'binary.file')) def test_iterdir(self): contents = {path.name for path in MultiplexedPath(self.folder).iterdir()} try: contents.remove('__pycache__') except (KeyError, ValueError): pass self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'}) def test_iterdir_duplicate(self): data01 = os.path.abspath(os.path.join(__file__, '..', 'data01')) contents = { path.name for path in MultiplexedPath(self.folder, data01).iterdir() } for remove in ('__pycache__', '__init__.pyc'): try: contents.remove(remove) except (KeyError, ValueError): pass self.assertEqual( contents, {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'}, ) def test_is_dir(self): self.assertEqual(MultiplexedPath(self.folder).is_dir(), True) def test_is_file(self): self.assertEqual(MultiplexedPath(self.folder).is_file(), False) def test_open_file(self): path = MultiplexedPath(self.folder) with self.assertRaises(FileNotFoundError): path.read_bytes() with self.assertRaises(FileNotFoundError): path.read_text() with self.assertRaises(FileNotFoundError): path.open() def test_join_path(self): print('test_join_path') prefix = os.path.abspath(os.path.join(__file__, '..')) data01 = os.path.join(prefix, 'data01') path = MultiplexedPath(self.folder, data01) self.assertEqual( str(path.joinpath('binary.file'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'binary.file'), ) self.assertEqual( str(path.joinpath('subdirectory'))[len(prefix) + 1 :], os.path.join('data01', 'subdirectory'), ) self.assertEqual( str(path.joinpath('imaginary'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'imaginary'), ) def test_repr(self): self.assertEqual( repr(MultiplexedPath(self.folder)), "MultiplexedPath('{}')".format(self.folder), ) class NamespaceReaderTest(unittest.TestCase): site_dir = str(pathlib.Path(__file__).parent) @classmethod def setUpClass(cls): sys.path.append(cls.site_dir) @classmethod def tearDownClass(cls): sys.path.remove(cls.site_dir) def 
test_init_error(self): with self.assertRaises(ValueError): NamespaceReader(['path1', 'path2']) def test_resource_path(self): namespacedata01 = import_module('namespacedata01') reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) self.assertEqual( reader.resource_path('binary.file'), os.path.join(root, 'binary.file') ) self.assertEqual( reader.resource_path('imaginary'), os.path.join(root, 'imaginary') ) def test_files(self): namespacedata01 = import_module('namespacedata01') reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) self.assertIsInstance(reader.files(), MultiplexedPath) self.assertEqual(repr(reader.files()), "MultiplexedPath('{}')".format(root)) if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/test_resource.py0000644000175100001710000002026300000000000025652 0ustar00runnerdockerimport sys import unittest import importlib_resources as resources import uuid import pathlib from . import data01 from . import zipdata01, zipdata02 from . import util from importlib import import_module from ._compat import import_helper, unlink class ResourceTests: # Subclasses are expected to set the `data` attribute. def test_is_resource_good_path(self): self.assertTrue(resources.is_resource(self.data, 'binary.file')) def test_is_resource_missing(self): self.assertFalse(resources.is_resource(self.data, 'not-a-file')) def test_is_resource_subresource_directory(self): # Directories are not resources. self.assertFalse(resources.is_resource(self.data, 'subdirectory')) def test_contents(self): contents = set(resources.contents(self.data)) # There may be cruft in the directory listing of the data directory. # It could have a __pycache__ directory, # an artifact of the # test suite importing these modules, which # are not germane to this test, so just filter them out. contents.discard('__pycache__') self.assertEqual( contents, { '__init__.py', 'subdirectory', 'utf-8.file', 'binary.file', 'utf-16.file', }, ) class ResourceDiskTests(ResourceTests, unittest.TestCase): def setUp(self): self.data = data01 class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): pass class ResourceLoaderTests(unittest.TestCase): def test_resource_contents(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C'] ) self.assertEqual(set(resources.contents(package)), {'A', 'B', 'C'}) def test_resource_is_resource(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] ) self.assertTrue(resources.is_resource(package, 'B')) def test_resource_directory_is_not_resource(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] ) self.assertFalse(resources.is_resource(package, 'D')) def test_resource_missing_is_not_resource(self): package = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] ) self.assertFalse(resources.is_resource(package, 'Z')) class ResourceCornerCaseTests(unittest.TestCase): def test_package_has_no_reader_fallback(self): # Test odd ball packages which: # 1. Do not have a ResourceReader as a loader # 2. Are not on the file system # 3. 
Are not in a zip file module = util.create_package( file=data01, path=data01.__file__, contents=['A', 'B', 'C'] ) # Give the module a dummy loader. module.__loader__ = object() # Give the module a dummy origin. module.__file__ = '/path/which/shall/not/be/named' module.__spec__.loader = module.__loader__ module.__spec__.origin = module.__file__ self.assertFalse(resources.is_resource(module, 'A')) class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): ZIP_MODULE = zipdata01 # type: ignore def test_is_submodule_resource(self): submodule = import_module('ziptestdata.subdirectory') self.assertTrue(resources.is_resource(submodule, 'binary.file')) def test_read_submodule_resource_by_name(self): self.assertTrue( resources.is_resource('ziptestdata.subdirectory', 'binary.file') ) def test_submodule_contents(self): submodule = import_module('ziptestdata.subdirectory') self.assertEqual( set(resources.contents(submodule)), {'__init__.py', 'binary.file'} ) def test_submodule_contents_by_name(self): self.assertEqual( set(resources.contents('ziptestdata.subdirectory')), {'__init__.py', 'binary.file'}, ) class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): ZIP_MODULE = zipdata02 # type: ignore def test_unrelated_contents(self): """ Test that a zip with two unrelated subpackages returns distinct resources. Ref python/importlib_resources#44. """ self.assertEqual( set(resources.contents('ziptestdata.one')), {'__init__.py', 'resource1.txt'} ) self.assertEqual( set(resources.contents('ziptestdata.two')), {'__init__.py', 'resource2.txt'} ) class DeletingZipsTest(unittest.TestCase): """Having accessed resources in a zip file should not keep an open reference to the zip. """ ZIP_MODULE = zipdata01 def setUp(self): modules = import_helper.modules_setup() self.addCleanup(import_helper.modules_cleanup, *modules) data_path = pathlib.Path(self.ZIP_MODULE.__file__) data_dir = data_path.parent self.source_zip_path = data_dir / 'ziptestdata.zip' self.zip_path = pathlib.Path('{}.zip'.format(uuid.uuid4())).absolute() self.zip_path.write_bytes(self.source_zip_path.read_bytes()) sys.path.append(str(self.zip_path)) self.data = import_module('ziptestdata') def tearDown(self): try: sys.path.remove(str(self.zip_path)) except ValueError: pass try: del sys.path_importer_cache[str(self.zip_path)] del sys.modules[self.data.__name__] except KeyError: pass try: unlink(self.zip_path) except OSError: # If the test fails, this will probably fail too pass def test_contents_does_not_keep_open(self): c = resources.contents('ziptestdata') self.zip_path.unlink() del c def test_is_resource_does_not_keep_open(self): c = resources.is_resource('ziptestdata', 'binary.file') self.zip_path.unlink() del c def test_is_resource_failure_does_not_keep_open(self): c = resources.is_resource('ziptestdata', 'not-present') self.zip_path.unlink() del c @unittest.skip("Desired but not supported.") def test_path_does_not_keep_open(self): c = resources.path('ziptestdata', 'binary.file') self.zip_path.unlink() del c def test_entered_path_does_not_keep_open(self): # This is what certifi does on import to make its bundle # available for the process duration. 
c = resources.path('ziptestdata', 'binary.file').__enter__() self.zip_path.unlink() del c def test_read_binary_does_not_keep_open(self): c = resources.read_binary('ziptestdata', 'binary.file') self.zip_path.unlink() del c def test_read_text_does_not_keep_open(self): c = resources.read_text('ziptestdata', 'utf-8.file', encoding='utf-8') self.zip_path.unlink() del c class ResourceFromNamespaceTest01(unittest.TestCase): site_dir = str(pathlib.Path(__file__).parent) @classmethod def setUpClass(cls): sys.path.append(cls.site_dir) @classmethod def tearDownClass(cls): sys.path.remove(cls.site_dir) def test_is_submodule_resource(self): self.assertTrue( resources.is_resource(import_module('namespacedata01'), 'binary.file') ) def test_read_submodule_resource_by_name(self): self.assertTrue(resources.is_resource('namespacedata01', 'binary.file')) def test_submodule_contents(self): contents = set(resources.contents(import_module('namespacedata01'))) try: contents.remove('__pycache__') except KeyError: pass self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) def test_submodule_contents_by_name(self): contents = set(resources.contents('namespacedata01')) try: contents.remove('__pycache__') except KeyError: pass self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/update-zips.py0000755000175100001710000000260700000000000025236 0ustar00runnerdocker""" Generate the zip test data files. Run to build the tests/zipdataNN/ziptestdata.zip files from files in tests/dataNN. Replaces the file with the working copy, but does not commit anything to the source repo. """ import contextlib import os import pathlib import zipfile def main(): """ >>> from unittest import mock >>> monkeypatch = getfixture('monkeypatch') >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock()) >>> print(); main() # print workaround for bpo-32509 ...data01... -> ziptestdata/... ... ...data02... -> ziptestdata/... ... """ suffixes = '01', '02' tuple(map(generate, suffixes)) def generate(suffix): root = pathlib.Path(__file__).parent.relative_to(os.getcwd()) zfpath = root / f'zipdata{suffix}/ziptestdata.zip' with zipfile.ZipFile(zfpath, 'w') as zf: for src, rel in walk(root / f'data{suffix}'): dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix()) print(src, '->', dst) zf.write(src, dst) def walk(datapath): for dirpath, dirnames, filenames in os.walk(datapath): with contextlib.suppress(KeyError): dirnames.remove('__pycache__') for filename in filenames: res = pathlib.Path(dirpath) / filename rel = res.relative_to(datapath) yield res, rel __name__ == '__main__' and main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/util.py0000644000175100001710000001176600000000000023745 0ustar00runnerdockerimport abc import importlib import io import sys import types from pathlib import Path, PurePath from . import data01 from . 
import zipdata01 from ..abc import ResourceReader from ._compat import import_helper from importlib.machinery import ModuleSpec class Reader(ResourceReader): def __init__(self, **kwargs): vars(self).update(kwargs) def get_resource_reader(self, package): return self def open_resource(self, path): self._path = path if isinstance(self.file, Exception): raise self.file else: return self.file def resource_path(self, path_): self._path = path_ if isinstance(self.path, Exception): raise self.path else: return self.path def is_resource(self, path_): self._path = path_ if isinstance(self.path, Exception): raise self.path for entry in self._contents: parts = entry.split('/') if len(parts) == 1 and parts[0] == path_: return True return False def contents(self): if isinstance(self.path, Exception): raise self.path yield from self._contents def create_package(file, path, is_package=True, contents=()): name = 'testingpackage' module = types.ModuleType(name) loader = Reader(file=file, path=path, _contents=contents) spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package) module.__spec__ = spec module.__loader__ = loader return module class CommonTests(metaclass=abc.ABCMeta): @abc.abstractmethod def execute(self, package, path): raise NotImplementedError def test_package_name(self): # Passing in the package name should succeed. self.execute(data01.__name__, 'utf-8.file') def test_package_object(self): # Passing in the package itself should succeed. self.execute(data01, 'utf-8.file') def test_string_path(self): # Passing in a string for the path should succeed. path = 'utf-8.file' self.execute(data01, path) def test_pathlib_path(self): # Passing in a pathlib.PurePath object for the path should succeed. path = PurePath('utf-8.file') self.execute(data01, path) def test_absolute_path(self): # An absolute path is a ValueError. path = Path(__file__) full_path = path.parent / 'utf-8.file' with self.assertRaises(ValueError): self.execute(data01, full_path) def test_relative_path(self): # A relative path is a ValueError. with self.assertRaises(ValueError): self.execute(data01, '../data01/utf-8.file') def test_importing_module_as_side_effect(self): # The anchor package can already be imported. del sys.modules[data01.__name__] self.execute(data01.__name__, 'utf-8.file') def test_non_package_by_name(self): # The anchor package cannot be a module. with self.assertRaises(TypeError): self.execute(__name__, 'utf-8.file') def test_non_package_by_package(self): # The anchor package cannot be a module. 
with self.assertRaises(TypeError): module = sys.modules['importlib_resources.tests.util'] self.execute(module, 'utf-8.file') def test_resource_opener(self): bytes_data = io.BytesIO(b'Hello, world!') package = create_package(file=bytes_data, path=FileNotFoundError()) self.execute(package, 'utf-8.file') self.assertEqual(package.__loader__._path, 'utf-8.file') def test_resource_path(self): bytes_data = io.BytesIO(b'Hello, world!') path = __file__ package = create_package(file=bytes_data, path=path) self.execute(package, 'utf-8.file') self.assertEqual(package.__loader__._path, 'utf-8.file') def test_useless_loader(self): package = create_package(file=FileNotFoundError(), path=FileNotFoundError()) with self.assertRaises(FileNotFoundError): self.execute(package, 'utf-8.file') class ZipSetupBase: ZIP_MODULE = None @classmethod def setUpClass(cls): data_path = Path(cls.ZIP_MODULE.__file__) data_dir = data_path.parent cls._zip_path = str(data_dir / 'ziptestdata.zip') sys.path.append(cls._zip_path) cls.data = importlib.import_module('ziptestdata') @classmethod def tearDownClass(cls): try: sys.path.remove(cls._zip_path) except ValueError: pass try: del sys.path_importer_cache[cls._zip_path] del sys.modules[cls.data.__name__] except KeyError: pass try: del cls.data del cls._zip_path except AttributeError: pass def setUp(self): modules = import_helper.modules_setup() self.addCleanup(import_helper.modules_cleanup, *modules) class ZipSetup(ZipSetupBase): ZIP_MODULE = zipdata01 # type: ignore ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/zipdata01/0000755000175100001710000000000000000000000024204 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/zipdata01/__init__.py0000644000175100001710000000000000000000000026303 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/zipdata01/ziptestdata.zip0000644000175100001710000000155400000000000027271 0ustar00runnerdockerPKC³fQ³‹"<,,ziptestdata/utf-16.fileÿþHello, UTF-16 world! PKC³fQËøÂšziptestdata/utf-8.fileHello, UTF-8 world! 
PKC³fQziptestdata/__init__.pyPKC³fQ†¹‹ziptestdata/binary.filePKC³fQ$ziptestdata/subdirectory/__init__.pyPKC³fQ†¹‹$ziptestdata/subdirectory/binary.filePKC³fQ³‹"<,,¤ziptestdata/utf-16.filePKC³fQËøÂš¤aziptestdata/utf-8.filePKC³fQ¤©ziptestdata/__init__.pyPKC³fQ†¹‹¤Þziptestdata/binary.filePKC³fQ$¤ziptestdata/subdirectory/__init__.pyPKC³fQ†¹‹$¤Yziptestdata/subdirectory/binary.filePK·Ÿ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5542283 importlib_resources-5.1.2/importlib_resources/tests/zipdata02/0000755000175100001710000000000000000000000024205 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/zipdata02/__init__.py0000644000175100001710000000000000000000000026304 0ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/importlib_resources/tests/zipdata02/ziptestdata.zip0000644000175100001710000000127200000000000027267 0ustar00runnerdockerPKC³fQziptestdata/__init__.pyPKC³fQziptestdata/one/__init__.pyPKC³fQÍùü© ziptestdata/one/resource1.txtone resource PKC³fQziptestdata/two/__init__.pyPKC³fQ]o,‰ ziptestdata/two/resource2.txttwo resource PKC³fQ¤ziptestdata/__init__.pyPKC³fQ¤5ziptestdata/one/__init__.pyPKC³fQÍùü© ¤nziptestdata/one/resource1.txtPKC³fQ¤¶ziptestdata/two/__init__.pyPKC³fQ]o,‰ ¤ïziptestdata/two/resource2.txtPKm7././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1614883644.550228 importlib_resources-5.1.2/importlib_resources.egg-info/0000755000175100001710000000000000000000000022737 5ustar00runnerdocker././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883644.0 importlib_resources-5.1.2/importlib_resources.egg-info/PKG-INFO0000644000175100001710000000442200000000000024036 0ustar00runnerdockerMetadata-Version: 2.1 Name: importlib-resources Version: 5.1.2 Summary: Read resources from Python packages Home-page: https://github.com/python/importlib_resources Author: Barry Warsaw Author-email: barry@python.org License: Apache2 Project-URL: Documentation, https://importlib-resources.readthedocs.io/ Description: .. image:: https://img.shields.io/pypi/v/importlib_resources.svg :target: `PyPI link`_ .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg :target: `PyPI link`_ .. _PyPI link: https://pypi.org/project/importlib_resources .. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 :alt: tests .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black :alt: Code style: Black .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest ``importlib_resources`` is a backport of Python standard library `importlib.resources `_ module for older Pythons. Users of Python 3.9 and beyond should use the standard library module, since for these versions, ``importlib_resources`` just delegates to that module. The key goal of this module is to replace parts of `pkg_resources `_ with a solution in Python's stdlib that relies on well-defined APIs. This makes reading resources included in packages easier, with more stable and consistent semantics. 
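        For orientation only, a minimal usage sketch follows; the ``myapp``
        package and its ``data.txt`` resource are hypothetical names used for
        illustration, not part of this distribution::

            import importlib_resources

            # 'myapp' stands in for an installed package that ships data.txt.
            resource = importlib_resources.files('myapp').joinpath('data.txt')
            text = resource.read_text(encoding='utf-8')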
Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Requires-Python: >=3.6 Provides-Extra: testing Provides-Extra: docs ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883644.0 importlib_resources-5.1.2/importlib_resources.egg-info/SOURCES.txt0000644000175100001710000000412500000000000024625 0ustar00runnerdocker.coveragerc .editorconfig .flake8 .gitattributes .gitignore .pre-commit-config.yaml .readthedocs.yml CHANGES.rst LICENSE README.rst codecov.yml mypy.ini pyproject.toml pytest.ini setup.cfg setup.py skeleton.md tox.ini .github/workflows/automerge.yml .github/workflows/main.yml docs/conf.py docs/history.rst docs/index.rst docs/migration.rst docs/using.rst importlib_resources/__init__.py importlib_resources/_adapters.py importlib_resources/_common.py importlib_resources/_compat.py importlib_resources/_py3.py importlib_resources/abc.py importlib_resources/py.typed importlib_resources/readers.py importlib_resources/simple.py importlib_resources.egg-info/PKG-INFO importlib_resources.egg-info/SOURCES.txt importlib_resources.egg-info/dependency_links.txt importlib_resources.egg-info/requires.txt importlib_resources.egg-info/top_level.txt importlib_resources/tests/__init__.py importlib_resources/tests/_compat.py importlib_resources/tests/test_files.py importlib_resources/tests/test_open.py importlib_resources/tests/test_path.py importlib_resources/tests/test_read.py importlib_resources/tests/test_reader.py importlib_resources/tests/test_resource.py importlib_resources/tests/update-zips.py importlib_resources/tests/util.py importlib_resources/tests/data01/__init__.py importlib_resources/tests/data01/binary.file importlib_resources/tests/data01/utf-16.file importlib_resources/tests/data01/utf-8.file importlib_resources/tests/data01/subdirectory/__init__.py importlib_resources/tests/data01/subdirectory/binary.file importlib_resources/tests/data02/__init__.py importlib_resources/tests/data02/one/__init__.py importlib_resources/tests/data02/one/resource1.txt importlib_resources/tests/data02/two/__init__.py importlib_resources/tests/data02/two/resource2.txt importlib_resources/tests/namespacedata01/binary.file importlib_resources/tests/namespacedata01/utf-16.file importlib_resources/tests/namespacedata01/utf-8.file importlib_resources/tests/zipdata01/__init__.py importlib_resources/tests/zipdata01/ziptestdata.zip importlib_resources/tests/zipdata02/__init__.py importlib_resources/tests/zipdata02/ziptestdata.zip././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883644.0 importlib_resources-5.1.2/importlib_resources.egg-info/dependency_links.txt0000644000175100001710000000000100000000000027005 0ustar00runnerdocker ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883644.0 importlib_resources-5.1.2/importlib_resources.egg-info/requires.txt0000644000175100001710000000040500000000000025336 0ustar00runnerdocker [:python_version < "3.8"] zipp>=0.4 [docs] sphinx jaraco.packaging>=8.2 rst.linker>=1.9 [testing] pytest>=4.6 pytest-checkdocs>=1.2.3 pytest-flake8 pytest-cov pytest-enabler [testing:platform_python_implementation != "PyPy"] pytest-black>=0.3.7 pytest-mypy ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883644.0 
importlib_resources-5.1.2/importlib_resources.egg-info/top_level.txt0000644000175100001710000000002400000000000025465 0ustar00runnerdockerimportlib_resources ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/mypy.ini0000644000175100001710000000004500000000000016650 0ustar00runnerdocker[mypy] ignore_missing_imports = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/pyproject.toml0000644000175100001710000000055700000000000020075 0ustar00runnerdocker[build-system] requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4.1"] build-backend = "setuptools.build_meta" [tool.black] skip-string-normalization = true [tool.setuptools_scm] [pytest.enabler.black] addopts = "--black" [pytest.enabler.mypy] addopts = "--mypy" [pytest.enabler.flake8] addopts = "--flake8" [pytest.enabler.cov] addopts = "--cov" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/pytest.ini0000644000175100001710000000030600000000000017202 0ustar00runnerdocker[pytest] norecursedirs=dist build .tox .eggs addopts=--doctest-modules doctest_optionflags=ALLOW_UNICODE ELLIPSIS # workaround for warning pytest-dev/pytest#6178 junit_family=xunit2 filterwarnings= ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1614883644.5582283 importlib_resources-5.1.2/setup.cfg0000644000175100001710000000221100000000000016767 0ustar00runnerdocker[metadata] license_files = LICENSE name = importlib_resources author = Barry Warsaw author_email = barry@python.org description = Read resources from Python packages long_description = file: README.rst url = https://github.com/python/importlib_resources license = Apache2 classifiers = Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: Apache Software License Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only project_urls = Documentation = https://importlib-resources.readthedocs.io/ [options] packages = find_namespace: include_package_data = true python_requires = >=3.6 install_requires = zipp >= 0.4; python_version < '3.8' setup_requires = setuptools_scm[toml] >= 3.4.1 [options.packages.find] exclude = build* dist* docs* tests* [options.extras_require] testing = pytest >= 4.6 pytest-checkdocs >= 1.2.3 pytest-flake8 pytest-black >= 0.3.7; python_implementation != "PyPy" pytest-cov pytest-mypy; python_implementation != "PyPy" pytest-enabler docs = sphinx jaraco.packaging >= 8.2 rst.linker >= 1.9 [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/setup.py0000644000175100001710000000013400000000000016662 0ustar00runnerdocker#!/usr/bin/env python import setuptools if __name__ == "__main__": setuptools.setup() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/skeleton.md0000644000175100001710000002363300000000000017327 0ustar00runnerdocker# Overview This project is merged with [skeleton](https://github.com/jaraco/skeleton). What is skeleton? It's the scaffolding of a Python project jaraco [introduced in his blog](https://blog.jaraco.com/a-project-skeleton-for-python-projects/). It seeks to provide a means to re-use techniques and inherit advances when managing projects for distribution. 
## An SCM-Managed Approach While maintaining dozens of projects in PyPI, jaraco derives best practices for project distribution and publishes them in the [skeleton repo](https://github.com/jaraco/skeleton), a Git repo capturing the evolution and culmination of these best practices. It's intended to be used by a new or existing project to adopt these practices and honed and proven techniques. Adopters are encouraged to use the project directly and maintain a small deviation from the technique, make their own fork for more substantial changes unique to their environment or preferences, or simply adopt the skeleton once and abandon it thereafter. The primary advantage to using an SCM for maintaining these techniques is that those tools help facilitate the merge between the template and its adopting projects. Another advantage to using an SCM-managed approach is that tools like GitHub recognize that a change in the skeleton is the _same change_ across all projects that merge with that skeleton. Without the ancestry, with a traditional copy/paste approach, a [commit like this](https://github.com/jaraco/skeleton/commit/12eed1326e1bc26ce256e7b3f8cd8d3a5beab2d5) would produce notifications in the upstream project issue for each and every application, but because it's centralized, GitHub provides just the one notification when the change is added to the skeleton. # Usage ## new projects To use skeleton for a new project, simply pull the skeleton into a new project: ``` $ git init my-new-project $ cd my-new-project $ git pull gh://jaraco/skeleton ``` Now customize the project to suit your individual project needs. ## existing projects If you have an existing project, you can still incorporate the skeleton by merging it into the codebase. ``` $ git merge skeleton --allow-unrelated-histories ``` The `--allow-unrelated-histories` is necessary because the history from the skeleton was previously unrelated to the existing codebase. Resolve any merge conflicts and commit to the master, and now the project is based on the shared skeleton. ## Updating Whenever a change is needed or desired for the general technique for packaging, it can be made in the skeleton project and then merged into each of the derived projects as needed, recommended before each release. As a result, features and best practices for packaging are centrally maintained and readily trickle into a whole suite of packages. This technique lowers the amount of tedious work necessary to create or maintain a project, and coupled with other techniques like continuous integration and deployment, lowers the cost of creating and maintaining refined Python projects to just a few, familiar Git operations. For example, here's a session of the [path project](https://pypi.org/project/path) pulling non-conflicting changes from the skeleton: Thereafter, the target project can make whatever customizations it deems relevant to the scaffolding. The project may even at some point decide that the divergence is too great to merit renewed merging with the original skeleton. This approach applies maximal guidance while creating minimal constraints. ## Periodic Collapse In late 2020, this project [introduced](https://github.com/jaraco/skeleton/issues/27) the idea of a periodic but infrequent (O(years)) collapse of commits to limit the number of commits a new consumer will need to accept to adopt the skeleton. The full history of commits is collapsed into a single commit and that commit becomes the new mainline head. 
When one of these collapse operations happens, any project that previously pulled from the skeleton will no longer have a related history with that new main branch. For those projects, the skeleton provides a "handoff" branch that reconciles the two branches. Any project that has previously merged with the skeleton but now gets an error "fatal: refusing to merge unrelated histories" should instead use the handoff branch once to incorporate the new main branch. ``` $ git pull https://github.com/jaraco/skeleton 2020-handoff ``` This handoff needs to be pulled just once and thereafter the project can pull from the main head. The archive and handoff branches from prior collapses are indicate here: | refresh | archive | handoff | |---------|-----------------|--------------| | 2020-12 | archive/2020-12 | 2020-handoff | # Features The features/techniques employed by the skeleton include: - PEP 517/518-based build relying on Setuptools as the build tool - Setuptools declarative configuration using setup.cfg - tox for running tests - A README.rst as reStructuredText with some popular badges, but with Read the Docs and AppVeyor badges commented out - A CHANGES.rst file intended for publishing release notes about the project - Use of [Black](https://black.readthedocs.io/en/stable/) for code formatting (disabled on unsupported Python 3.5 and earlier) - Integrated type checking through [mypy](https://github.com/python/mypy/). ## Packaging Conventions A pyproject.toml is included to enable PEP 517 and PEP 518 compatibility and declares the requirements necessary to build the project on Setuptools (a minimum version compatible with setup.cfg declarative config). The setup.cfg file implements the following features: - Assumes universal wheel for release - Advertises the project's LICENSE file (MIT by default) - Reads the README.rst file into the long description - Some common Trove classifiers - Includes all packages discovered in the repo - Data files in the package are also included (not just Python files) - Declares the required Python versions - Declares install requirements (empty by default) - Declares setup requirements for legacy environments - Supplies two 'extras': - testing: requirements for running tests - docs: requirements for building docs - these extras split the declaration into "upstream" (requirements as declared by the skeleton) and "local" (those specific to the local project); these markers help avoid merge conflicts - Placeholder for defining entry points Additionally, the setup.py file declares `use_scm_version` which relies on [setuptools_scm](https://pypi.org/project/setuptools_scm) to do two things: - derive the project version from SCM tags - ensure that all files committed to the repo are automatically included in releases ## Running Tests The skeleton assumes the developer has [tox](https://pypi.org/project/tox) installed. The developer is expected to run `tox` to run tests on the current Python version using [pytest](https://pypi.org/project/pytest). Other environments (invoked with `tox -e {name}`) supplied include: - a `docs` environment to build the documentation - a `release` environment to publish the package to PyPI A pytest.ini is included to define common options around running tests. 
In particular: - rely on default test discovery in the current directory - avoid recursing into common directories not containing tests - run doctests on modules and invoke Flake8 tests - in doctests, allow Unicode literals and regular literals to match, allowing for doctests to run on Python 2 and 3. Also enable ELLIPSES, a default that would be undone by supplying the prior option. - filters out known warnings caused by libraries/functionality included by the skeleton Relies on a .flake8 file to correct some default behaviors: - disable mutually incompatible rules W503 and W504 - support for Black format ## Continuous Integration The project is pre-configured to run Continuous Integration tests. ### Github Actions [Github Actions](https://docs.github.com/en/free-pro-team@latest/actions) are the preferred provider as they provide free, fast, multi-platform services with straightforward configuration. Configured in `.github/workflows`. Features include: - test against multiple Python versions - run on late (and updated) platform versions - automated releases of tagged commits - [automatic merging of PRs](https://github.com/marketplace/actions/merge-pull-requests) (requires [protecting branches with required status checks](https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/enabling-required-status-checks), [not possible through API](https://github.community/t/set-all-status-checks-to-be-required-as-branch-protection-using-the-github-api/119493)) ### Continuous Deployments In addition to running tests, an additional publish stage is configured to automatically release tagged commits to PyPI using [API tokens](https://pypi.org/help/#apitoken). The release process expects an authorized token to be configured with each Github project (or org) `PYPI_TOKEN` [secret](https://docs.github.com/en/free-pro-team@latest/actions/reference/encrypted-secrets). Example: ``` pip-run -q jaraco.develop -- -m jaraco.develop.add-github-secrets ``` ## Building Documentation Documentation is automatically built by [Read the Docs](https://readthedocs.org) when the project is registered with it, by way of the .readthedocs.yml file. To test the docs build manually, a tox env may be invoked as `tox -e docs`. Both techniques rely on the dependencies declared in `setup.cfg/options.extras_require.docs`. In addition to building the Sphinx docs scaffolded in `docs/`, the docs build a `history.html` file that first injects release dates and hyperlinks into the CHANGES.rst before incorporating it as history in the docs. ## Cutting releases By default, tagged commits are released through the continuous integration deploy stage. Releases may also be cut manually by invoking the tox environment `release` with the PyPI token set as the TWINE_PASSWORD: ``` TWINE_PASSWORD={token} tox -e release ``` ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1614883612.0 importlib_resources-5.1.2/tox.ini0000644000175100001710000000167300000000000016474 0ustar00runnerdocker[tox] envlist = python minversion = 3.2 # https://github.com/jaraco/skeleton/issues/6 tox_pip_extensions_ext_venv_update = true toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = commands = pytest {posargs} usedevelop = True extras = testing [testenv:docs] extras = docs testing changedir = docs commands = python -m sphinx . 
{toxinidir}/build/html [testenv:diffcov] deps = diff-cover commands = pytest {posargs} --cov-report xml diff-cover coverage.xml --compare-branch=origin/main --html-report diffcov.html diff-cover coverage.xml --compare-branch=origin/main --fail-under=100 [testenv:release] skip_install = True deps = build twine[keyring]>=1.13 path jaraco.develop>=7.1 passenv = TWINE_PASSWORD GITHUB_TOKEN setenv = TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = python -c "import path; path.Path('dist').rmtree_p()" python -m build python -m twine upload dist/* python -m jaraco.develop.create-github-release