reentry-1.3.1/.coveragerc
[run]
source = reentry
omit = *test*
[html]
directory = coverage/html
reentry-1.3.1/.gitignore
*.pyc
*~
*.project
*.pydevproject
.settings
.DS_Store
*/.DS_Store
*/*/.DS_Store
*/*/*/.DS_Store
.metadata
*.cache/
*.pytest_cache/
__pycache__
/venv*/
/.idea/
/*egg-info/
*dist
build/
*.orig
*_BACKUP_*
*_BASE_*
*_LOCAL_*
*_REMOTE_*
.coverage
.tox
*.log
*.swp
reentry-1.3.1/.pre-commit-config.yaml
repos:
- repo: git://github.com/pre-commit/mirrors-yapf
sha: v0.24.0
hooks:
- id: yapf
language: system
- repo: local
hooks:
- id: prospector
language: system
types: [file, python]
name: prospector
description: "This hook runs Prospector: https://github.com/landscapeio/prospector"
entry: prospector
- id: travis-linter
name: travis
entry: travis lint
files: .travis.yml
language: ruby
additional_dependencies: ['travis']
- id: version-updater
name: version
language: system
entry: python ops/update_version.py
always_run: true
reentry-1.3.1/.prospector.yaml
ignore-paths:
- doc
- examples
- test
- utils
pylint:
max-line-length: 140
pyflakes:
run: false
reentry-1.3.1/.pylintrc
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,test,examples,setup.py
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=1
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,useless-object-inheritance
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
[REPORTS]
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[BASIC]
# Naming hint for argument names
argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct argument names
argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Naming hint for attribute names
attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct attribute names
attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=5
# Naming hint for function names
function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct function names
function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_, _INPUT_FILE_NAME, _OUTPUT_FILE_NAME
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for method names
method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct method names
method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*)|(setUp)|(tearDown))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_,setUp,tearDown
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Naming hint for variable names
variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct variable names
variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=140
# Maximum number of lines in a module
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[DESIGN]
# Maximum number of arguments for function / method
max-args=6
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=20
# Maximum number of parents for a class (see R0901).
max-parents=20
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=1
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,_get_linkname_retrieved
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
reentry-1.3.1/.style.yapf
[style]
based_on_style = google
column_limit = 140
reentry-1.3.1/.travis-tests/host-pkg/pyproject.toml
[build-system]
requires = ["setuptools", "wheel", "reentry"]
reentry-1.3.1/.travis-tests/host-pkg/reentry_test_host/__init__.py
reentry-1.3.1/.travis-tests/host-pkg/reentry_test_host/builtin.py
"""Test plugin class for integration tests."""
class PluginClass(object):
test_string = 'TEST'
def get_test_string(self):
return self.test_string
reentry-1.3.1/.travis-tests/host-pkg/reentry_test_host/tests.py
"""CLI for reentry integration test.
Note: This requires the test packages in .travis-tests/ to be installed first.
"""
from __future__ import print_function
import click
try:
# prefer the backport for Python <3.5
from pathlib2 import Path
except ImportError:
from pathlib import Path
from reentry import manager
from reentry.config import get_datafile
@click.command()
@click.option('--with-noreg', is_flag=True)
def main(with_noreg):
"""Test automatic scanning / registering"""
entry_point_map = manager.get_entry_map(groups='reentry_test', ep_names=['test-plugin', 'test-noreg', 'builtin'])
data_file = Path(get_datafile())
assert entry_point_map, 'The \'reentry_test\' entry point group was not found\nMap:\n{}\n\nData File: {}\n\nContents:\n{}'.format(
manager.get_entry_map(), str(data_file), data_file.read_text())
try:
test_entry_point = entry_point_map['reentry_test']['test-plugin']
builtin_entry_point = entry_point_map['reentry_test']['builtin']
if with_noreg:
# note: requires a prior `reentry scan` for this to work
noreg_entry_point = entry_point_map['reentry_test']['test-noreg']
except Exception as err:
print('datafile: {}'.format(str(data_file)))
print('\nCurrent relevant entry point map:\n\n')
print(manager.format_map(entry_point_map))
print('\n')
scan_map = manager.scan(groups=['reentry_test'], commit=False)
print('\nFull entry point map after scan:\n\n')
print(manager.format_map(scan_map))
raise err
plugin_class = test_entry_point.load()
builtin_class = builtin_entry_point.load()
assert plugin_class.test_string == 'TEST', 'The test string was incorrect'
assert builtin_class.test_string == 'TEST', 'The test string was incorrect'
if with_noreg:
noreg_class = noreg_entry_point.load()
assert noreg_class.test_string == 'TEST', 'The test string was incorrect'
plugin_list = [ep.load() for ep in manager.iter_entry_points('reentry_test')]
assert plugin_class in plugin_list, 'iter_entry_points found differing test entry point from get_entry_map.'
assert builtin_class in plugin_list, 'iter_entry_points found differing test entry point from get_entry_map.'
if with_noreg:
assert noreg_class in plugin_list, 'iter_entry_points found differing test entry point from get_entry_map.'
reentry-1.3.1/.travis-tests/host-pkg/setup.py
# pylint: disable=missing-docstring
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='reentry-test-host',
packages=find_packages(),
install_requires=['reentry'],
reentry_register=True,
entry_points={
'console_scripts': ['reentry-test-hooks = reentry_test_host.tests:main'],
'reentry_test': ['builtin = reentry_test_host.builtin:PluginClass']
})
reentry-1.3.1/.travis-tests/noep-pkg/pyproject.toml
[build-system]
requires = ["setuptools", "wheel", "reentry"]
reentry-1.3.1/.travis-tests/noep-pkg/reentry_test_noep/__init__.py
reentry-1.3.1/.travis-tests/noep-pkg/setup.py
# pylint: disable=missing-docstring
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='reentry-test-noep', packages=find_packages(), reentry_register=True)
reentry-1.3.1/.travis-tests/noreg-pkg/reentry_test_noreg/__init__.py
reentry-1.3.1/.travis-tests/noreg-pkg/reentry_test_noreg/plugin.py
"""Test plugin class for integration tests."""
class PluginClass(object):
test_string = 'TEST'
def get_test_string(self):
return self.test_string
reentry-1.3.1/.travis-tests/noreg-pkg/setup.py
# pylint: disable=missing-docstring
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='reentry-test-noreg',
packages=find_packages(),
entry_points={'reentry_test': ['test-noreg = reentry_test_plugin.plugin:PluginClass']})
reentry-1.3.1/.travis-tests/plugin-pkg/pyproject.toml
[build-system]
requires = ["setuptools", "wheel", "reentry"]
reentry-1.3.1/.travis-tests/plugin-pkg/reentry_test_plugin/__init__.py
reentry-1.3.1/.travis-tests/plugin-pkg/reentry_test_plugin/plugin.py
"""Test plugin class for integration tests."""
class PluginClass(object):
test_string = 'TEST'
def get_test_string(self):
return self.test_string
reentry-1.3.1/.travis-tests/plugin-pkg/setup.py
# pylint: disable=missing-docstring
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='reentry-test-plugin',
packages=find_packages(),
reentry_register=True,
entry_points={'reentry_test': ['test-plugin = reentry_test_plugin.plugin:PluginClass']})
reentry-1.3.1/.travis.yml
notifications:
email: false
language: python
python:
- 2.7
- 3.6
cache: pip
install:
- pip install -e .[dev]
- pip install tox-travis coveralls
env:
- TEST_TYPE="pre-commit"
- TEST_TYPE="unittests"
script:
- if [ "$TEST_TYPE" == "unittests" ] ; then ./run_tests.sh ; fi
- if [ "$TEST_TYPE" == "pre-commit" ] ; then pre-commit run --all-files || ( git diff; pip freeze | grep yapf; exit 1; ) ; fi
after_success:
- if [ "$TEST_TYPE" == "unittests" ] ; then reentry dev coveralls; fi
reentry-1.3.1/CHANGELOG.md
# Changelog
All notable changes to this project after version 1.0.3 will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [1.3.0] - 2019-04-10
### Changed
- drop support for `reentry_scan` hook since pip 19 introduced build-system isolation that restricts
scans performed by the hook to the build-system (containing only setuptools, wheel, reentry, etc.).
- `manager.scan`: renamed negated options (`nocommit` => `commit`, `nodelete` => `delete`)
### Added
- add `reentry clear` command to clear the reentry cache
## [1.2.2] - 2018-10-07
### Changed
- replace py with pathlib/pathlib2, reducing dependencies for python 3
- config file:
- use hashed (shorter) filename
- resolve symlinks in file path
- recognize `XDG_CONFIG_HOME` environment variable
- move entry points, classifiers etc. to `setup.json` file
## [1.2.1] - 2018-06-11
### Changed
- data file is now composed of the directory in which the python executable sits and the python major version (2 or 3)
- setuptools relevant entry points can now be registered if specifically asked for
- `PluginManager.scan`: `group_re` kwarg now allows string regex too
- `PluginManager.iter_entry_points`: now scans by default if group or entry point not found
- `PluginManager`: new constructor kwarg `scan_for_not_found` defaults to `True`, set to `False` to fail faster when no cached group / entry point is found.
- `JsonBackend`, `BackendInterface`: The `write_*_dist()` methods have been replaced by `scan_*dist()`, the output of which can be passed to the `write_dist_map()` method.
- `JsonBackend.epmap`: promoted to read-only property, returns copy of internal map.
## [1.2.0] - 2018-04-19
### Changed
- data file name based on `sys.executable` to make sure entry points registered during install phase are available afterwards
- `reentry_scan` during install no longer overwrites, but only adds (distributions installed simultaneously in one `pip` invocation can not discover each other's entry points and might overwrite each other)
### Added
- setup coveralls
- added coverage badge to README
- cli: `reentry dev coveralls` runs coveralls only if TRAVIS env variable is set
- read data dir from REENTRY_DATADIR env variable (env > rcfile > default)
- CI test for registering entry points from the plugin host
- documented limitations of `reentry_scan`
- documented compatibility issues with `setup_requires`
## [1.1.2]
### Changed
- fixed a bug that prevented installation on Windows
## [1.1.1]
### Changed
- fixed a bug that prevented installation with Python 2.7 (added regression test)
## [1.1.0]
### Added
- reentry now reads configuration from ~/.reentryrc or ~/.config/reentry/config if exists
- configuration key: datadir, defaults to ~/.config/reentry/data/
- entry points are now stored in a file in ~/.config/reentry/data/, named individually per installation
- setup-hook `reentry_register` is now working and tested on Travis-CI
- `reentry_register` now checks and does nothing if distribution has no entry points
- setup-hook `reentry_scan` is now working and tested on Travis-CI
### Changed
- reentry can now cache entry points even if the user has no write permission in its install dir
- JsonBackend API: small changes to the distribution writing methods
## [1.0.3]
reentry-1.3.1/LICENSE
Copyright 2017 Rico Haeuselmann
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
reentry-1.3.1/MANIFEST.in
include README.rst
include LICENSE
include setup.json
recursive-include reentry tests/*json
reentry-1.3.1/README.rst
.. image:: https://travis-ci.org/DropD/reentry.svg?branch=master
   :target: https://travis-ci.org/DropD/reentry

.. image:: https://coveralls.io/repos/github/DropD/reentry/badge.svg
   :target: https://coveralls.io/github/DropD/reentry
=======
Reentry
=======
A plugin manager based on setuptools entry points with 10x the speed
Features
--------
* finding plugins: reentry keeps a map of entry points in a file
* speed: reentry provides an EntryPoint implementation that trades extra checks for search and load speed
* automatic registering: use ``reentry_register: True`` in your ``setup.py`` to automatically register plugins
Note that ``reentry_register`` creates a *build-time*
dependency on ``reentry``. The suggested way to resolve that is using the
method described in `PEP 518 <https://www.python.org/dev/peps/pep-0518/>`_, for
which support has been added in pip 10:
next to ``setup.py``, put a file ``pyproject.toml`` containing::
    [build-system]
    # Minimum requirements for the build system to execute.
    requires = ["setuptools", "wheel", "reentry"]
An alternative way for specifying a build dependency is to put::
    setup(
        ...
        setup_requires=['reentry'],
        ...
    )
in your ``setup.py``.
This alternative works with all versions of ``pip``, but fails on systems
where Python is linked against old ``SSL`` libraries (such as the system
Python of some versions of OS X).
Limitations
-----------
* entry points with extra dependencies (``name = module_name:attrs [extras]``)
  are still supported. Trying to load them, however, imports ``pkg_resources``
  and forgoes the speedup (see the sketch below).
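To see what this means in practice, here is a minimal sketch (the entry point
specifications are made up for illustration): a plain specification is handled by
reentry's own lightweight ``EntryPoint`` class, while one declaring extras falls
back to ``pkg_resources``::

    from reentry.entrypoint import EntryPoint

    # plain spec: parsed by reentry's lightweight EntryPoint, pkg_resources stays unimported
    plain = EntryPoint.parse('this_plugin = this_package.subpackage:member')

    # spec with extras: parsing falls back to pkg_resources.EntryPoint,
    # which imports pkg_resources and gives up the speed advantage
    heavy = EntryPoint.parse('other_plugin = other_package.module:member [extras]')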
Quickstart
----------
Use the following in your plugin's ``setup.py``::
    setup(
        ...
        setup_requires=['reentry'],
        reentry_register=True,
        entry_points={
            'my_plugins': ['this_plugin = this_package.subpackage:member'],
            ...
        }
    )
And iterate over installed plugins from the host package::
    from reentry import manager

    available_plugins = manager.iter_entry_points(group='my_plugins')
    for plugin in available_plugins:
        plugin_object = plugin.load()
        plugin_object.use()
The syntax is consistent with ``setuptools``'s ``pkg_resources``, so you may use it as a fallback::
    try:
        from reentry import manager as entry_pt_manager
    except ImportError:
        import pkg_resources as entry_pt_manager

    entry_pt_manager.iter_entry_points(...)
    ...
Reentry Configuration
---------------------
Reentry supports getting information from a configuration file. The file will
be searched at the following paths:
* ~/.reentryrc
* ~/.config/reentry/config
The configuration file has an ``ini`` format and supports the following keys::
    [general]
    datadir=/path/to/data/dir
    data_filename=name
The ``datadir`` is the folder in which ``reentry`` stores the data file
that contains the information about the registered entry points.
If the config file doesn't exist in one of the above paths, the ``datadir`` is
set to ``~/.config/reentry/data``.
``data_filename`` is the name of the data file, in case you want to pick the
name by your own instead of letting ``reentry`` choose it.
Warning: By default, ``reentry`` creates a separate data file for every python
interpreter in order not to mix entry points between different python
environments on your system. Setting a ``data_filename`` in the configuration
file tells ``reentry`` to *always* use this data file and may result in
unexpected behavior if you use ``reentry`` in multiple python environments.
You can also set configuration options for ``reentry`` via environment
variables:
* ``datadir`` can be defined by ``REENTRY_DATADIR``.
* ``data_filename`` can be defined by ``REENTRY_DATA_FILENAME``.
Environment variables take precedence over the configuration file.
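To check which settings are actually in effect, the data file location can be
queried programmatically. The following is a small sketch using the helpers
shipped with reentry; the printed paths depend on your environment::

    from reentry.config import get_config, get_datafile

    config = get_config()                    # defaults merged with config file and environment
    print(config.get('general', 'datadir'))  # directory that holds the data file
    print(get_datafile())                    # full path to the data file (created if missing)

The same path is also printed by the ``reentry dev datafile`` command.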
What for?
---------
To make entry points usable for plugins in time-critical situations such as
command line interfaces!
Setuptool's entry point system is convenient to use for plugin-based
python applications. It allows separate python packages to act as plugins
to a host package (or to each other), making it easy for the host to find and
iterate over the relevant data structures from plugins.
However, the time spent on importing `setuptools` scales badly with the
number of installed distributions and can easily reach 0.5 seconds for
moderately complex environments.
Finding and loading of plugins can be time-critical, for example in command
line tools that need to load subcommands, where 100 ms are a noticeable delay.
Importing setuptools's `pkg_resources` takes time, because it verifies that
dependencies are installed correctly for all distributions present in the
environment. This allows entry points to have additional dependencies or
"extras" (``entry_point = module_name:attrs [extras]``).
Reentry forgoes this dependency check for entry points without 'extras'
and thereby manages to be fast and scale better with the number
of plugins installed.
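As a rough, unscientific check of this on your own machine, you can time a cold
import of each in a fresh interpreter (an illustrative sketch only; absolute
numbers depend entirely on how many distributions are installed)::

    import subprocess
    import sys

    SNIPPET = 'import time; t = time.time(); {}; print(time.time() - t)'
    for statement in ('import pkg_resources', 'from reentry import manager'):
        # run in a separate interpreter so nothing is cached from a previous import
        output = subprocess.check_output([sys.executable, '-c', SNIPPET.format(statement)])
        print(statement, '->', output.decode().strip(), 'seconds')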
Standalone Manager Usage
------------------------
Sometimes it might be necessary to update the cached entry points, for example
* after uninstalling a plugin (there are no uninstall hooks by setuptools at the moment)
* after installing a plugin that does not use install hooks
* while developing a plugin / plugin host
for those cases reentry has a commandline interface::
    $ reentry --help
    Usage: reentry [OPTIONS] COMMAND [ARGS]...

      manage your reentry python entry point cache

    Options:
      --help  Show this message and exit.

    Commands:
      clear  Clear entry point map.
      dev    Development related commands.
      map    Print out a map of cached entry points
      scan   Scan for python entry points to cache for faster loading.
::
    $ reentry scan --help
    Usage: reentry scan [OPTIONS] PATTERN

      Scan for python entry points to cache for faster loading.

      Scan only for specific PATTERNs or leave empty to scan all

    Options:
      -r, --regex  Treat PATTERNs as regular expressions
      --help       Show this message and exit.
::
    $ reentry map --help
    Usage: reentry map [OPTIONS]

    Options:
      --dist TEXT   limit map to a distribution
      --group TEXT  limit map to an entry point group
      --name TEXT   limit map to entrypoints that match NAME
      --help        Show this message and exit.
Note: Where needed (e.g. in jupyter notebooks), these operations can also be
performed in Python using the reentry ``manager``, e.g.::
    from reentry import manager
    manager.scan()
CLI Example
-----------
Reentry provides a drop-in replacement for iter_entry_points::
    import click
    from click_plugins import with_plugins
    from reentry.manager import iter_entry_points

    @with_plugins(iter_entry_points('cli_plugins'))
    @click.group()
    def cli():
        """
        command with subcommands loaded from plugin entry points
        """
For this to work, reentry has to be installed and must have been used to
scan for entry points in the 'cli_plugins' group once.
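One way to perform that initial scan, assuming the plugins register their
subcommands in the ``cli_plugins`` group, is ``reentry scan cli_plugins`` on the
command line, or the equivalent call through the manager::

    from reentry import manager

    # cache all entry points in the 'cli_plugins' group for fast lookups later on
    manager.scan(groups=['cli_plugins'])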
reentry-1.3.1/ops/update_version.py
"""Update version numbers everywhere based on git tags."""
from __future__ import print_function
import os
import re
import json
import fileinput
import contextlib
import subprocess
try:
# prefer the backport for Python <3.5
from pathlib2 import Path
except ImportError:
from pathlib import Path
import collections
from packaging import version
def subpath(*args):
return os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', *args))
@contextlib.contextmanager
def file_input(*args, **kwargs):
"""Context manager for a FileInput object."""
input_fo = fileinput.FileInput(*args, **kwargs)
try:
yield input_fo
finally:
input_fo.close()
class VersionUpdater(object):
"""
Version number synchronisation interface.
Updates the version information in
* setup.json
* aiida_vasp/__init__.py
to the current version number.
The current version number is either parsed from the output of ``git describe --tags --match v*.*.*``, or if the command fails for
any reason, from setup.json. The current version number is decided on init, syncronization can be executed by calling ``.sync()``.
"""
version_pat = re.compile(r'\d+.\d+.\d+(-(alpha|beta|rc)(.\d+){0,3}){0,1}')
init_version_pat = re.compile(r'(__version__ = )([\'"])(.*?)([\'"])', re.DOTALL | re.MULTILINE)
replace_tmpl = r'\1\g<2>{}\4'
def __init__(self):
"""Initialize with documents that should be kept up to date and actual version."""
self.top_level_init = Path(subpath('reentry', '__init__.py'))
self.setup_json = Path(subpath('setup.json'))
self.version = self.get_version()
def write_to_init(self):
init_content = self.top_level_init.read_text()
self.top_level_init.write_text(re.sub(self.init_version_pat, self.new_version_str, init_content))  # pattern is already compiled with DOTALL | MULTILINE
def write_to_setup(self):
"""Write the updated version number to setup.json."""
with open(str(self.setup_json), 'r') as setup_fo:
# preserve order
setup = json.load(setup_fo, object_pairs_hook=collections.OrderedDict)
setup['version'] = str(self.version)
with open(str(self.setup_json), 'w') as setup_fo:
json.dump(setup, setup_fo, indent=4, separators=(',', ': '))
@property
def new_version_str(self):
return self.replace_tmpl.format(str(self.version))
@property
def setup_version(self):
"""Grab the parsed version from setup.json."""
with open(str(self.setup_json), 'r') as setup_fo:
setup = json.load(setup_fo)
try:
version_string = setup['version']
except KeyError:
raise AttributeError('No version found in setup.json')
return version.parse(version_string)
@property
def init_version(self):
"""Grab the parsed version from the init file."""
match = re.search(self.init_version_pat, self.top_level_init.read_text())
if not match:
raise AttributeError('No __version__ found in top-level __init__.py')
return version.parse(match.groups()[2])
@property
def tag_version(self):
"""Get the current version number from ``git describe``, fall back to setup.json."""
try:
describe_byte_string = subprocess.check_output(['git', 'describe', '--tags', '--match', 'v*.*.*'])
match = re.search(self.version_pat, describe_byte_string.decode(encoding='UTF-8'))
version_string = match.string[match.pos:match.end()]
return version.parse(version_string)
except subprocess.CalledProcessError:
return self.setup_version
def get_version(self):
return max(self.setup_version, self.init_version, self.tag_version)
def sync(self):
if self.version > self.init_version:
self.write_to_init()
if self.version > self.setup_version:
self.write_to_setup()
if __name__ == '__main__':
VERSION_UPDATER = VersionUpdater()
VERSION_UPDATER.sync()
reentry-1.3.1/reentry/__init__.py
"""Expose the default manager"""
from reentry.default_manager import DEFAULT_MANAGER as manager
__version__ = '1.3.1'
__all__ = ['manager']
reentry-1.3.1/reentry/abcbackend.py
# -*- coding: utf8 -*-
"""Abstract base class for backends"""
import abc
import six
class BackendInterface(object):
"""
Backend interface, subclass to implement a concrete backend.
All methods without a method body need to be implemented in a backend.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_map(self, dist=None, group=None, name=None):
"""
get a map of entry points, filtered by
:param dist: distribution name or sequence of distribution names
:param groups: single group name or sequence of group names
:param name: entry point name pattern or sequence of name patterns
The map is structured as follows::

    map = {
        '<group name>': {
            '<entry point name>': <EntryPoint instance>,
            ...
        },
        ...
    }
"""
@abc.abstractmethod
def iter_group(self, group):
"""
returns a list of entry points for the given group name
"""
@abc.abstractmethod
def get_group_names(self):
"""
returns a list of group names
"""
@abc.abstractmethod
def get_dist_names(self):
"""
returns a list of distribution names
"""
@abc.abstractmethod
def get_dist_map(self, dist):
"""
returns a map {group:[entry_points, ...], ...} for the given dist name
"""
@abc.abstractmethod
def scan_st_dist(self, dist):
"""Scan a distribution given by a name, empty by default."""
@abc.abstractmethod
def scan_install_dist(self, dist):
"""Add an incomplete distribution as passed by setuptools during it's installation."""
def scan_dist(self, distribution):
"""Take a distribution's project name, add the distribution."""
if isinstance(distribution, six.string_types):
dist_name, entry_point_map = self.scan_st_dist(distribution)
else:
dist_name, entry_point_map = self.scan_install_dist(distribution)
return dist_name, entry_point_map
@abc.abstractmethod
def write_dist_map(self, distname, entry_point_map=None):
"""Write a distribution given the name and entry point map"""
@abc.abstractmethod
def rm_dist(self, distname):
"""
removes a distribution completely
"""
@abc.abstractmethod
def clear(self):
"""Clears all stored entry points"""
@staticmethod
def pr_dist_map(dist):
dname = dist.project_name
epmap = dist.get_entry_map()
return dname, epmap
@staticmethod
def pr_dist_from_name(distname):
from pkg_resources import get_distribution
dist = get_distribution(distname)
return dist
@abc.abstractproperty
def epmap(self):
"""Full map {distribution: {group: [{name: entry_point}]}}."""
reentry-1.3.1/reentry/cli.py
"""Command line interface for reentry"""
import sys
import os
import subprocess
import click
from reentry.config import get_datafile
@click.group()
def reentry():
"""manage your reentry python entry point cache"""
@reentry.command()
@click.argument('groups', nargs=-1, metavar='PATTERN')
@click.option('-r', '--regex', is_flag=True, help='Treat PATTERNs as regular expressions')
def scan(groups, regex):
"""
Scan for python entry points to cache for faster loading.
Scan only for specific PATTERNs or leave empty to scan all
"""
from reentry import manager
if regex:
if not groups:
# nothing to do
sys.exit(0)
import re
matchstr = re.compile("|".join(['({})'.format(i) for i in groups]))
manager.scan(group_re=matchstr)
else:
manager.scan(groups)
@reentry.command('map')
@click.option('--dist', help='limit map to a distribution')
@click.option('--group', help='limit map to an entry point group')
@click.option('--name', help='limit map to entrypoints that match NAME')
def map_(dist, group, name):
"""Print out a map of cached entry points"""
import pprint
from reentry import manager
if dist is None:
res = {d: manager.get_entry_map(d, group, name) for d in manager.distribution_names}
else:
res = manager.get_entry_map(dist, group, name)
click.echo(pprint.pformat(res))
@reentry.command('clear')
def clear():
"""
Clear entry point map.
"""
from reentry import manager
manager.clear()
@reentry.group('dev')
def dev():
"""Development related commands."""
def echo_call(cmd):
click.echo('calling: {}'.format(' '.join(cmd)), err=True)
@dev.command()
def coveralls():
"""Run coveralls only on travis."""
if os.getenv('TRAVIS'):
cmd = ['coveralls']
echo_call(cmd)
subprocess.call(cmd)
@dev.command('datafile')
def datafile():
"""Print the path to the current datafile."""
click.echo(get_datafile())
reentry-1.3.1/reentry/config.py
"""Find and read user settings."""
import os
import sys
import hashlib
import platform
try:
# prefer the backport for Python <3.5
from pathlib2 import Path
except ImportError:
from pathlib import Path
import six
from six.moves import configparser
__all__ = ['find_config', 'get_config', 'get_datafile']
def _get_default_config_dir():
return Path(os.getenv('XDG_CONFIG_HOME', '~/.config')).expanduser().joinpath('reentry')
def find_config():
"""
Search for a config file in the following places and order:
* ~/.reentryrc
* ~/.config/reentry/config
"""
rc_file = Path.home().joinpath('.reentryrc')
config_file = _get_default_config_dir().joinpath('config')
# pylint: disable=no-else-return
if rc_file.exists(): # pylint: disable=no-member
return rc_file
elif config_file.exists(): # pylint: disable=no-member
return config_file
return rc_file
def make_config_parser(*args, **kwargs):
"""Get the correct ConfigParser class depending on python version."""
# pylint: disable=no-else-return
if six.PY2:
return configparser.SafeConfigParser(*args, **kwargs)
elif six.PY3:
return configparser.ConfigParser(*args, **kwargs)
return None
def get_config(config_file_name=str(find_config())):
"""Create config parser with defaults and read in the config file."""
default_config_dir = _get_default_config_dir()
default_config_values = {'datadir': str(default_config_dir.joinpath('data')), 'data_filename': hashed_data_file_name()}
parser = make_config_parser(default_config_values)
parser.add_section('general')
parser.read([config_file_name])
env_datadir = os.getenv('REENTRY_DATADIR')
if env_datadir:
env_datadir_path = Path(env_datadir)
if env_datadir_path.exists() and not env_datadir_path.is_dir(): # pylint: disable=no-member
raise ValueError('environment variable $REENTRY_DATADIR={} exists, but is not a directory'.format(env_datadir))
parser.set('general', 'datadir', str(env_datadir_path))
env_data_filename = os.getenv('REENTRY_DATA_FILENAME')
if env_data_filename:
parser.set('general', 'data_filename', env_data_filename)
return parser
def hashed_data_file_name():
"""Find the path to the reentry executable and mangle it into a file name."""
fname = 'u{bin_dir}_{impl}-{ver}'.format(bin_dir=Path(sys.executable).resolve().parent,
impl=platform.python_implementation(),
ver=platform.python_version())
path_hash = hashlib.sha256(fname.encode('utf-8'))
return path_hash.hexdigest()
def get_datafile():
"""Create the path to the data file used to store entry points."""
config = get_config()
pkg_path_filename = config.get('general', 'data_filename')
datafile = Path(config.get('general', 'datadir')).joinpath(pkg_path_filename)
if not datafile.exists(): # pylint: disable=no-member
datafile.parent.mkdir(parents=True, exist_ok=True)
datafile.write_text(u'{}')
return str(datafile)
reentry-1.3.1/reentry/default_manager.py
# -*- coding: utf8 -*-
"""Manager for entry point based plugins. Main client facing API"""
from __future__ import print_function
import re
import six
from reentry.jsonbackend import JsonBackend
__all__ = ['PluginManager', 'get_default_backend', 'DEFAULT_MANAGER']
IGNORE_BY_DEFAULT = {
'console_scripts', 'gui_scripts', 'distutils.commands', 'distutils.setup_keywords', 'setuptools.installation',
'setuptools.file_finders', 'egg_info.writers'
}
def clean_map(entry_point_map, exceptions=None):
"""Extract entry points that are clearly not for plugins unless excepted."""
ep_map = entry_point_map.copy()
ignore_set = IGNORE_BY_DEFAULT.difference(set(exceptions or []))
for ignore_group in ignore_set:
ep_map.pop(ignore_group, None)
return ep_map
def get_default_backend():
from reentry.config import get_datafile
return JsonBackend(datafile=get_datafile())
class PluginManager(object):
"""Manage a set of cached entry points"""
def __init__(self, backend=get_default_backend(), scan_for_not_found=True):
self._backend = backend
self._scan_for_not_found = scan_for_not_found
def iter_entry_points(self, group, name=None):
"""
Iterate over all registered entry points in `group`, or if `name` is
given, only the ones matching `name`.
If no entry point is found in the group (or none called `name`), a scan
is triggered, which may take a while. This behaviour can be configured
when creating the plugin manager.
The backend may only import pkg_resources if any of the entry points
contain extras requirements.
"""
if self._scan_for_not_found:
if not self.get_entry_map(groups=[group]):
self.scan(groups=[group])
elif name and name not in self.get_entry_map(groups=[group], ep_names=[name]).get(group, {}).keys():
self.scan(groups=[group])
for entry_point in self._backend.iter_group(group):
if name:
if name in entry_point.name:
yield entry_point
else:
yield entry_point
def get_entry_map(self, dist_names=None, groups=None, ep_names=None):
"""
return the entry point map for `group` or the whole map for `dist`
The backend may only load pkg_resources if any of the entry points contain extras requirements
"""
return self._backend.get_map(dist=dist_names, group=groups, name=ep_names)
def get_dist_map(self, dist=None):
"""Get a map of entry points sorted by distribution."""
return self._backend.get_dist_map(dist=dist)
@staticmethod
def format_map(entry_point_map, indent=1):
tabs = '\t' * indent
newl = '\n' + tabs
return tabs + newl.join(['{} -> {}'.format(dname, dmap) for dname, dmap in entry_point_map.items()])
def register(self, distribution):
"""
Registers the distribution's entry points with the backend.
The backend may load pkg_resources to resolve the distribution name
Takes either a string or a Distribution object as passed by setuptools to hooks during install.
"""
dist_name, entry_point_map = self._backend.scan_dist(distribution)
entry_point_map = clean_map(entry_point_map)
self._backend.write_dist_map(dist_name, entry_point_map)
return dist_name, entry_point_map
def scan(self, groups=None, group_re=None, commit=True, delete=True): # pylint: disable=too-many-branches
"""Walk through all distributions available and register entry points.
Note: This imports pkg_resources.
:param groups: a list of group names to register entry points for.
If None, registers all entry points found.
:param group_re: a regular expression for group names.
Groups matched by the regular expression are appended to `groups`
:type group_re: str or a compiled expression from re.compile
:param commit: If False, performs just a dry run
:param delete: If False, append to existing entry point map
"""
import pkg_resources as pr
pr_env = pr.AvailableDistributions()
pr_env.scan()
groups = groups or []
if group_re:
all_groups = self.scan_all_group_names()
if isinstance(group_re, six.string_types):
group_re = re.compile(group_re)
groups.extend({group for group in all_groups if group_re.match(group)})
if delete:
full_map = {}
if commit:
if groups:
for group in groups:
self._backend.rm_group(group)
else:
self.clear()
else:
full_map = self._backend.epmap.copy()
# ~ for dists in pr_env._distmap.values(): # pylint: disable=protected-access
for dist in pr_env:
# note: in pip 19, the *installing* distribution is part of pr_env but
# pkg_resources.get_distribution() fails on it
try:
dname, emap = self._backend.scan_dist(dist)
except pr.DistributionNotFound:
continue
dmap = full_map.get(dname, {})
if groups:
new_dmap = {k: v for k, v in six.iteritems(emap) if k in groups}
dmap.update(new_dmap)
else:
dmap.update(emap)
# extract entry points that are reserved for other purposes unless excepted
dmap = clean_map(dmap, exceptions=groups)
if commit:
self._backend.write_dist_map(dname, entry_point_map=dmap)
full_map[dname] = [dmap]
return full_map
def scan_all_group_names(self):
"""Use `pkg_resources` to get a set of all available (not only cached) groups."""
import pkg_resources as pr
pr_env = pr.AvailableDistributions()
pr_env.scan()
all_groups = set()
for dist_name in pr_env:
_, dmap = self._backend.scan_dist(dist_name)
all_groups.update(dmap.keys())
return all_groups
def unregister(self, distname):
"""
unregisters the distribution's entry points with the backend
The backend may load pkg_resources to resolve the distribution name
"""
self._backend.rm_dist(distname)
def clear(self):
"""Clear entry point map."""
self._backend.clear()
@property
def distribution_names(self):
return self._backend.get_dist_names()
DEFAULT_MANAGER = PluginManager()
reentry-1.3.1/reentry/entrypoint.py
# -*- coding: utf8 -*-
"""Light weight entry point implementation"""
import re
class EntryPoint(object):
"""
Lightweight analogue for pkg_resources.EntryPoint
"""
pattern = re.compile(r'\s*(?P<name>.+?)\s*=\s*(?P<module>[\w.]+)\s*(:\s*(?P<attr>[\w.]+))?\s*(?P<extras>\[.*\])?\s*$')
def __init__(self, name, module_name, attrs=(), distname=None):
self.name = name
self.module_name = module_name
self.attrs = attrs
self.distname = distname
@classmethod
def parse(cls, src, distname=None):
"""
Pasted from pkg_resources; falls back on pkg_resources.EntryPoint when extras are required.
"""
match = cls.pattern.match(src)
res = match.groupdict()
if res['extras']:
import pkg_resources as pr
dist = pr.get_distribution(distname) if distname else None
return pr.EntryPoint.parse(src, dist=dist)
attrs = res['attr'].split('.') if res['attr'] else ()
return cls(res['name'], res['module'], attrs, distname)
def load(self):
"""
pasted from pkg_resources
"""
import functools
from importlib import import_module
module = import_module(self.module_name)
try:
return functools.reduce(getattr, self.attrs, module)
except AttributeError as exc:
raise ImportError(str(exc))
def __str__(self):
string_form = '{} = {}'.format(self.name, self.module_name)
if self.attrs:
string_form += ':{}'.format('.'.join(self.attrs))
return string_form
def __repr__(self):
return 'reentry.EntryPoint.parse({})'.format(str(self))
reentry-1.3.1/reentry/hooks.py
# -*- coding: utf8 -*-
"""
Define a setuptools extension.
Usage::
setup(
...,
reentry_register = True,
...
)
"""
from __future__ import print_function
import sys
from reentry.config import get_datafile
def is_bool(value):
"""Return True if `value` is a boolean."""
return bool(value) == value
def register_dist(dist, attr, value):
"""If value is True, register the distribution's entry points in reentrys storage."""
from distutils.errors import DistutilsSetupError # pylint: disable=import-error,no-name-in-module
# assert is boolean
if not is_bool(value):
raise DistutilsSetupError('{} must be a boolean, got {}'.format(attr, value))
if value:
print('[ REENTRY ] registering entry points with reentry...', file=sys.stderr)
from reentry import manager
dist_name, entry_point_map = manager.register(dist)
print('[ REENTRY ] ... registered to {}'.format(get_datafile()), file=sys.stderr)
print('[ REENTRY ] Following entrypoints were registered\n', file=sys.stderr)
print(manager.format_map({dist_name: entry_point_map}), file=sys.stderr)
print('[ REENTRY ] Current entry point map at {}:'.format(get_datafile()), file=sys.stderr)
print(manager.format_map(manager.get_dist_map()), file=sys.stderr)
def ensure_list(value, attr):
"""raise an error if `value` is not a list"""
from distutils.errors import DistutilsSetupError # pylint: disable=import-error,no-name-in-module
if not isinstance(value, list):
raise DistutilsSetupError('{} must be a list, got {}'.format(attr, value.__class__))
reentry-1.3.1/reentry/js_data 0000664 0000000 0000000 00000010505 13461154274 0016234 0 ustar 00root root 0000000 0000000 {"pep8-naming": {"flake8.extension": {"N8": "N8 = pep8ext_naming:NamingChecker"}, "flint.extension": {"N81": "N81 = pep8ext_naming:NamingChecker", "N80": "N80 = pep8ext_naming:NamingChecker"}}, "pytest-cov": {"pytest11": {"pytest_cov": "pytest_cov = pytest_cov.plugin"}}, "mccabe": {"flake8.extension": {"C90": "C90 = mccabe:McCabeChecker"}}, "isort": {"pylama.linter": {"isort": "isort = isort.pylama_isort:Linter"}}, "ipython": {"pygments.lexers": {"ipython": "ipython = IPython.lib.lexers:IPythonLexer", "ipythonconsole": "ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer", "ipython3": "ipython3 = IPython.lib.lexers:IPython3Lexer"}}, "flake8": {"flake8.extension": {"pycodestyle.comparison_type": "pycodestyle.comparison_type = pycodestyle:comparison_type", "pycodestyle.missing_whitespace_after_import_keyword": "pycodestyle.missing_whitespace_after_import_keyword = pycodestyle:missing_whitespace_after_import_keyword", "pycodestyle.imports_on_separate_lines": "pycodestyle.imports_on_separate_lines = pycodestyle:imports_on_separate_lines", "pycodestyle.continued_indentation": "pycodestyle.continued_indentation = pycodestyle:continued_indentation", "pycodestyle.maximum_line_length": "pycodestyle.maximum_line_length = pycodestyle:maximum_line_length", "pycodestyle.python_3000_not_equal": "pycodestyle.python_3000_not_equal = pycodestyle:python_3000_not_equal", "pycodestyle.trailing_whitespace": "pycodestyle.trailing_whitespace = pycodestyle:trailing_whitespace", "pycodestyle.whitespace_around_operator": "pycodestyle.whitespace_around_operator = pycodestyle:whitespace_around_operator", "pycodestyle.python_3000_raise_comma": "pycodestyle.python_3000_raise_comma = pycodestyle:python_3000_raise_comma", "F": "F = flake8.plugins.pyflakes:FlakesChecker", "pycodestyle.python_3000_has_key": "pycodestyle.python_3000_has_key = pycodestyle:python_3000_has_key", "pycodestyle.comparison_negative": "pycodestyle.comparison_negative = pycodestyle:comparison_negative", "pycodestyle.blank_lines": "pycodestyle.blank_lines = pycodestyle:blank_lines", "pycodestyle.python_3000_backticks": "pycodestyle.python_3000_backticks = pycodestyle:python_3000_backticks", "pycodestyle.indentation": "pycodestyle.indentation = pycodestyle:indentation", "pycodestyle.comparison_to_singleton": "pycodestyle.comparison_to_singleton = pycodestyle:comparison_to_singleton", "pycodestyle.tabs_or_spaces": "pycodestyle.tabs_or_spaces = pycodestyle:tabs_or_spaces", "pycodestyle.module_imports_on_top_of_file": "pycodestyle.module_imports_on_top_of_file = pycodestyle:module_imports_on_top_of_file", "pycodestyle.missing_whitespace": "pycodestyle.missing_whitespace = pycodestyle:missing_whitespace", "pycodestyle.whitespace_around_named_parameter_equals": "pycodestyle.whitespace_around_named_parameter_equals = pycodestyle:whitespace_around_named_parameter_equals", "pycodestyle.explicit_line_join": "pycodestyle.explicit_line_join = pycodestyle:explicit_line_join", "pycodestyle.whitespace_around_comma": "pycodestyle.whitespace_around_comma = pycodestyle:whitespace_around_comma", "pycodestyle.missing_whitespace_around_operator": "pycodestyle.missing_whitespace_around_operator = pycodestyle:missing_whitespace_around_operator", "pycodestyle.compound_statements": "pycodestyle.compound_statements = pycodestyle:compound_statements", "pycodestyle.trailing_blank_lines": "pycodestyle.trailing_blank_lines = 
pycodestyle:trailing_blank_lines", "pycodestyle.extraneous_whitespace": "pycodestyle.extraneous_whitespace = pycodestyle:extraneous_whitespace", "pycodestyle.whitespace_around_keywords": "pycodestyle.whitespace_around_keywords = pycodestyle:whitespace_around_keywords", "pycodestyle.tabs_obsolete": "pycodestyle.tabs_obsolete = pycodestyle:tabs_obsolete", "pycodestyle.break_around_binary_operator": "pycodestyle.break_around_binary_operator = pycodestyle:break_around_binary_operator", "pycodestyle.whitespace_before_parameters": "pycodestyle.whitespace_before_parameters = pycodestyle:whitespace_before_parameters", "pycodestyle.whitespace_before_comment": "pycodestyle.whitespace_before_comment = pycodestyle:whitespace_before_comment"}, "flake8.report": {"default": "default = flake8.formatting.default:Default", "quiet-filename": "quiet-filename = flake8.formatting.default:FilenameOnly", "pylint": "pylint = flake8.formatting.default:Pylint", "quiet-nothing": "quiet-nothing = flake8.formatting.default:Nothing"}}} reentry-1.3.1/reentry/jsonbackend.py 0000664 0000000 0000000 00000023241 13461154274 0017540 0 ustar 00root root 0000000 0000000 # -*- coding: utf8 -*-
"""A backend that uses a json file to store entry points."""
import json
import six
from reentry.abcbackend import BackendInterface
from reentry.entrypoint import EntryPoint
class JsonBackend(BackendInterface):
"""
Backend using json
"""
def __init__(self, datafile=None):
super(JsonBackend, self).__init__()
from os.path import join, dirname, exists
self.datafile = join(dirname(__file__), 'js_data')
self.datafile = datafile or self.datafile
if not exists(self.datafile):
with open(self.datafile, 'w') as datafp:
datafp.write('{}')
self._epmap = self.read()
@property
def epmap(self):
return self._epmap.copy()
def read(self):
"""
read state from storage
"""
with open(self.datafile, 'r') as cache_file_obj:
return json.load(cache_file_obj)
def write(self):
"""
write the current state to storage
"""
with open(self.datafile, 'w') as cache_file_obj:
json.dump(self._epmap, cache_file_obj)
@staticmethod
def scan_pr_dist(dist):
"""Add a distribution, empty by default."""
dname = dist.project_name
epmap = dist.get_entry_map()
return dname, epmap
def write_dist_map(self, distname, entry_point_map=None):
dname = distname
entry_point_map = {k: {kk: str(vv) for kk, vv in six.iteritems(v)} for k, v in six.iteritems(entry_point_map)}
# update entry point storage
# --> only if there is something to update though
if entry_point_map:
if not self._epmap.get(dname):
self._epmap[dname] = {}
self._epmap[dname].update(entry_point_map)
self.write()
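        # Usage sketch (mirrors tests/test_json_backend.py::test_write_dist_map;
        # the distribution, group and entry point names are the test's own and purely illustrative):
        #   backend.write_dist_map(
        #       distname='test_dist',
        #       entry_point_map={'test_group': {'test_ep': 'test_ep = test_dist.test_module:test_member'}})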
def scan_st_dist(self, dist):
"""Add a distribution by name."""
return self.scan_pr_dist(self.pr_dist_from_name(dist))
def scan_install_dist(self, dist):
"""Add a distribution during it's install."""
distname = dist.get_name()
entrypoint_map = {}
dist_map = {}
if hasattr(dist, 'entry_points'):
dist_map = dist.entry_points or {}
for group, entrypoint_list in dist_map.items():
entrypoint_map[group] = {}
for entrypoint_string in entrypoint_list:
entry_point = EntryPoint.parse(entrypoint_string)
entrypoint_map[group][entry_point.name] = entrypoint_string
return distname, entrypoint_map
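        # Illustrative shape of input and output (the distribution object and its
        # entry points below are hypothetical):
        #   dist.entry_points == {'my_group': ['my_ep = my_pkg.my_module:main']}
        #   scan_install_dist(dist) -> ('my-pkg', {'my_group': {'my_ep': 'my_ep = my_pkg.my_module:main'}})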
def iter_group(self, group):
"""Iterate over entry points within a given group."""
for dist in self._epmap:
for _, entry_point_spec in six.iteritems(self._epmap[dist].get(group, {})):
yield EntryPoint.parse(entry_point_spec)
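        # Usage sketch (the 'console_scripts' group and the 'reentry' script are
        # declared in this repository's setup.json; actual output depends on what
        # has been scanned into the cache):
        #   names = [ep.name for ep in backend.iter_group('console_scripts')]
        #   # e.g. ['reentry', ...]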
def get_pr_dist_map(self, dist):
return self.get_dist_map(dist.project_name)
def get_dist_map(self, dist=None):
"""Return the entry map of a given distribution."""
if not dist:
return self._epmap.copy()
dmap = self._epmap.get(dist, {}).copy()
for gname in dmap:
for epname in dmap[gname]:
dmap[gname][epname] = EntryPoint.parse(dmap[gname][epname])
return dmap
def get_ep(self, group, name, dist=None):
"""
Get an entry point.
:param group: the group name
:param name: the entry point name
:param dist: if not given, search in all dists
        If no dist was given and the search turned up more than one
        entry point with the same name, a list of entry points is returned;
        otherwise a single entry point is returned, or None if nothing matched.
"""
if not dist:
specs = []
for dist_name in self._epmap.keys():
spc = self.get_ep(group, name, dist=dist_name)
if spc:
specs.append(spc)
# pylint: disable=no-else-return
if len(specs) > 1:
return specs
elif len(specs) == 1:
return specs[0]
distribution_map = self._epmap.get(dist, {})
group_map = distribution_map.get(group, {})
spec = group_map.get(name)
if spec:
return EntryPoint.parse(spec)
return None
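        # Usage sketch against the fixture data in tests/test_data.json:
        #   backend.get_ep('groupB', 'distA.epB', dist='distA')  # -> EntryPoint
        #   backend.get_ep('groupB', 'distA.epB')                # -> EntryPoint (name is unique across dists)
        #   backend.get_ep('groupB', 'no_such_ep')               # -> None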
def get_dist_names(self):
"""
Returns a list of distribution names
"""
        return list(self._epmap.keys())
def get_group_names(self):
"""
Returns a list of group names
"""
glist = []
for dist in self.get_dist_names():
glist.extend(self._epmap[dist].keys())
return list(set(glist))
def rm_dist(self, distname):
"""
        removes a distribution's entry points from the storage
"""
if distname in self.get_dist_names():
self._epmap.pop(distname)
self.write()
def rm_group(self, group):
"""
removes a group from all dists
"""
for dist in self._epmap:
self._epmap[dist].pop(group, None)
self.write()
def clear(self):
"""
completely clear entry_point storage
"""
self._epmap = {}
self.write()
def get_map(self, dist=None, group=None, name=None):
"""See BackendInterface docs."""
# sanitize dist kwarg
dist_list = self._dist_list_from_arg(dist)
# sanitize groups kwarg
group_list = self._group_list_from_arg(group)
# sanitize name kwarg
name_list = _listify(name)
filtered_entry_points = self._filter_entry_points(dist_list, group_list, name_list)
entry_point_map = {}
for entry_point, ep_info in six.iteritems(filtered_entry_points):
if not ep_info['group'] in entry_point_map:
entry_point_map[ep_info['group']] = {}
entry_point_map[ep_info['group']][ep_info['name']] = EntryPoint.parse(entry_point)
return entry_point_map
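        # Example with the same filtering semantics exercised in
        # tests/test_json_backend.py (fixture data from tests/test_data.json):
        #   backend.get_map(dist='distA', group='groupB', name=r'.*\.epB')
        #   # -> {'groupB': {'distA.epB': EntryPoint.parse('distA.epB = pkgA.moduleB:ClassB')}}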
def _filter_groups_by_distribution(self, distribution_list, group_list=None):
"""List only groups (optionally from a given list of groups) registered for the given list of distributions"""
if group_list is None:
group_list = self.get_group_names()
group_set = set()
for distribution in distribution_list:
if distribution not in self._epmap:
raise ValueError("The {} distribution was not found.".format(distribution))
else:
group_set.update([group_name for group_name in self._epmap[distribution].keys() if group_name in group_list])
return group_set
def _filter_entry_points(self, dist_list, group_list, name_list):
"""
Get a flat dict of annotated entry points, filtered by various criteria
The dict is formatted like _flat_entry_points() output
filter by::
* dist_list: list of distribution names
* group_list: list of group names
* name_list: list of regex patterns for entry point names
Example::
>> backend.epmap
{
'A': {
'B': {'entry_point_C': 'entry_point_c = A.foo:bar'},
...
},
'other_dist': {
'B': { ... },
...
},
...
}
>> backend._filter_entry_points(dist_list=['A'], group_list=['B'], name_list=['.*C.*'])
            {'entry_point_c = A.foo:bar':
                {'name': 'entry_point_C', 'group': 'B', 'dist': 'A'}
            }
"""
entry_points = self._flat_entry_points()
def matches(entry_point):
"""True if the entry point matches the filters."""
result = self._match_pattern_list_exact(entry_point['dist'], dist_list)
result &= self._match_pattern_list_exact(entry_point['group'], group_list)
result &= self._match_pattern_list_regex(entry_point['name'], name_list)
return result
return {k: v for k, v in six.iteritems(entry_points) if matches(v)}
@staticmethod
def _match_pattern_list_regex(name, pattern_list):
"""True if the entry point name matches one of a list of regex patterns."""
import re
if not pattern_list:
return True
return any([re.match(pattern, name) for pattern in pattern_list])
@staticmethod
def _match_pattern_list_exact(name, pattern_list):
if not pattern_list:
return True
return bool(name in pattern_list)
def _group_list_from_arg(self, group_arg):
group_list = _listify(group_arg)
if group_list is None:
group_list = self.get_group_names()
return group_list
def _dist_list_from_arg(self, dist_arg):
dist_list = _listify(dist_arg)
if dist_list is None:
dist_list = self.get_dist_names()
return dist_list
def _flat_entry_points(self):
"""Get a flat dict of entry points (keys) annotated with {name: .., group: .., dist: ..} (values)"""
epflat = {}
for distribution, dist_dict in six.iteritems(self._epmap):
for group, group_dict in six.iteritems(dist_dict):
for ep_name, entry_point in six.iteritems(group_dict):
epflat[entry_point] = {'name': ep_name, 'group': group, 'dist': distribution}
return epflat
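        # Example of the flat shape, reusing the names from the docstring of
        # _filter_entry_points above:
        #   {'entry_point_c = A.foo:bar': {'name': 'entry_point_C', 'group': 'B', 'dist': 'A'}}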
def _listify(sequence_or_name):
"""Wrap a single name in a list, leave sequences and None unchanged"""
    try:
        from collections.abc import Sequence  # moved here in python 3.3+
    except ImportError:  # python 2 fallback
        from collections import Sequence
# pylint: disable=no-else-return
if sequence_or_name is None:
return None
elif not isinstance(sequence_or_name, Sequence) or isinstance(sequence_or_name, six.string_types):
return [sequence_or_name]
return sequence_or_name
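# _listify behaviour at a glance (illustrative examples, not exhaustive):
#   _listify('distA')            -> ['distA']
#   _listify(['distA', 'distB']) -> ['distA', 'distB']
#   _listify(None)               -> None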
reentry-1.3.1/reentry/tests/ 0000775 0000000 0000000 00000000000 13461154274 0016045 5 ustar 00root root 0000000 0000000 reentry-1.3.1/reentry/tests/fixtures.py 0000664 0000000 0000000 00000002224 13461154274 0020270 0 ustar 00root root 0000000 0000000 # pylint: disable=unused-import,redefined-outer-name
"""Unit test fixtures"""
from os.path import join, dirname
import pytest
TEST_DATA_FILE = join(dirname(__file__), 'test_data.json')
@pytest.fixture
def test_data():
"""yield a temporary test cache file"""
import tempfile
import os
test_data = None
with open(TEST_DATA_FILE, 'r') as test_data_file_obj:
test_data = test_data_file_obj.read()
file_handle, test_file = tempfile.mkstemp()
with open(test_file, 'w') as test_file_obj:
test_file_obj.write(test_data)
yield test_file
os.close(file_handle)
os.remove(test_file)
@pytest.fixture
def bkend(test_data):
"""create a backend with test data"""
from reentry.jsonbackend import JsonBackend
test_file = test_data
return JsonBackend(datafile=test_file)
@pytest.fixture
def manager(bkend):
from reentry.default_manager import PluginManager
manager = PluginManager(backend=bkend)
yield manager
@pytest.fixture
def noscan_manager(bkend):
from reentry.default_manager import PluginManager
manager = PluginManager(backend=bkend, scan_for_not_found=False)
yield manager
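# Sketch of wiring a manager to a custom backend outside of these fixtures
# (the datafile path is illustrative):
#   from reentry.jsonbackend import JsonBackend
#   from reentry.default_manager import PluginManager
#   manager = PluginManager(backend=JsonBackend(datafile='/tmp/reentry_cache.json'),
#                           scan_for_not_found=False)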
reentry-1.3.1/reentry/tests/test_config.py 0000664 0000000 0000000 00000003536 13461154274 0020732 0 ustar 00root root 0000000 0000000 # pylint: disable=unused-import,redefined-outer-name
"""Unit tests for config tools."""
try:
# prefer the backport for Python <3.5
from pathlib2 import Path
except ImportError:
from pathlib import Path
import os
import pytest # pylint: disable=unused-import
import six
from six.moves import configparser
from reentry import config
def test_find_config():
config_file = config.find_config()
assert isinstance(config_file, Path)
def test_make_config_parser():
"""Make sure we get the right configparser type."""
parser = config.make_config_parser()
if six.PY2:
assert isinstance(parser, configparser.SafeConfigParser)
else:
assert isinstance(parser, configparser.ConfigParser)
def _check_config_valid(parser, expected_filename=None):
"""
Perform validations for a given config.
If expected_filename is given, check its value too.
"""
if six.PY2:
assert isinstance(parser, configparser.SafeConfigParser)
else:
assert isinstance(parser, configparser.ConfigParser)
assert parser.get('general', 'datadir')
assert parser.get('general', 'data_filename')
if expected_filename:
assert parser.get('general', 'data_filename') == expected_filename
def test_get_config():
"""Make sure the configparser gets created correctly."""
parser = config.get_config()
_check_config_valid(parser)
def test_get_config_with_env_var():
"""Make sure the configparser gets created correctly when REENTRY_DATA_FILENAME is set."""
data_filename = 'entrypoints'
os.environ['REENTRY_DATA_FILENAME'] = data_filename
parser = config.get_config()
os.environ.pop('REENTRY_DATA_FILENAME')
_check_config_valid(parser, data_filename)
def test_get_datafile():
datafile = Path(config.get_datafile())
assert datafile.is_file()
assert datafile.exists()
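# Environment variables touched by these tests (values are illustrative):
#   REENTRY_DATA_FILENAME=entrypoints   # overrides the cache file name (tested above)
#   REENTRY_DATADIR=<some directory>    # overrides the cache directory (see tox.ini)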
reentry-1.3.1/reentry/tests/test_data.json 0000664 0000000 0000000 00000000625 13461154274 0020713 0 ustar 00root root 0000000 0000000 {
"distA": {
"groupA": {
"distA.epA": "distA.epA = pkgA.moduleA:ClassA"
},
"groupB": {
"distA.epB": "distA.epB = pkgA.moduleB:ClassB"
}
},
"distB": {
"groupB": {
"distB.epB": "distB.epB = pkgB.moduleB:ClassB"
},
"groupC": {
"distB.epC": "distB.epC = pkgB.moduleC:ClassC"
}
}
}
reentry-1.3.1/reentry/tests/test_json_backend.py 0000664 0000000 0000000 00000006677 13461154274 0022116 0 ustar 00root root 0000000 0000000 # pylint: disable=unused-import,redefined-outer-name
"""Unit tests for js-backend"""
import json
import pytest
from fixtures import test_data, bkend
from reentry.entrypoint import EntryPoint
def test_get_map_default(bkend):
"""Test the backend's get_map method without arguments"""
ep_map = bkend.get_map()
assert 'groupB' in ep_map
assert 'distA.epB' in ep_map['groupB']
assert 'distB.epB' in ep_map['groupB']
assert isinstance(ep_map['groupB']['distB.epB'], EntryPoint)
def test_get_map_by_dist(bkend):
"""Test get_map output filtering with dist"""
ep_map = bkend.get_map(dist='distA')
assert 'distA.epB' in ep_map['groupB']
assert 'distB.epB' not in ep_map['groupB']
ep_map = bkend.get_map(dist=['distA', 'distB'])
assert 'distA.epB' in ep_map['groupB']
assert 'distB.epB' in ep_map['groupB']
def test_get_map_by_group(bkend):
"""Test get_map output filtering with group"""
ep_map = bkend.get_map(group='groupA')
assert 'groupA' in ep_map
assert 'groupB' not in ep_map
ep_map = bkend.get_map(group=['groupA', 'groupB'])
assert 'groupA' in ep_map
assert 'groupB' in ep_map
assert 'groupC' not in ep_map
def test_get_map_by_name(bkend):
"""Test get_map ouptut filering with name"""
ep_map = bkend.get_map(name=r'.*\.epB')
assert 'distA.epB' in ep_map['groupB']
assert 'distB.epB' in ep_map['groupB']
assert 'groupA' not in ep_map
ep_map = bkend.get_map(name=[r'.*\.epB', 'distB.epC'])
assert 'distA.epB' in ep_map['groupB']
assert 'distB.epC' in ep_map['groupC']
assert 'groupA' not in ep_map
def test_iter_group(bkend):
ep_list = list(bkend.iter_group('groupB'))
assert 'distA.epB' in [i.name for i in ep_list]
assert 'distB.epB' in [i.name for i in ep_list]
def test_group_names(bkend):
group_names = bkend.get_group_names()
assert 'groupA' in group_names
assert 'groupB' in group_names
assert 'groupC' in group_names
def test_dist_names(bkend):
dist_names = bkend.get_dist_names()
assert 'distA' in dist_names
assert 'distB' in dist_names
def test_write_dist_map(bkend):
entry_point_map = {'test_group': {'test_ep': 'test_ep = test_dist.test_module:test_member'}}
bkend.write_dist_map(distname='test_dist', entry_point_map=entry_point_map)
assert 'test_dist' in list(bkend.get_dist_names())
assert bkend.get_map(dist='test_dist')
def test_write_pr_dist(bkend):
"""Test caching entry points for a given pkg_resources - distribution"""
this_dist = bkend.pr_dist_from_name('reentry')
dist_name, ep_map = bkend.scan_pr_dist(this_dist)
assert dist_name == 'reentry'
assert 'test_entry_points' in ep_map
def test_write_st_dist(bkend):
"""Test caching entry points for a distribution given by name."""
dist_name, ep_map = bkend.scan_st_dist('reentry')
assert dist_name == 'reentry'
assert 'test_entry_points' in ep_map
def test_scan_install_dist(bkend):
"""Test caching entry points for a pkg_resources - distribution at install time."""
this_dist = bkend.pr_dist_from_name('reentry')
this_dist.get_name = lambda: 'reentry'
this_dist.entry_points = {'foo': ['bar = foo.bar:baz']}
dist_name, ep_map = bkend.scan_install_dist(this_dist)
assert dist_name == 'reentry'
assert 'foo' in ep_map
def test_rm_dist(bkend):
bkend.rm_dist('distA')
assert 'distA' not in bkend.epmap
def test_clear(bkend):
bkend.clear()
assert not bkend.get_map()
reentry-1.3.1/reentry/tests/test_manager.py 0000664 0000000 0000000 00000005567 13461154274 0021105 0 ustar 00root root 0000000 0000000 # pylint: disable=unused-import,redefined-outer-name
"""Unit tests for manager functions"""
import re
import pytest
from fixtures import test_data, bkend, manager, noscan_manager
def test_get_entry_map(manager):
"""test full map"""
epmap = manager.get_entry_map()
assert 'groupA' in epmap
assert 'groupB' in epmap
assert 'groupC' in epmap
def test_get_entry_map_dist(manager):
"""
The map for distA in the fixture should contain
    two groups with one entry point each
"""
epmap = manager.get_entry_map(dist_names='distA')
assert 'groupA' in epmap
assert 'groupB' in epmap
assert 'groupC' not in epmap
assert 'distA.epA' in epmap.get('groupA', {})
assert 'distA.epB' in epmap.get('groupB', {})
def test_entry_map_dist_group(manager):
"""Check filtering entry point map for a distribution by group"""
epmap = manager.get_entry_map(dist_names='distA', groups='groupA')
assert 'groupA' in epmap
assert 'groupB' not in epmap
assert 'distA.epA' in epmap.get('groupA', {})
def test_iter_entry_points(manager):
"""Test the drop-in replacement for pkg_resources.iter_entry_points"""
entry_points = manager.iter_entry_points(group='groupA')
assert 'distA.epA' in [e.name for e in entry_points]
entry_points = list(manager.iter_entry_points(group='groupB'))
assert 'distA.epB' in [e.name for e in entry_points]
assert 'distB.epB' in [e.name for e in entry_points]
def test_register(manager):
"""Test registering a distribution"""
manager.register('reentry')
ep_map = manager.get_entry_map(dist_names='reentry')
assert 'test_entry_points' in ep_map
assert 'console_scripts' not in ep_map
def test_scan(manager):
"""Test scanning for entry points"""
manager.scan()
ep_map = manager.get_entry_map(dist_names='reentry')
assert 'test_entry_points' in ep_map
assert 'console_scripts' not in ep_map
def test_scan_group(manager):
manager.scan(groups=['test_entry_points'])
ep_map = manager.get_entry_map(dist_names='reentry')
assert 'test_entry_points' in ep_map
assert 'console_scripts' not in ep_map
def test_scan_group_re(manager):
manager.scan(group_re=re.compile(r'test_[a-z]+_[a-z]+'))
assert 'test_entry_points' in manager.get_entry_map(dist_names='reentry')
manager.scan(group_re=r'console.*')
assert 'console_scripts' in manager.get_entry_map(dist_names='reentry')
def test_unregister(manager):
manager.unregister('distA')
assert 'distA' not in manager.distribution_names
def test_iter_scan(manager):
assert 'test' in [ep.name for ep in manager.iter_entry_points(group='test_entry_points')]
assert 'reentry' in [ep.name for ep in manager.iter_entry_points(group='console_scripts', name='reentry')]
def test_iter_noscan(noscan_manager):
assert 'test' not in [ep.name for ep in noscan_manager.iter_entry_points(group='test_entry_points')]
reentry-1.3.1/run_tests.sh 0000775 0000000 0000000 00000000335 13461154274 0015601 0 ustar 00root root 0000000 0000000 set -ev
# clean up from previous tests
rm -rf dist/ .tox/ tox-pip.log
# build both source distribution and wheel
# (source distribution enables tests with different python version)
python setup.py sdist bdist_wheel
tox
reentry-1.3.1/setup.json 0000664 0000000 0000000 00000002625 13461154274 0015253 0 ustar 00root root 0000000 0000000 {
"name": "reentry",
"version": "1.3.1",
"author": "Rico Haeuselmann",
"license": "MIT License",
"description": "A plugin manager based on setuptools entry points mechanism",
"entry_points": {
"distutils.setup_keywords": [
"reentry_register = reentry.hooks:register_dist"
],
"console_scripts": [
"reentry = reentry.cli:reentry"
],
"test_entry_points": [
"test = reentry.cli:reentry"
]
},
"classifiers": [
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Development Status :: 5 - Production/Stable",
"Environment :: Plugins",
"Intended Audience :: Developers",
"Topic :: Software Development"
],
"install_requires": [
"setuptools >= 36.2",
"click",
"six",
"pathlib2; python_version < '3.5'"
],
"extras_require": {
"dev": [
"pre-commit==1.14.4",
"prospector==1.1.5",
"pylint==1.9.4; python_version<'3.0'",
"pylint==2.2.2; python_version>='3.0'",
"astroid==2.1.0; python_version>='3.0'",
"yapf==0.27.0",
"flake8",
"pytest",
"coverage",
"pytest-cov",
"tox",
"packaging"
]
}
}
reentry-1.3.1/setup.py 0000664 0000000 0000000 00000001261 13461154274 0014725 0 ustar 00root root 0000000 0000000 # -*- coding: utf8 -*-
"""
Install configuration for setuptools / pip
"""
from os import path
from setuptools import setup, find_packages
import json
README_PATH = path.join(path.dirname(path.abspath(__file__)), 'README.rst')
JSON_PATH = path.join(path.dirname(path.abspath(__file__)), 'setup.json')
# Provide static information in setup.json
# such that it can be discovered automatically
with open(JSON_PATH, 'r') as info:
kwargs = json.load(info)
setup(packages=find_packages(),
include_package_data=True,
package_data={'': ['js_data', 'README.rst']},
long_description=open(README_PATH).read(),
long_description_content_type='text/x-rst',
**kwargs)
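# Downstream packages hook into reentry via the 'reentry_register' setup keyword
# declared in setup.json under 'distutils.setup_keywords'. A minimal sketch with a
# hypothetical plugin package; the exact value accepted for the keyword is
# validated by reentry.hooks:register_dist:
#
#   setup(
#       name='my-plugin',
#       reentry_register=True,
#       entry_points={'my.group': ['my_ep = my_plugin.module:MyClass']},
#   )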
reentry-1.3.1/tox.ini 0000664 0000000 0000000 00000001205 13461154274 0014524 0 ustar 00root root 0000000 0000000 [tox]
envlist = py27,py36
[testenv]
whitelist_externals = test {toxinidir}/run_coveralls.sh
passenv = TRAVIS TRAVIS_*
setenv = REENTRY_DATADIR={env:TOX_REENTRY_DATADIR:{toxworkdir}/reentry_data}
# install a number of "fake" packages for testing
deps = .[dev]
.travis-tests/noreg-pkg
.travis-tests/host-pkg
.travis-tests/plugin-pkg
.travis-tests/noep-pkg
install_command = pip install --pre --find-links={toxinidir}/dist --no-cache-dir --log=tox-pip.log {opts} {packages}
commands = pytest --cov-report=term-missing --cov={envsitepackagesdir}/reentry
reentry-test-hooks
reentry scan
reentry-test-hooks --with-noreg