pax_global_header00006660000000000000000000000064137356500340014520gustar00rootroot0000000000000052 comment=974efed465416636ba9246b2ba42faaedd001d59 reclass-1.7.0/000077500000000000000000000000001373565003400131615ustar00rootroot00000000000000reclass-1.7.0/.gitignore000066400000000000000000000001411373565003400151450ustar00rootroot00000000000000*.py[co] .*.sw? .DS_Store /reclass-config.yml /reclass.egg-info /build /dist /.coverage .kitchen reclass-1.7.0/.kitchen-verify.sh000077500000000000000000000010161373565003400165230ustar00rootroot00000000000000#!/bin/bash #set -x # setup source /*.env INVENTORY_BASE_URI=/tmp/kitchen/test/model/$MODEL RECLASS=/tmp/kitchen # prereq python -m ensurepip --default-pip pip install pipenv # env cd $RECLASS pipenv --venv || pipenv install --python ${PYVER} test -e /etc/reclsss || mkdir /etc/reclass cp -avf $INVENTORY_BASE_URI/reclass-config* /etc/reclass # verify for n in $(ls $INVENTORY_BASE_URI/nodes/*|sort); do pipenv run python${PYVER} ./reclass.py --inventory-base-uri=$INVENTORY_BASE_URI --nodeinfo $(basename $n .yml) done reclass-1.7.0/.kitchen.yml000066400000000000000000000015441373565003400154130ustar00rootroot00000000000000--- driver: name: docker priviledged: false use_sudo: false volume: - <%= ENV['PWD'] %>:/tmp/kitchen provisioner: name: shell script: .kitchen-verify.sh verifier: name: inspec <%- pyver = ENV['PYTHON_VERSION'] || '2.7' %> platforms: <% `find test/model -maxdepth 1 -mindepth 1 -type d |sort -u`.split().each do |model| %> <% model=model.split('/')[2] %> - name: <%= model %> driver_config: image: python:<%= pyver %> platform: ubuntu hostname: reclass provision_command: #FIXME, setup reclass env (prereq, configs, upload models) #- apt-get install -y rsync - echo " export LC_ALL=C.UTF-8;\n export LANG=C.UTF-8;\n export PYVER=<%= pyver %>;\n export MODEL=<%= model %>;\n " > /kitchen.env <% end %> suites: - name: model 
reclass-1.7.0/.pylintrc000066400000000000000000000000371373565003400150260ustar00rootroot00000000000000[MASTER] [REPORTS] reports=no reclass-1.7.0/.travis.yml000066400000000000000000000047321373565003400153000ustar00rootroot00000000000000sudo: required language: python dist: trusty cache: pip python: - '2.7' - '3.6' service: - docker #apt: #update: true #stages: #- name: test #- name: coverage #- name: models #- name: build # if: fork = false #- name: publish # if: tag =~ ^v.* and fork = false and branch = 'master' env: global: - PACKAGENAME="reclass" install: &pyinst - pip install -r requirements.txt #- pip install pyparsing #- pip install PyYAML # To test example models with kitchen: - | test -e Gemfile || cat < Gemfile source 'https://rubygems.org' gem 'rake' gem 'test-kitchen' gem 'kitchen-docker' gem 'kitchen-inspec' gem 'inspec' - bundle install script: - python setup.py install - find . reclass -name 'test_*.py' | sort | xargs -n1 -i% bash -c "echo %; python %" # To test example models with kitchen: - export PYTHON_VERSION=$TRAVIS_PYTHON_VERSION - kitchen list - kitchen test # NOTE: travis stage builds, below saved for future reference #jobs: # include: # - stage: test # script: &unittest # - python setup.py install # - find . reclass -name 'test_*.py' | sort | xargs -n1 -i% bash -c "echo %; python %" # # - stage: coverage # install: *pyinst # script: # - python3 -m pytest --cov=. --cov-report=term-missing:skip-covered # - coverage xml # #- coveralls # #- | # #[ ! 
-z "${CODACY_PROJECT_TOKEN}" ] && python-codacy-coverage -r coverage.xml || echo "Codacy coverage NOT exported" # # - stage: lint # script: # - python3 -m flake8 # # - stage: models # install: &kitchen # - pip install PyYAML # - pip install virtualenv # - | # test -e Gemfile || cat < Gemfile # source 'https://rubygems.org' # gem 'rake' # gem 'test-kitchen' # gem 'kitchen-docker' # gem 'kitchen-inspec' # gem 'inspec' # - bundle install # script: # - export PYTHON_VERSION=$TRAVIS_PYTHON_VERSION # - kitchen list # #FIXME- kitchen test # # - stage: build # install: *pyinst # script: [] # # - stage: publish # install: # - "/bin/true" # script: # - "/bin/true" # deploy: # provider: pypi # user: epcim # password: # secure: TBD # on: # tags: true # repo: salt-formulas/reclass # branch: master # #FIXME, $TRAVIS_PYTHON_VERSION == '2.7' notifications: webhooks: on_success: change # options: [always|never|change] default: always on_failure: never on_start: never on_cancel: never on_error: never email: true reclass-1.7.0/ChangeLog.rst000077700000000000000000000000001373565003400222632doc/source/changelog.rstustar00rootroot00000000000000reclass-1.7.0/LICENSE000066400000000000000000000222061373565003400141700ustar00rootroot00000000000000reclass is © 2007–2013 by martin f. krafft Released under the terms of the Artistic Licence 2.0. "The Artistic Licence 2.0" Copyright (c) 2000-2006, The Perl Foundation. http://www.perlfoundation.org/legal/licenses/artistic-2_0.html Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble ~~~~~~~~ This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software. 
You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement. Definitions ~~~~~~~~~~~ "Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package. "Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures. "You" and "your" means any person who would like to copy, distribute, or modify the Package. "Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version. "Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization. "Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees. "Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder. "Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder. "Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future. "Source" form means the source code, documentation source, and configuration files for the Package. "Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form. 
Permission for Use and Modification Without Distribution ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version. Permissions for Redistribution of the Standard Version ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package. (3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License. Distribution of Modified Versions of the Package as Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following: (a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version. (b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version. 
(c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under (i) the Original License or (ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed. Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license. (6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version. Aggregating or Linking the Package ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. 
The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation. (8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package. Items That are Not Considered Part of a Modified Version ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license. General Provisions ~~~~~~~~~~~~~~~~~~ (10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license. (11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license. (12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder. (13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. 
If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed. (14) Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. reclass-1.7.0/MANIFEST.in000066400000000000000000000006011373565003400147140ustar00rootroot00000000000000# Include the license and changelog include LICENSE ChangeLog.rst # Exclude development tooling exclude Makefile requirements.txt .pylintrc reclass.py # Exclude testing infra exclude run_tests.py prune reclass/tests prune reclass/datatypes/tests prune reclass/storage/tests prune reclass/utils/tests prune reclass/values/tests # Exclude "source only" content prune doc prune examples reclass-1.7.0/Makefile000066400000000000000000000025261373565003400146260ustar00rootroot00000000000000# # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # PYFILES = $(shell find -name .git -o -name dist -o -name build -prune -o -name '*.py' -print) tests: python ./run_tests.py .PHONY: tests lint: @echo pylint --rcfile=.pylintrc $(ARGS) … @pylint --rcfile=.pylintrc $(ARGS) $(PYFILES) .PHONY: lint lint-errors: ARGS=--errors-only lint-errors: lint .PHONY: lint-errors lint-report: ARGS=--report=y lint-report: lint .PHONY: lint-report coverage: .coverage python-coverage -r -m .PHONY: coverage .coverage: $(PYFILES) python-coverage -x setup.py nosetests docs: $(MAKE) -C doc man html GH_BRANCH=gh-pages HTMLDIR=doc/build/html docspub: ifeq ($(shell git branch --list $(GH_BRANCH)-base),) @echo "Please fetch the $(GH_BRANCH)-base branch from Github to be able to publish documentation:" >&2 @echo " git branch gh-pages-base origin/gh-pages-base" >&2 @false else $(MAKE) docs git checkout $(GH_BRANCH) || git checkout -b $(GH_BRANCH) $(GH_BRANCH)-base git reset --hard $(GH_BRANCH)-base git add $(HTMLDIR) git mv $(HTMLDIR)/* . git commit -m'Webpage update' git push -f $(shell git config --get branch.$(GH_BRANCH)-base.remote) $(GH_BRANCH) git checkout '@{-1}' endif docsclean: $(MAKE) -C doc clean reclass-1.7.0/Pipfile000066400000000000000000000004051373565003400144730ustar00rootroot00000000000000[[source]] url = "https://pypi.python.org/simple" verify_ssl = true name = "pypi" [dev-packages] [packages] pyparsing = "*" PyYAML = "*" six = "*" pyyaml = "*" enum34 = "*" # FIXME, issues with compile phase #"pygit2" = "*" [requires] python_version = "2.7" reclass-1.7.0/README-extensions.rst000066400000000000000000000436041373565003400170540ustar00rootroot00000000000000Escaping of References and Inventory Queries -------------------------------------------- Reference and inventory queries can be escaped to produce literal strings, for example: .. 
code-block:: yaml parameters: colour: Blue unescaped: The colour is ${colour} escaped: The colour is \${colour} double_escaped: The colour is \\${colour} This would produce: .. code-block:: yaml parameters: colour: Blue unescaped: The colour is Blue escaped: The colour is ${colour} double_escaped: The colour is \Blue Ignore class not found ---------------------- At some cases (bootstrapping, development) it can be convenient to ignore some missing classes. To control the feature there are two options available: .. code-block:: yaml ignore_class_notfound: False ignore_class_notfound_regexp: ['.*'] If you set regexp pattern to ``service.*`` all missing classes starting 'service.' will be logged with warning, but will not fail to return rendered reclass. Assuming all parameter interpolation passes. Merging Referenced Lists and Dictionaries ----------------------------------------- Referenced lists or dicts can now be merged: .. code-block:: yaml # nodes/test.yml classes: - test1 - test2 parameters: one: a: 1 b: 2 two: c: 3 d: 4 three: e: 5 # classes/test1.yml parameters: three: ${one} # classes/test2.yml parameters: three: ${two} ``running reclass.py --nodeinfo node1`` then gives: .. code-block:: yaml parameters: one: a: 1 b: 2 three: a: 1 b: 2 c: 3 d: 4 e: 5 two: c: 3 d: 4 This first sets the parameter three to the value of parameter one (class test1) then merges parameter two into parameter three (class test2) and finally merges the parameter three definition given in the node definition into the final value. Allow override list and dicts by empty entity,None instead of merge ------------------------------------------------------------------- With settings: .. code-block:: yaml allow_none_override: True # default True # note dict,list over None is allowed and not configurable Referenced lists or dicts can now be overriden by None or empty type of dict, list: .. 
code-block:: yaml # nodes/test.yml parameters: one: a: 1 b: 2 two: {} three: None # classes/test1.yml parameters: one: ${two} # classes/test2.yml parameters: three: ${one} Constant Parameters -------------------------- Parameters can be labeled as constant by using the prefix ``=`` .. code-block:: yaml parameters: =one: 1 If in the normal parameter merging a constant parameter would be changed then depending on the setting of ``strict_constant_parameters`` either an exception is raised (``strict_constant_parameters`` true) or the parameter is left unchanged and no notification or error is given (``strict_constant_parameters`` false) For example with: .. code-block:: yaml # nodes/node1.yml classes: - first - second # classes/first.yml parameters: =one: 1 # classes/second.yml parameters: one: 2 ``reclass.py --nodeinfo node1`` then gives an ''Attempt to change constant value'' error if ``strict_constant_parameters`` is true or gives: .. code-block:: yaml parameters: alpha: one: 1 if ``strict_constant_parameters`` is false Default value for ``strict_constant_parameters`` is True .. code-block:: yaml strict_constant_parameters: True Nested References ----------------- References can now be nested, for example: .. code-block:: yaml # nodes/node1.yml parameters: alpha: one: ${beta:${alpha:two}} two: a beta: a: 99 ``reclass.py --nodeinfo node1`` then gives: .. code-block:: yaml parameters: alpha: one: 99 two: a beta: a: 99 The ``${beta:${alpha:two}}`` construct first resolves the ``${alpha:two}`` reference to the value 'a', then resolves the reference ``${beta:a}`` to the value 99. Ignore overwritten missing references ------------------------------------- Given the following classes: .. code-block:: yaml # node1.yml classes: - class1 - class2 - class3 # class1.yml parameters: a: ${x} # class2.yml parameters: a: ${y} # class3.yml parameters: y: 1 The parameter ``a`` only depends on the parameter ``y`` through the reference set in class2. 
The fact that the parameter ``x`` referenced in class1 is not defined does not affect the final value of the parameter ``a``. For such overwritten missing references by default a warning is printed but no error is raised, providing the final value of the parameter being evaluated is a scalar. If the final value is a dictionary or list an error will always be raised in the case of a missing reference. Default value is True to keep backward compatible behavior. .. code-block:: yaml ignore_overwritten_missing_reference: True Print summary of missed references ---------------------------------- Instead of failing on the first undefinded reference error all missing reference errors are printed at once. .. code-block:: yaml reclass --nodeinfo mynode -> dontpanic Cannot resolve ${_param:kkk}, at mkkek3:tree:to:fail, in yaml_fs:///test/classes/third.yml Cannot resolve ${_param:kkk}, at mkkek3:tree:another:xxxx, in yaml_fs:///test/classes/third.yml Cannot resolve ${_param:kkk}, at mykey2:tree:to:fail, in yaml_fs:///test/classes/third.yml .. code-block:: yaml group_errors: True Use references in class names ----------------------------- Allows to use references in the class names. References pointed to in class names cannot themselves reference another key, they should be simple strings. To avoid pitfalls do not over-engineer your class references. They should be used only for core conditions and only for them. A short example: `- system.wrodpress.db.${_class:database_backend}`. Best practices: - use references in class names always load your global class specification prior the reference is used. - structure your class references under parameters under one key (for example `_class`). - use class references as a kind of "context" or "global" available options you always know what they are set. Class referencing for existing reclass users. Frequently when constructing your models you had to load or not load some classes based on your setup. 
In most cases this lead to fork of a model or introducing kind of template generator (like cookiecutter) to create a model based on the base "context" or "global" variables. Class referencing is a simple way how to avoid "pre-processors" like this and if/else conditions around class section. Assuming following class setup: * node is loading `third.yml` class only Classes: .. code-block:: yaml #/etc/reclass/classes/global.yml parameters: _class: env: override: 'env.dev' lab: name: default #/etc/reclass/classes/lab/env/dev.yml parameters: lab: name: dev #/etc/reclass/classes/second.yml classes: - global - lab.${_class:env:override} #/etc/reclass/classes/third.yml classes: - global - second Reclass --nodeinfo then returns: .. code-block:: yaml ... ... applications: [] environment: base exports: {} classes: - global - lab.${_class:env:override} - second parameters: _class: env: override: env.dev lab: name: dev ... ... Load classes with relative names -------------------------------- Load referenced class from a relative location to the current class. To load class from relative location start the class uri with "." or ".." char. The only supported reference is to nested tree structure below the current class. You are allowed to use syntax for relative uri to required class on any place on your model (first class loaded, init.yml, regular class .yml). The feature is expected to improve flexibility while sharing classes between your models. Please mpte that you can't use '..' without any calss following. If you want simply up in the sctructure, type in '..init'. It's a new feature use it with care and mind that using "relative syntax" lower traceability of your pillar composition. Example usage of relative class name using '.' and '..': .. code-block:: yaml #/etc/reclass/classes/component/defaults.yml classes: component: config: a: b .. code-block:: yaml #/etc/reclass/classes/component/init.yml classes: - .defaults .. 
code-block:: yaml #/etc/reclass/classes/component/configuration/init.yml classes: - ..defaults Inventory Queries ----------------- Inventory querying works using a new key type - exports to hold values which other node definitions can read using a $[] query, for example with: .. code-block:: yaml # nodes/node1.yml exports: test_zero: 0 test_one: name: ${name} value: 6 test_two: ${dict} parameters: name: node1 dict: a: 1 b: 2 exp_value_test: $[ exports:test_two ] exp_if_test0: $[ if exports:test_zero == 0 ] exp_if_test1: $[ exports:test_one if exports:test_one:value == 7 ] exp_if_test2: $[ exports:test_one if exports:test_one:name == self:name ] # nodes/node2.yml exports: test_zero: 0 test_one: name: ${name} value: 7 test_two: ${dict} parameters: name: node2 dict: a: 11 b: 22 ``running reclass.py --nodeinfo node1`` gives (listing only the exports and parameters): .. code-block:: yaml exports: test_one: name: node1 value: 6 test_two: a: 1 b: 2 parameters: dict: a: 1 b: 2 exp_if_test0: - node1 - node2 exp_if_test1: node2: name: node2 value: 7 exp_if_test2: node1: name: node1 value: 6 exp_value_test: node1: a: 1 b: 2 node2: a: 11 b: 22 name: node1 Exports defined for a node can be a simple value or a reference to a parameter in the node definition. The ``$[]`` inventory queries are calculated for simple value expressions, ``$[ exports:key ]``, by returning a dictionary with an element (``{ node_name: key value }``) for each node which defines 'key' in the exports section. For tests with a preceeding value, ``$[ exports:key if exports:test_key == test_value ]``, the element (``{ node_name: key value }``) is only added to the returned dictionary if the test_key defined in the node exports section equals the test value. For tests without a preceeding value, ``$[ if exports:test_key == test_value ]``, a list of nodes which pass the test is returned. For either test form the test value can either be a simple value or a node parameter. 
And as well as an equality test a not equals test (``!=``) can also be used. **Inventory query options** By default inventory queries only look at nodes in the same environment as the querying node. This can be overriden using the +AllEnvs option: .. code-block:: yaml $[ +AllEnvs exports:test ] Any errors in rendering the export parameters for a node will give an error for the inventory query as a whole. This can be overriden using the ``+IgnoreErrors`` option: .. code-block:: yaml $[ +IgnoreErrors exports:test ] With the ``+IgnoreErrors`` option nodes which generate an error evaluating ``exports:test`` will be ignored. Inventory query options can be combined: .. code-block:: yaml $[ +AllEnvs +IgnoreErrors exports:test ] **Logical operators and/or** The logical operators and/or can be used in inventory queries: .. code-block:: yaml $[ exports:test_value if exports:test_zero == 0 and exports:test_one == self:value ] The individual elements of the if statement are evaluated and combined with the logical operators starting from the left and working to the right. **Inventory query example** Defining a cluster of machines using an inventory query, for example to open access to a database server to a group of nodes. Given exports/parameters for nodes of the form: .. code-block:: yaml # for all nodes requiring access to the database server exports: host: ip_address: aaa.bbb.ccc.ddd cluster: _some_cluster_name_ .. code-block:: yaml # for the database server parameters: cluster_name: production-cluster postgresql: server: clients: $[ exports:host:ip_address if exports:cluster == self:cluster_name ] This will generate a dictionary with an entry for node where the ``export:cluster`` key for a node is equal to the ``parameter:cluster_name`` key of the node on which the inventory query is run on. Each entry in the generated dictionary will contain the value of the ``exports:host:ip_address`` key. The output dictionary (depending on node definitions) would look like: .. 
code-block:: yaml node1: ip_address: aaa.bbb.ccc.ddd node2: ip_address: www.xxx.yyy.zzz For nodes where exports:cluster key is not defined or where the key is not equal to self:cluster_name no entry is made in the output dictionary. In practise the exports:cluster key can be set using a parameter reference: .. code-block:: yaml exports: cluster: ${cluster_name} parameters: cluster_name: production-cluster The above exports and parameter definitions could be put into a separate class and then included by nodes which require access to the database and included by the database server as well. Compose node name --------------------------- Nodes can be defined in subdirectories. However, node names (filename) must be unique across all subdirectories. For example, the following file structure is invalid: .. code-block:: yaml inventory/nodes/prod/mysql.yml inventory/nodes/staging/mysql.yml With setting: .. code-block:: yaml compose_node_name: True # default False This adds the subfolder to the node name and the structure above can then be used. It generates the following reclass objects: .. code-block:: yaml nodes: prod.mysql: ... staging.mysql: ... If the subfolder path starts with the underscore character ``_``, then the subfolder path is NOT added to the node name. Git storage type ---------------- Reclass node and class yaml files can be read from a remote git repository with the yaml_git storage type. Use nodes_uri and classes_uri to define the git repos to use for nodes and classes. These can be the same repo. For salt masters using ssh connections the private and public keys must be readable by the salt daemon, which requires the private key NOT be password protected. For stand alone reclass using ssh connections if the privkey and pubkey options are not defined then any in memory key (from ssh-add) will be used. Salt master reclass config example: .. 
code-block:: yaml storage_type:yaml_git nodes_uri: # branch to use branch: master # cache directory (default: ~/.reclass/git/cache) cache_dir: /var/cache/reclass/git # lock directory (default: ~/.reclass/git/lock) lock_dir: /var/cache/reclass/lock # private key for ssh connections (no default, but will used keys stored # by ssh-add in memory if privkey and pubkey are not set) privkey: /root/salt_rsa # public key for ssh connections pubkey: /root/salt_rsa.pub repo: git+ssh://gitlab@remote.server:salt/nodes.git classes_uri: # branch to use or __env__ to use the branch matching the node # environment name branch: __env__ # cache directory (default: ~/.reclass/git/cache) cache_dir: /var/cache/reclass/git # lock directory (default: ~/.reclass/git/lock) lock_dir: /var/cache/reclass/lock # private key for ssh connections (no default, but will used keys stored # by ssh-add in memory if privkey and pubkey are not set) privkey: /root/salt_rsa # public key for ssh connections pubkey: /root/salt_rsa.pub # branch/env overrides for specific branches env_overrides: # prod env uses master branch - prod: branch: master # use master branch for nodes with no environment defined - none: branch: master repo: git+ssh://gitlab@remote.server:salt/site.git # root directory of the class hierarcy in git repo # defaults to root directory of git repo if not given root: classes Mixed storage type ------------------ Use a mixture of storage types. Salt master reclass config example, which by default uses yaml_git storage but overrides the location for classes for the pre-prod environment to use a directory on the local disc: .. 
code-block:: yaml storage_type: mixed nodes_uri: # storage type to use storage_type: yaml_git # yaml_git storage options branch: master cache_dir: /var/cache/reclass/git lock_dir: /var/cache/reclass/lock privkey: /root/salt_rsa pubkey: /root/salt_rsa.pub repo: git+ssh://gitlab@remote.server:salt/nodes.git classes_uri: # storage type to use storage_type: yaml_git # yaml_git storage options branch: __env__ cache_dir: /var/cache/reclass/git lock_dir: /var/cache/reclass/lock privkey: /root/salt_rsa pubkey: /root/salt_rsa.pub repo: git+ssh://gitlab@remote.server:salt/site.git root: classes env_overrides: - prod: branch: master - none: branch: master - pre-prod: # override storage type for this environment storage_type: yaml_fs # options for yaml_fs storage type uri: /srv/salt/env/pre-prod/classes Support to use current node parameters as references in class name ------------------------------------------------------------------ With the following reclass config: .. code-block:: => /etc/reclass/nodes/mynode.yml classes: - common parameters: project: myproject => /etc/reclass/classes/common.yml class: - ${project} => /etc/reclass/classes/myproject.yml parameters: some: project: parameters Will get the following result for the parameters: .. code-block:: yaml parameters: project: myproject some: project: parameters reclass-1.7.0/README.rst000066400000000000000000000023541373565003400146540ustar00rootroot00000000000000Reclass README ========================= This is the fork of original **reclass** that is available at: https://github.com/madduck/reclass Extentions ========== List of the core features: * Escaping of References and Inventory Queries * Merging Referenced Lists and Dictionaries * Nested References * Inventory Queries * Ignore class notfound/regexp option Documentation ============= .. _README-extensions: README-extensions.rst Documentation covering the original version is in the doc directory. 
See the `README-extensions`_ file for documentation on the extentions. .. include:: ./README-extensions.rst Reclass related projects/tools ============================== Queries: * yg, yaml grep with 'jq' syntax - https://gist.github.com/epcim/f1c5b748fa7c942de50677aef04f29f8, (https://asciinema.org/a/84173) * reclass-graph - https://github.com/tomkukral/reclass-graph Introspection, manupulation: * reclass-tools, for manipulating reclass models - https://github.com/dis-xcom/reclass_tools YAML merge tools: * spruce, general purpose YAML & JSON merging tool - https://github.com/geofffranks/spruce Other: * saltclass, new pillar/master_tops module for salt with the behaviour of reclass - https://github.com/saltstack/salt/pull/42349 reclass-1.7.0/doc/000077500000000000000000000000001373565003400137265ustar00rootroot00000000000000reclass-1.7.0/doc/.gitignore000066400000000000000000000000071373565003400157130ustar00rootroot00000000000000/build reclass-1.7.0/doc/Makefile000066400000000000000000000127141373565003400153730ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build -N PAPER = BUILDDIR = build # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/reclass.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/reclass.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/reclass" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/reclass" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 
texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." reclass-1.7.0/doc/source/000077500000000000000000000000001373565003400152265ustar00rootroot00000000000000reclass-1.7.0/doc/source/ansible.rst000066400000000000000000000200441373565003400173750ustar00rootroot00000000000000========================== Using reclass with Ansible ========================== .. warning:: I was kicked out of the Ansible community, presumably for `asking the wrong questions`_, and therefore I have no interest in developing this adapter anymore. If you use it and have changes, I will take your patch. .. _asking the wrong questions: https://github.com/madduck/reclass/issues/6 Quick start with Ansible ------------------------ The following steps should get you up and running quickly with |reclass| and `Ansible`_. Generally, we will be working in ``/etc/ansible``. 
However, if you are using a source-code checkout of Ansible, you might also want to work inside the ``./hacking`` directory instead. Or you can also just look into ``./examples/ansible`` of your |reclass| checkout, where the following steps have already been prepared. /…/reclass refers to the location of your |reclass| checkout. .. todo:: With |reclass| now in Debian, as well as installable from source, the following should be checked for path consistency… #. Complete the installation steps described in the :doc:`installation section `. #. Symlink ``/usr/share/reclass/reclass-ansible`` (or wherever your distro put that file), or ``/…/reclass/reclass/adapters/ansible.py`` (if running from source) to ``/etc/ansible/hosts`` (or ``./hacking/hosts``). #. Copy the two directories ``nodes`` and ``classes`` from the example subdirectory in the |reclass| checkout to ``/etc/ansible`` If you prefer to put those directories elsewhere, you can create ``/etc/ansible/reclass-config.yml`` with contents such as:: storage_type: yaml_fs inventory_base_uri: /srv/reclass Note that ``yaml_fs`` is currently the only supported ``storage_type``, and it's the default if you don't set it. #. Check out your inventory by invoking :: $ ./hosts --list which should return 5 groups in JSON format, and each group has exactly one member ``localhost``. 4. See the node information for ``localhost``:: $ ./hosts --host localhost This should print a set of keys and values, including a greeting, a colour, and a sub-class called ``__reclas__``. 5. Execute some ansible commands, e.g.:: $ ansible -i hosts \* --list-hosts $ ansible -i hosts \* -m ping $ ansible -i hosts \* -m debug -a 'msg="${greeting}"' $ ansible -i hosts \* -m setup $ ansible-playbook -i hosts test.yml 6. You can also invoke |reclass| directly, which gives a slightly different view onto the same data, i.e. 
before it has been adapted for Ansible:: $ /…/reclass/reclass.py --pretty-print --inventory $ /…/reclass/reclass.py --pretty-print --nodeinfo localhost Or, if |reclass| is properly installed, just use the |reclass| command. Integration with Ansible ------------------------ The integration between |reclass| and Ansible is performed through an adapter, and needs not be of our concern too much. However, Ansible has no concept of "nodes", "applications", "parameters", and "classes". Therefore it is necessary to explain how those correspond to Ansible. Crudely, the following mapping exists: ================= =============== |reclass| concept Ansible concept ================= =============== nodes hosts classes groups applications playbooks parameters host_vars ================= =============== |reclass| does not provide any ``group_vars`` because of its node-centric perspective. While class definitions include parameters, those are inherited by the node definitions and hence become node_vars. |reclass| also does not provide playbooks, nor does it deal with any of the related Ansible concepts, i.e. ``vars_files``, vars, tasks, handlers, roles, etc.. Let it be said at this point that you'll probably want to stop using ``host_vars``, ``group_vars`` and ``vars_files`` altogether, and if only because you should no longer need them, but also because the variable precedence rules of Ansible are full of surprises, at least to me. |reclass|' Ansible adapter massage the |reclass| output into Ansible-usable data, namely: - Every class in the ancestry of a node becomes a group to Ansible. 
This is mainly useful to be able to target nodes during interactive use of Ansible, e.g.:: $ ansible debiannode@wheezy -m command -a 'apt-get upgrade' → upgrade all Debian nodes running wheezy $ ansible ssh.server -m command -a 'invoke-rc.d ssh restart' → restart all SSH server processes $ ansible mailserver -m command -a 'tail -n1000 /var/log/mail.err' → obtain the last 1,000 lines of all mailserver error log files The attentive reader might stumble over the use of singular words, whereas it might make more sense to address all ``mailserver*s*`` with this tool. This is convention and up to you. I prefer to think of my node as a (singular) mailserver when I add ``mailserver`` to its parent classes. - Every entry in the list of a host's applications might well correspond to an Ansible playbook. Therefore, |reclass| creates a (Ansible-)group for every application, and adds ``_hosts`` to the name. This postfix can be configured with a CLI option (``--applications-postfix``) or in the configuration file (``applications_postfix``). For instance, the ssh.server class adds the ssh.server application to a node's application list. Now the admin might create an Ansible playbook like so:: - name: SSH server management hosts: ssh.server_hosts ← SEE HERE tasks: - name: install SSH package action: … … There's a bit of redundancy in this, but unfortunately Ansible playbooks hardcode the nodes to which a playbook applies. It's now trivial to apply this playbook across your infrastructure:: $ ansible-playbook ssh.server.yml My suggested way to use Ansible site-wide is then to create a ``site.yml`` playbook that includes all the other playbooks (which shall hopefully be based on Ansible roles), and then to invoke Ansible like this: ansible-playbook site.yml or, if you prefer only to reconfigure a subset of nodes, e.g. all webservers:: $ ansible-playbook site.yml --limit webserver Again, if the singular word ``webserver`` puts you off, change the convention as you wish. 
And if anyone comes up with a way to directly connect groups in the inventory with roles, thereby making it unnecessary to write playbook files (containing redundant information), please tell me! - Parameters corresponding to a node become ``host_vars`` for that host. Variable interpolation ---------------------- Ansible allows you to include `Jinja2`_-style variables in parameter values:: parameters: motd: greeting: Welcome to {{ ansible_fqdn }}! closing: This system is part of {{ realm }} dict_reference: {{ motd }} However, in resolving this, Ansible casts everything to a string, so in this example, ``dict_reference`` would be the string-representation of the dictionary under the ``motd`` key [#string_casts]_. To get at facts (such as ``ansible_fqdn``), you still have to use this approach, but for pure parameter references, I strongly suggest to use |reclass| interpolation instead, as it supports deep references, does not clobber type information, and is more efficient anyway:: parameters: motd: greeting: Welcome to {{ ansible_fqdn }}! closing: This system is part of ${realm} dict_reference: ${motd} Now you just need to specify realm somewhere. The reference can reside in a parent class, while the variable is defined e.g. in the node definition. And as expected, ``dict_reference`` now points to a dictionary, not a string-representation thereof. .. [#string_casts] I pointed this out to Michael Dehaan, Ansible's chief developer, but he denied this behaviour. When I tried to provide further insights, I found myself banned from the mailing list, apparently because I dared to point out flaws. If you care, you may look at https://github.com/madduck/reclass/issues/6 for more information. .. include:: extrefs.inc .. 
include:: substs.inc reclass-1.7.0/doc/source/changelog.rst000066400000000000000000000074621373565003400177200ustar00rootroot00000000000000========= ChangeLog ========= ========= ========== ======================================================== Version Date Changes ========= ========== ======================================================== 1.7.0 2020-10-02 Fixes and few new features: * Allow class mappings to wildcard match against either the node name and class * Support for .yaml along with .yml * Support to use current node parameters as references in class name 1.6.0 2018-11-06 * Python code and parser refactoring by a-ovchinnikov * Improvements in yaml_git and mixed setup by Andrew Pickford * Relative paths in class names by Petr Michalec, Martin Polreich and Andrew Pickford * Bug Fixes for recently added features 1.5.6 2018-07-30 * Fix, usage of integers as pillar keys * Refactoring python codebase by @a-ovchinkonv * New feature, "compose node name" from node subdirectory structure (by @gburiola) 1.5.5 2018-07 * Add immutable (constant) parameters * Fixes 1.5.4 2018-05 * Add support for salt 2018.3 * Add support for python 2.7/3.x * Extend tests coverage 1.5.3 2018 * Add new features + fixes - last 'known' full compatible release with original reclass - release shipped as well as .deb package at mirror.mirantis.com 1.5.x 2017 * Project forked under salt-formulas/reclass - based on @andrewpickford fork and community fixes - features against original are in README-extensions.rst 1.4.1 2014-10-28 * Revert debug logging, which wasn't fault-free and so it needs more time to mature. 1.4 2014-10-25 * Add rudimentary debug logging * Prevent interpolate() from overwriting merged values * Look for "init" instead of "index" when being fed a directory. * Fix error reporting on node name collision across subdirectories. 
1.3 2014-03-01 * Salt: pillar data from previous pillars are now available to reclass parameter interpolation * yaml_fs: classes may be defined in subdirectories (closes: #12, #19, #20) * Migrate Salt adapter to new core API (closes: #18) * Fix --nodeinfo invocation in docs (closes: #21) 1.2.2 2013-12-27 * Recurse classes obtained from class mappings (closes: #16) * Fix class mapping regexp rendering in docs (closes: #15) 1.2.1 2013-12-26 * Fix Salt adapter wrt. class mappings (closes: #14) 1.2 2013-12-10 * Introduce class mappings (see :doc:`operations`) (closes: #5) * Fix parameter interpolation across merged lists (closes: #13). * Caching of classes for performance reasons, especially during the inventory runs * yaml_fs: nodes may be defined in subdirectories (closes: #10). * Classes and nodes URI must not overlap anymore * Class names must not contain spaces 1.1 2013-08-28 Salt adapter: fix interface to include minion_id, filter output accordingly; fixes master_tops 1.0.2 2013-08-27 Fix incorrect versioning in setuptools 1.0.1 2013-08-27 Documentation updates, new homepage 1.0 2013-08-26 Initial release ========= ========== ======================================================== reclass-1.7.0/doc/source/concepts.rst000066400000000000000000000142401373565003400175770ustar00rootroot00000000000000================ reclass concepts ================ |reclass| assumes a node-centric perspective into your inventory. This is obvious when you query |reclass| for node-specific information, but it might not be clear when you ask |reclass| to provide you with a list of groups. In that case, |reclass| loops over all nodes it can find in its database, reads all information it can find about the nodes, and finally reorders the result to provide a list of groups with the nodes they contain. 
Since the term "groups" is somewhat ambiguous, it helps to start off with a short glossary of |reclass|-specific terminology: ============ ============================================================== Concept Description ============ ============================================================== node A node, usually a computer in your infrastructure class A category, tag, feature, or role that applies to a node Classes may be nested, i.e. there can be a class hierarchy application A specific set of behaviour to apply parameter Node-specific variables, with inheritance throughout the class hierarchy. ============ ============================================================== A class consists of zero or more parent classes, zero or more applications, and any number of parameters. A class name must not contain spaces. A node is almost equivalent to a class, except that it usually does not (but can) specify applications. When |reclass| parses a node (or class) definition and encounters a parent class, it recurses to this parent class first before reading any data of the node (or class). When |reclass| returns from the recursive, depth first walk, it then merges all information of the current node (or class) into the information it obtained during the recursion. Furthermore, a node (or class) may define a list of classes it derives from, in which case classes defined further down the list will be able to override classes further up the list. Information in this context is essentially one of a list of applications or a list of parameters. The interaction between the depth-first walk and the delayed merging of data means that the node (and any class) may override any of the data defined by any of the parent classes (ancestors). 
This is in line with the assumption that more specific definitions ("this specific host") should have a higher precedence than more general definitions ("all webservers", which includes all webservers in Munich, which includes "this specific host", for example). Here's a quick example, showing how parameters accumulate and can get replaced. All "unixnodes" (i.e. nodes who have the ``unixnode`` class in their ancestry) have ``/etc/motd`` centrally-managed (through the ``motd`` application), and the `unixnode` class definition provides a generic message-of-the-day to be put into this file. All descendants of the class ``debiannode``, a descendant of ``unixnode``, should include the Debian codename in this message, so the message-of-the-day is overwritten in the ``debiannodes`` class. The node ``quantum.example.org`` (a `debiannode`) will have a scheduled downtime this weekend, so until Monday, an appropriate message-of-the-day is added to the node definition. When the ``motd`` application runs, it receives the appropriate message-of-the-day (from ``quantum.example.org`` when run on that node) and writes it into ``/etc/motd``. At this point it should be noted that parameters whose values are lists or key-value pairs don't get overwritten by children classes or node definitions, but the information gets merged (recursively) instead. Similarly to parameters, applications also accumulate during the recursive walk through the class ancestry. It is possible for a node or child class to *remove* an application added by a parent class, by prefixing the application with `~`. Finally, |reclass| happily lets you use multiple inheritance, and ensures that the resolution of parameters is still well-defined. Here's another example building upon the one about ``/etc/motd`` above: ``quantum.example.org`` (which is back up and therefore its node definition no longer contains a message-of-the-day) is at a site in Munich. Therefore, it is a child of the class ``hosted@munich``. 
This class is independent of the ``unixnode`` hierarchy, ``quantum.example.org`` derives from both. In this example infrastructure, ``hosted@munich`` is more specific than ``debiannode`` because there are plenty of Debian nodes at other sites (and some non-Debian nodes in Munich). Therefore, ``quantum.example.org`` derives from ``hosted@munich`` _after_ ``debiannodes``. When an electricity outage is expected over the weekend in Munich, the admin can change the message-of-the-day in the ``hosted@munich`` class, and it will apply to all hosts in Munich. However, not all hosts in Munich have ``/etc/motd``, because some of them are of class ``windowsnode``. Since the ``windowsnode`` ancestry does not specify the ``motd`` application, those hosts have access to the message-of-the-day in the node variables, but the message won't get used… … unless, of course, ``windowsnode`` specified a Windows-specific application to bring such notices to the attention of the user. It's also trivial to ensure a certain order of class evaluation. Here's another example: The ``ssh.server`` class defines the ``permit_root_login`` parameter to ``no``. The ``backuppc.client`` class defines the parameter to ``without-password``, because the BackupPC server might need to log in to the host as root. Now, what happens if the admin accidentally provides the following two classes? - ``backuppc.client`` - ``ssh.server`` Theoretically, this would mean ``permit_root_login`` gets set to ``no``. However, since all ``backuppc.client`` nodes need ``ssh.server`` (at least in most setups), the class ``backuppc.client`` itself derives from ``ssh.server``, ensuring that it gets parsed before ``backuppc.client``. When |reclass| returns to the node and encounters the ``ssh.server`` class defined there, it simply skips it, as it's already been processed. Now read about :doc:`operations`! .. 
include:: substs.inc reclass-1.7.0/doc/source/conf.py000066400000000000000000000172621373565003400165350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # reclass documentation build configuration file, created by # sphinx-quickstart on Mon Aug 26 12:56:14 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../../')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'reclass' copyright = u'2013, martin f. krafft' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. import reclass.version # The short X.Y version. version = '.'.join(reclass.version.VERSION.split('.')[:2]) # The full version, including alpha/beta/rc tags. 
release = reclass.version.VERSION # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = 'reclass' # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
#html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. 
htmlhelp_basename = 'reclassdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). 'papersize': 'a4paper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'reclass.tex', u'reclass Documentation', u'martin f. krafft', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('manpage', 'reclass', u'command-line interface', [u'martin f. krafft'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'reclass', u'reclass Documentation', u'martin f. krafft', 'reclass', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. 
#texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' reclass-1.7.0/doc/source/configfile.rst000066400000000000000000000025661373565003400200760ustar00rootroot00000000000000========================== reclass configuration file ========================== |reclass| can read some of its configuration from a file. The file is a YAML-file and simply defines key-value pairs. The configuration file can be used to set defaults for all the options that are otherwise configurable via the command-line interface, so please use the ``--help`` output of |reclass| (or the :doc:`manual page `) for reference. The command-line option ``--nodes-uri`` corresponds to the key ``nodes_uri`` in the configuration file. For example:: storage_type: yaml_fs pretty_print: True output: json inventory_base_uri: /etc/reclass nodes_uri: ../nodes |reclass| first looks in the current directory for the file called ``reclass-config.yml`` (see ``reclass/defaults.py``) and if no such file is found, it looks in ``$HOME``, then in ``/etc/reclass``, and then "next to" the ``reclass`` script itself, i.e. if the script is symlinked to ``/srv/provisioning/reclass``, then the the script will try to access ``/srv/provisioning/reclass-config.yml``. Note that ``yaml_fs`` is currently the only supported ``storage_type``, and it's the default if you don't set it. Adapters may implement their own lookup logic, of course, so make sure to read their documentation (for :doc:`Salt `, for :doc:`Ansible `, and for :doc:`Puppet `). .. include:: substs.inc reclass-1.7.0/doc/source/extrefs.inc000066400000000000000000000004601373565003400174010ustar00rootroot00000000000000.. _Puppet: http://puppetlabs.com/puppet/puppet-open-source .. _Salt: http://saltstack.com/community .. _Ansible: http://www.ansibleworks.com .. _Hiera: http://projects.puppetlabs.com/projects/hiera .. _Artistic Licence 2.0: http://opensource.org/licenses/Artistic-2.0 .. 
Contributions to |reclass| are very welcome. Since I prefer to keep a somewhat clean history, I will not just merge pull requests. You can submit pull requests, of course, and I'll rebase them onto ``HEAD`` before merging. Or send your patches using ``git-format-patch`` and ``git-send-email``
;) There are a number of items on the :doc:`to-do list `, so if you are bored… If you have larger ideas, I'll be looking forward to discuss them with you. .. include:: substs.inc reclass-1.7.0/doc/source/index.rst000066400000000000000000000043121373565003400170670ustar00rootroot00000000000000================================================ reclass — Recursive external node classification ================================================ .. include:: intro.inc Releases and source code ------------------------ The latest released |reclass| version is |release|. Please have a look at the :doc:`change log ` for information about recent changes. For now, |reclass| is hosted `on Github`_, and you may clone it with the following command:: git clone https://github.com/madduck/reclass.git Please see the :doc:`install instructions ` for information about distribution packages and tarballs. .. _on Github: https://github.com/madduck/reclass Community --------- There is a `mailing list`_, where you can bring up anything related to |reclass|. .. _mailing list: http://lists.pantsfullofunix.net/listinfo/reclass For real-time communication, please join the ``#reclass`` IRC channel on ``irc.oftc.net``. If you're using `Salt`_, you can also ask your |reclass|-and-Salt-related questions on the mailing list, ideally specifying "reclass" in the subject of your message. Licence ------- |reclass| is © 2007–2014 by martin f. krafft and released under the terms of the `Artistic Licence 2.0`_. Contents -------- These documents aim to get you started with |reclass|: .. toctree:: :maxdepth: 2 install concepts operations usage refs manpage configfile salt ansible puppet hacking todo changelog About the name -------------- "reclass" stands for **r**\ ecursive **e**\ xternal node **class**\ ifier, which is somewhat of a misnomer. I chose the name very early on, based on the recursive nature of the data merging. 
Download the tarball_ from ``aur`` or ``yaourt``::
_obtain it from the Debian archive: http://http.debian.net/debian/pool/main/r/reclass/ Before you can use |reclass|, you need to install it into a place where Python can find it. The following step should install the package to ``/usr/local``:: $ python setup.py install If you want to install to a different location, use --prefix like so:: $ python setup.py install --prefix=/opt/local .. todo:: These will install the ``reclass-salt`` and ``reclass-ansible`` adapters to ``$prefix/bin``, but they should go to ``$prefix/share/reclass``. How can setup.py be told to do so? It would be better for consistency if this was done "upstream", rather than fixed by the distros. Just make sure that the destination is in the Python module search path, which you can check like this:: $ python -c 'import sys; print sys.path' More options can be found in the output of :: $ python setup.py install --help $ python setup.py --help $ python setup.py --help-commands $ python setup.py --help [cmd] If you just want to run |reclass| from source, e.g. because you are going to be making and testing changes, install it in "development mode":: $ python setup.py develop To uninstall (the rm call is necessary due to `a bug in setuptools`_):: $ python setup.py develop --uninstall $ rm /usr/local/bin/reclass* `Uninstallation currently isn't possible`_ for packages installed to /usr/local as per the above method, unfortunately. The following should do:: $ rm -r /usr/local/lib/python*/dist-packages/reclass* /usr/local/bin/reclass* .. _a bug in setuptools: http://bugs.debian.org/714960 .. _Uninstallation currently isn't possible: http://bugs.python.org/issue4673 .. include:: substs.inc reclass-1.7.0/doc/source/intro.inc000066400000000000000000000026071373565003400170610ustar00rootroot00000000000000|reclass| is an "external node classifier" (ENC) as can be used with automation tools, such as `Puppet`_, `Salt`_, and `Ansible`_. It is also a stand-alone tool for merging data sources recursively. 
|reclass| allows you to define your nodes through class inheritance, while always being able to override details further up the tree (i.e. in more specific nodes).
Options ------- Please see the output of ``reclass --help`` for the default values of these options: Database options '''''''''''''''' -s, --storage-type The type of storage backend to use -b, --inventory-base-uri The base URI to prepend to nodes and classes -u, --nodes-uri The URI to the nodes storage -c, --classes-uri The URI to the classes storage Output options '''''''''''''' -o, --output The output format to use (yaml or json) -y, --pretty-print Try to make the output prettier Modes ''''' -i, --inventory Output the entire inventory -n, --nodeinfo Output information for a specific node Information ''''''''''' -h, --help Help output --version Display version number See also -------- Please visit http://reclass.pantsfullofunix.net/ for more information about |reclass|. The documentation is also available from the ``./doc`` subtree in the source checkout, or from ``/usr/share/doc/reclass-doc``. .. include:: substs.inc .. include:: extrefs.inc reclass-1.7.0/doc/source/operations.rst000066400000000000000000000157531373565003400201560ustar00rootroot00000000000000================== reclass operations ================== YAML FS storage --------------- While |reclass| has been built to support different storage backends through plugins, currently only the ``yaml_fs`` storage backend exists. This is a very simple, yet powerful, YAML-based backend, using flat files on the filesystem (as suggested by the ``_fs`` postfix). ``yaml_fs`` works with two directories, one for node definitions, and another for class definitions. The two directories must not be the same, nor can one be a parent of the other. Files in those directories are YAML-files, specifying key-value pairs. 
applications a list of applications to append
and ``hosted@switzerland`` to all nodes whose names end with ``.ch``
When False (the default) the match is done against the node name
* I `presented reclass`__ at `LCA 2014`_, which has been recorded:
_LCA 2014: https://lca2014.linux.org.au * I gave `a talk about reclass`__ at `DebConf13`_, which has been recorded: * `Slides`__ * Video recording: `high quality (ogv)`__ | `high quality (webm)`__ | `low(er) quality (ogv)`__ __ http://penta.debconf.org/dc13_schedule/events/1048.en.html __ http://annex.debconf.org/debconf-share/debconf13/slides/reclass.pdf __ http://meetings-archive.debian.net/pub/debian-meetings/2013/debconf13/high/1048_Recursive_node_classification_for_system_automation.ogv __ http://meetings-archive.debian.net/pub/debian-meetings/2013/debconf13/webm-high/1048_Recursive_node_classification_for_system_automation.webm __ http://meetings-archive.debian.net/pub/debian-meetings/2013/debconf13/low/1048_Recursive_node_classification_for_system_automation.ogv .. _DebConf13: http://debconf13.debconf.org .. include:: substs.inc reclass-1.7.0/doc/source/salt.rst000066400000000000000000000177201373565003400167320ustar00rootroot00000000000000======================= Using reclass with Salt ======================= .. warning:: You need Salt 0.17 to use `reclass`, as older versions do not include the `reclass` adapter. You could use the ``cmd_yaml`` adapters, but at least for ``ext_pillar``, they are currently not useable, as they `do not export the minion ID to the command they run`_. .. _do not export the minion ID to the command they run: https://github.com/saltstack/salt/issues/2276 Quick start ----------- The following steps should get you up and running quickly with |reclass| and `Salt`_. You will need to decide for yourself where to put your |reclass| inventory. This can be your first ``base`` ``file_root`` (the default), or it could be ``/etc/reclass``, or ``/srv/salt``. The following shall assume the latter. Or you can also just look into ``./examples/salt`` of your |reclass| checkout (``/usr/share/doc/examples/salt`` on Debian-systems), where the following steps have already been prepared. /…/reclass refers to the location of your |reclass| checkout. .. 
todo:: With |reclass| now in Debian, as well as installable from source, the following should be checked for path consistency… #. Complete the installation steps described in the :doc:`installation section `. Alternatively, you can also tell Salt via the master config file where to look for |reclass|, but then you won't be able to interact with |reclass| through the command line. #. Copy the two directories ``nodes`` and ``classes`` from the example subdirectory in the |reclass| checkout to e.g. ``/srv/salt``. It's handy to symlink |reclass|' Salt adapter itself to that directory:: $ ln -s /usr/share/reclass/reclass-salt /srv/salt/states/reclass As you can now just inspect the data right there from the command line:: $ ./reclass --top If you don't want to do this, you can also let |reclass| know where to look for the inventory with the following contents in ``$HOME/reclass-config.yml``:: storage_type: yaml_fs base_inventory_uri: /srv/reclass Or you can reuse the first entry of ``file_roots`` under ``base`` in the Salt master config. Note that ``yaml_fs`` is currently the only supported ``storage_type``, and it's the default if you don't set it. #. Check out your inventory by invoking :: $ reclass-salt --top which should return all the information about all defined nodes, which is only ``localhost`` in the example. This is essentially the same information that you would keep in your ``top.sls`` file. If you symlinked the script to your inventory base directory, use :: $ ./reclass --top #. See the pillar information for ``localhost``:: $ reclass-salt --pillar localhost #. Now add |reclass| to ``/etc/salt/master``, like so:: reclass: &reclass inventory_base_uri: /srv/salt reclass_source_path: ~/code/reclass master_tops: […] reclass: *reclass ext_pillar: - reclass: *reclass .. warning:: When using ``ext_pillar`` and/or ``master_tops``, you should make sure that your ``file_roots`` paths do not contain a ``top.sls`` file. 
Even though they ought to be able to coexist, there are a few sharp edges around at the moment, so beware! If you did not install |reclass| (but you are running it from source), you can either specify the source path like above, or you can add it to ``PYTHONPATH`` before invoking the Salt master, to ensure that Python can find it:: PYTHONPATH=/…/reclass /etc/init.d/salt-master restart #. Provided that you have set up ``localhost`` as a Salt minion, the following commands should now return the same data as above, but processed through salt:: $ salt localhost pillar.items # shows just the parameters $ salt localhost state.show_top # shows only the states (applications) Alternatively, if you don't have the Salt minion running yet:: $ salt-call pillar.items # shows just the parameters $ salt-call state.show_top # shows only the states (applications) #. You can also invoke |reclass| directly, which gives a slightly different view onto the same data, i.e. before it has been adapted for Salt:: $ reclass --inventory $ reclass --nodeinfo localhost Configuration file and master configuration ------------------------------------------- Even though the Salt adapter of |reclass| looks for and reads the :doc:`configuration file `, a better means to pass information to the adapter is via Salt's master configuration file, as shown above. Not all configuration options can be passed this way (e.g. ``output`` is hardcoded to YAML, which Salt uses), but it *is* possible to specify :doc:`class mappings ` next to all the storage-specific options. .. warning:: The Salt CLI adapter does *not* read Salt's master configuration, so if you are calling ``reclass-salt`` from the command-line (the CLI exists for debugging purposes, mainly), be aware that it will be run in a different environment than when Salt queries reclass directly. Integration with Salt --------------------- |reclass| hooks into Salt at two different points: ``master_tops`` and ``ext_pillar``. 
For both, Salt provides plugins. These plugins need to know where to find |reclass|, so if |reclass| is not properly installed (but you are running it from source), make sure to export ``PYTHONPATH`` accordingly before you start your Salt master, or specify the path in the master configuration file, as show above. Salt has no concept of "nodes", "applications", "parameters", and "classes". Therefore it is necessary to explain how those correspond to Salt. Crudely, the following mapping exists: ================= ================ |reclass| concept Salt terminology ================= ================ nodes hosts classes (none) [#nodegroups]_ applications states parameters pillar environment environment ================= ================ .. [#nodegroups] See `Salt issue #5787`_ for steps into the direction of letting |reclass| provide nodegroup information. .. _Salt issue #5787: https://github.com/saltstack/salt/issues/5787 Whatever applications you define for a node will become states applicable to a host. If those applications are added via ancestor classes, then that's fine, but currently, Salt does not do anything with the classes ancestry. Similarly, all parameters that are collected and merged eventually end up in the pillar data of a specific node. The pillar data of a node include all the information about classes and applications, so you could theoretically use them to target your Salt calls at groups of nodes defined in the |reclass| inventory, e.g. :: salt -I __reclass__:classes:salt_minion test.ping Unfortunately, this does not work yet, please stay tuned, and let me know if you figure out a way. `Salt issue #5787`_ is also of relevance. Optionally, data from pillars that run before the |reclass| ``ext_pillar`` (i.e. Salt's builtin ``pillar_roots``, as well as other ``ext_pillar`` modules listed before the ``reclass_adapter``) can be made available to |reclass|. 
I got side-tracked into discussions about the philosophy of mocking objects.
One of those derives from ``OPT_INVENTORY_BASE_URI``
At the moment, I am unsure what the best way forward is. Inventory filters ----------------- As described in `issue #11 `_: provide a means to limit the enumeration of the inventory, according to node name patterns, or using classes white-/blacklists. .. include:: substs.inc reclass-1.7.0/doc/source/usage.rst000066400000000000000000000035471373565003400170750ustar00rootroot00000000000000============= Using reclass ============= .. todo:: With |reclass| now in Debian, as well as installable from source, the following should be checked for path consistency… For information on how to use |reclass| directly, call ``reclass --help`` and study the output, or have a look at its :doc:`manual page `. The three options, ``--inventory-base-uri``, ``--nodes-uri``, and ``--classes-uri`` together specify the location of the inventory. If the base URI is specified, then it is prepended to the other two URIs, unless they are absolute URIs. If these two URIs are not specified, they default to ``nodes`` and ``classes``. Therefore, if your inventory is in ``/etc/reclass/nodes`` and ``/etc/reclass/classes``, all you need to specify is the base URI as ``/etc/reclass`` — which is actually the default (specified in ``reclass/defaults.py``). If you've installed |reclass| from source as per the :doc:`installation instructions `, try to run it from the source directory like this:: $ reclass -b examples/ --inventory $ reclass -b examples/ --nodeinfo localhost This will make it use the data from ``examples/nodes`` and ``examples/classes``, and you can surely make your own way from here. On Debian-systems, use the following:: $ reclass -b /usr/share/doc/reclass/examples/ --inventory $ reclass -b /usr/share/doc/reclass/examples/ --nodeinfo localhost More commonly, however, use of |reclass| will happen indirectly, and through so-called adapters. 
The job of an adapter is to translate between different invocation paradigms, provide a sane set of default options, and massage the data from |reclass| into the format expected by the automation tool in use. Please have a look at the respective README files for these adapters, i.e. for :doc:`Salt `, for :doc:`Ansible `, and for :doc:`Puppet `. .. include:: substs.inc reclass-1.7.0/examples/000077500000000000000000000000001373565003400147775ustar00rootroot00000000000000reclass-1.7.0/examples/ansible/000077500000000000000000000000001373565003400164145ustar00rootroot00000000000000reclass-1.7.0/examples/ansible/hosts000077500000000000000000000001401373565003400174750ustar00rootroot00000000000000#!/bin/sh cd ../../ PYTHONPATH="`pwd`:$PYTHONPATH" exec python reclass/adapters/ansible.py "$@" reclass-1.7.0/examples/ansible/reclass-config.yml000066400000000000000000000000271373565003400220350ustar00rootroot00000000000000inventory_base_uri: .. reclass-1.7.0/examples/ansible/test.yml000066400000000000000000000002031373565003400201110ustar00rootroot00000000000000- name: Test playbook against all test hosts hosts: test_hosts tasks: - name: Greet the world debug: msg='$greeting' reclass-1.7.0/examples/classes/000077500000000000000000000000001373565003400164345ustar00rootroot00000000000000reclass-1.7.0/examples/classes/debian/000077500000000000000000000000001373565003400176565ustar00rootroot00000000000000reclass-1.7.0/examples/classes/debian/init.yml000066400000000000000000000025221373565003400213450ustar00rootroot00000000000000applications: - apt - locales parameters: debian_stable_suite: wheezy apt: repo_uri: http://http.debian.net/debian repo_uri_security: http://security.debian.org/debian-security default_components: main # TODO: pass as a list! 
include_sources: no include_security: yes include_updates: yes include_proposed_updates: no disable_sources_dir: no disable_preferences_dir: no acquire_pdiffs: no install_recommends: no cache_limit: 67108864 apt_repos: - id: debian enabled: yes uri: ${apt:repo_uri} components: ${apt:default_components} sources: ${apt:include_sources} - id: debian-security enabled: ${apt:include_security} uri: ${apt:repo_uri_security} suite_postfix: /updates components: ${apt:default_components} sources: ${apt:include_sources} - id: debian-updates enabled: ${apt:include_updates} suite_postfix: -updates uri: ${apt:repo_uri} components: ${apt:default_components} sources: ${apt:include_sources} - id: debian-proposed-updates enabled: ${apt:include_proposed_updates} uri: ${apt:repo_uri} suite_postfix: -proposed-updates components: ${apt:default_components} sources: ${apt:include_sources} locales: list: - en_NZ.UTF-8 UTF-8 - de_CH.UTF-8 UTF-8 reclass-1.7.0/examples/classes/debian/release/000077500000000000000000000000001373565003400212765ustar00rootroot00000000000000reclass-1.7.0/examples/classes/debian/release/jessie.yml000066400000000000000000000001101373565003400232730ustar00rootroot00000000000000classes: - debian.suite.testing parameters: debian_codename: jessie reclass-1.7.0/examples/classes/debian/release/lenny.yml000066400000000000000000000001101373565003400231360ustar00rootroot00000000000000classes: - debian.suite.archived parameters: debian_codename: lenny reclass-1.7.0/examples/classes/debian/release/sid.yml000066400000000000000000000001061373565003400225750ustar00rootroot00000000000000classes: - debian.suite.unstable parameters: debian_codename: sid reclass-1.7.0/examples/classes/debian/release/squeeze.yml000066400000000000000000000001131373565003400234750ustar00rootroot00000000000000classes: - debian.suite.oldstable parameters: debian_codename: squeeze 
reclass-1.7.0/examples/classes/debian/release/wheezy.yml000066400000000000000000000001071373565003400233320ustar00rootroot00000000000000classes: - debian.suite.stable parameters: debian_codename: wheezy reclass-1.7.0/examples/classes/debian/suite/000077500000000000000000000000001373565003400210075ustar00rootroot00000000000000reclass-1.7.0/examples/classes/debian/suite/archived.yml000066400000000000000000000007541373565003400233250ustar00rootroot00000000000000classes: - debian parameters: debian_suite: archived apt: repo_uri: http://archive.debian.org/debian repo_uri_security: http://archive.debian.org/debian-security repo_uri_backports: http://archive.debian.org/debian-backports repo_uri_volatile: http://archive.debian.org/debian-volatile include_security: no include_updates: no include_proposed_updates: no motd: newsitems: - This host is no longer kept up-to-date and will be decomissioned soon. reclass-1.7.0/examples/classes/debian/suite/include_backports.yml000066400000000000000000000005371373565003400252320ustar00rootroot00000000000000classes: - debian parameters: apt: repo_uri_backports: http://http.debian.net/debian include_backports: yes apt_repos: - id: debian-backports enabled: ${apt:include_backports} uri: ${apt:repo_uri_backports} suite_postfix: -backports components: ${apt:default_components} sources: ${apt:include_sources} reclass-1.7.0/examples/classes/debian/suite/include_experimental.yml000066400000000000000000000005321373565003400257320ustar00rootroot00000000000000classes: - debian parameters: apt: repo_uri_experimental: ${apt:repo_uri} include_experimental: yes apt_repos: - id: debian-experimental enabled: ${apt:include_experimental} uri: ${apt:repo_uri_experimental} suite: experimental components: ${apt:default_components} sources: ${apt:include_sources} reclass-1.7.0/examples/classes/debian/suite/include_multimedia.yml000066400000000000000000000005001373565003400253620ustar00rootroot00000000000000classes: - debian parameters: apt: 
repo_uri_multimedia: http://deb-multimedia.org include_multimedia: yes apt_repos: - id: debian-multimedia enabled: ${apt:include_multimedia} uri: ${apt:repo_uri_multimedia} components: ${apt:default_components} sources: ${apt:include_sources} reclass-1.7.0/examples/classes/debian/suite/include_volatile.yml000066400000000000000000000010411373565003400250500ustar00rootroot00000000000000classes: - debian parameters: apt: repo_uri_volatile: ${repo_uri}-volatile include_volatile: True include_volatile_sloppy: False apt_repos: - id: debian-volatile enabled: ${apt:include_volatile} uri: ${apt:repo_uri_volatile} components: ${apt:default_components} sources: ${apt:include_sources} - id: debian-volatile-sloppy enabled: ${apt:include_volatile_sloppy} uri: ${apt:repo_uri_volatile}-sloppy components: ${apt:default_components} sources: ${apt:include_sources} reclass-1.7.0/examples/classes/debian/suite/oldstable.yml000066400000000000000000000002241373565003400235010ustar00rootroot00000000000000classes: - debian parameters: debian_suite: oldstable apt: include_security: yes include_updates: no include_proposed_updates: no reclass-1.7.0/examples/classes/debian/suite/stable.yml000066400000000000000000000002221373565003400230000ustar00rootroot00000000000000classes: - debian parameters: debian_suite: stable apt: include_security: yes include_updates: yes include_proposed_updates: no reclass-1.7.0/examples/classes/debian/suite/testing.yml000066400000000000000000000002221373565003400232030ustar00rootroot00000000000000classes: - debian parameters: debian_suite: testing apt: include_security: yes include_updates: no include_proposed_updates: no reclass-1.7.0/examples/classes/debian/suite/unstable.yml000066400000000000000000000002221373565003400233430ustar00rootroot00000000000000classes: - debian parameters: debian_suite: unstable apt: include_security: no include_updates: no include_proposed_updates: no 
reclass-1.7.0/examples/classes/example.org.yml000066400000000000000000000010631373565003400214000ustar00rootroot00000000000000classes: - sudo # all nodes in the example.org domain provide sudo applications: - motd # all nodes in the example.org domain are expected to provide /etc/motd parameters: motd: legalese: This system is for authorized users only. All traffic on this device is monitored and will be used as evidence if necessary. Use your brain. support: "Please write a message to <${local_admin:email}> in case of problems." location: "Rack ${location:rack}, ${location:address}" tagline: "My hostname's RGB colour code is ${rgb_colour_code}." reclass-1.7.0/examples/classes/hosted@munich.yml000066400000000000000000000002171373565003400217510ustar00rootroot00000000000000parameters: location: address: Briennerstrasse 32, 80333 Munich rack: 2.64 local_admin: email: local-admins@munich.example.org reclass-1.7.0/examples/classes/hosted@zurich.yml000066400000000000000000000002161373565003400217710ustar00rootroot00000000000000parameters: location: address: Letzigraben 4, 8004 Zurich rack: C/IV 6.43 local_admin: email: local-admins@zurich.example.org reclass-1.7.0/examples/classes/mail/000077500000000000000000000000001373565003400173565ustar00rootroot00000000000000reclass-1.7.0/examples/classes/mail/init.yml000066400000000000000000000000321373565003400210370ustar00rootroot00000000000000applications: - postfix reclass-1.7.0/examples/classes/mail/relay.yml000066400000000000000000000001041373565003400212100ustar00rootroot00000000000000classes: - mail parameters: mail: role: relay port: 587 reclass-1.7.0/examples/classes/mail/satellite.yml000066400000000000000000000003101373565003400220610ustar00rootroot00000000000000classes: - mail parameters: mail: role: satellite smtp_relay: smtp.example.org:587 smtp_relay_fingerprint: 45:88:ff:11:b0:be:39:c8:30:2a:84:bd:fc:6c:52:ff:76:d4:c5:41 tls: enforce 
reclass-1.7.0/examples/classes/mail/server.yml000066400000000000000000000000671373565003400214120ustar00rootroot00000000000000classes: - mail parameters: mail: role: server reclass-1.7.0/examples/classes/salt.minion.yml000066400000000000000000000002611373565003400214110ustar00rootroot00000000000000applications: - salt_minion parameters: salt_minion: master: salt-master.example.org master_fingerprint: ed:38:43:88:4b:2d:22:04:76:60:95:18:2e:cd:cf:bf:cc:63:20:c9 reclass-1.7.0/examples/classes/sudo.yml000066400000000000000000000005001373565003400201240ustar00rootroot00000000000000applications: - sudo parameters: sudo: opt_lecture: false opt_ignore_dot: true opt_listpw: true opt_insults: true opt_requiretty: true opt_tty_tickets: true opt_passwd_tries: 1 opt_secure_path: "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" no_passwd_group: wheel reclass-1.7.0/examples/classes/webserver.yml000066400000000000000000000000311373565003400211550ustar00rootroot00000000000000applications: - apache reclass-1.7.0/examples/nodes/000077500000000000000000000000001373565003400161075ustar00rootroot00000000000000reclass-1.7.0/examples/nodes/munich/000077500000000000000000000000001373565003400173725ustar00rootroot00000000000000reclass-1.7.0/examples/nodes/munich/black.example.org.yml000066400000000000000000000002571373565003400234150ustar00rootroot00000000000000classes: - example.org - debian.release.jessie - hosted@munich - salt.minion - webserver - mail.satellite environment: dev parameters: rgb_colour_code: "000000" reclass-1.7.0/examples/nodes/munich/yellow.example.org.yml000066400000000000000000000002351373565003400236500ustar00rootroot00000000000000classes: - example.org - debian.release.wheezy - hosted@munich - salt.minion - mail.relay environment: dev parameters: rgb_colour_code: "00ffff" 
reclass-1.7.0/examples/nodes/zurich/000077500000000000000000000000001373565003400174135ustar00rootroot00000000000000reclass-1.7.0/examples/nodes/zurich/blue.example.org.yml000066400000000000000000000003231373565003400233030ustar00rootroot00000000000000classes: - example.org - debian.release.wheezy - debian.suite.include_backports - hosted@zurich - salt.minion - webserver - mail.satellite environment: prod parameters: rgb_colour_code: "0000ff" reclass-1.7.0/examples/nodes/zurich/white.example.org.yml000066400000000000000000000002371373565003400235000ustar00rootroot00000000000000classes: - example.org - debian.release.jessie - hosted@zurich - salt.minion - mail.server environment: prod parameters: rgb_colour_code: "ffffff" reclass-1.7.0/examples/salt/000077500000000000000000000000001373565003400157425ustar00rootroot00000000000000reclass-1.7.0/examples/salt/reclass000077500000000000000000000001351373565003400173230ustar00rootroot00000000000000#!/bin/sh cd ../../ PYTHONPATH="`pwd`:$PYTHONPATH" exec python reclass/adapters/salt.py "$@" reclass-1.7.0/examples/salt/reclass-config.yml000066400000000000000000000000271373565003400213630ustar00rootroot00000000000000inventory_base_uri: .. reclass-1.7.0/reclass.py000077500000000000000000000006521373565003400151750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import reclass.cli reclass.cli.main() reclass-1.7.0/reclass/000077500000000000000000000000001373565003400146155ustar00rootroot00000000000000reclass-1.7.0/reclass/__init__.py000066400000000000000000000020501373565003400167230ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .output import OutputLoader from .storage.loader import StorageBackendLoader from .storage.memcache_proxy import MemcacheProxy def get_storage(storage_type, nodes_uri, classes_uri, compose_node_name=False, **kwargs): storage_class = StorageBackendLoader(storage_type).load() return MemcacheProxy(storage_class(nodes_uri, classes_uri, compose_node_name, **kwargs)) def get_path_mangler(storage_type, **kwargs): return StorageBackendLoader(storage_type).path_mangler() def output(data, fmt, pretty_print=False, no_refs=False): output_class = OutputLoader(fmt).load() outputter = output_class() return outputter.dump(data, pretty_print=pretty_print, no_refs=no_refs) reclass-1.7.0/reclass/adapters/000077500000000000000000000000001373565003400164205ustar00rootroot00000000000000reclass-1.7.0/reclass/adapters/__init__.py000077500000000000000000000005561373565003400205420ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals reclass-1.7.0/reclass/adapters/ansible.py000077500000000000000000000103741373565003400204170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # IMPORTANT NOTICE: I was kicked out of the Ansible community, and therefore # I have no interest in developing this adapter anymore. If you use it and # have changes, I will take your patch. # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # # 2017.08.08 Andew Pickford # The ansible adapter has received little testing and may not work at all now. from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os, sys, posix, optparse from six import iteritems from reclass import get_storage, output from reclass.core import Core from reclass.errors import ReclassException from reclass.config import find_and_read_configfile, get_options from reclass.version import * from reclass.constants import MODE_NODEINFO from reclass.settings import Settings def cli(): try: # this adapter has to be symlinked to ansible_dir, so we can use this # information to initialise the inventory_base_uri to ansible_dir: ansible_dir = os.path.abspath(os.path.dirname(sys.argv[0])) defaults = {'inventory_base_uri': ansible_dir, 'no_refs' : False, 'pretty_print' : True, 'output' : 'json', 'applications_postfix': '_hosts' } defaults.update(find_and_read_configfile()) def add_ansible_options_group(parser, defaults): group = optparse.OptionGroup(parser, 'Ansible options', 'Ansible-specific options') group.add_option('--applications-postfix', dest='applications_postfix', default=defaults.get('applications_postfix'), 
help='postfix to append to applications to '\ 'turn them into groups') parser.add_option_group(group) options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, inventory_shortopt='-l', inventory_longopt='--list', inventory_help='output the inventory', nodeinfo_shortopt='-t', nodeinfo_longopt='--host', nodeinfo_dest='hostname', nodeinfo_help='output host_vars for the given host', add_options_cb=add_ansible_options_group, defaults=defaults) storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri, options.compose_node_name) class_mappings = defaults.get('class_mappings') defaults.update(vars(options)) settings = Settings(defaults) reclass = Core(storage, class_mappings, settings) if options.mode == MODE_NODEINFO: data = reclass.nodeinfo(options.hostname) # Massage and shift the data like Ansible wants it data['parameters']['__reclass__'] = data['__reclass__'] for i in ('classes', 'applications'): data['parameters']['__reclass__'][i] = data[i] data = data['parameters'] else: data = reclass.inventory() # Ansible inventory is only the list of groups. Groups are the set # of classes plus the set of applications with the postfix added: groups = data['classes'] apps = data['applications'] if options.applications_postfix: postfix = options.applications_postfix groups.update([(k + postfix, v) for (k, v) in iteritems(apps)]) else: groups.update(apps) data = groups print(output(data, options.output, options.pretty_print, options.no_refs)) except ReclassException as e: e.exit_with_message(sys.stderr) sys.exit(posix.EX_OK) if __name__ == '__main__': cli() reclass-1.7.0/reclass/adapters/salt.py000077500000000000000000000126151373565003400177450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os, sys, posix from six import iteritems from reclass import get_storage, output, get_path_mangler from reclass.core import Core from reclass.errors import ReclassException from reclass.config import find_and_read_configfile, get_options from reclass.constants import MODE_NODEINFO from reclass.defaults import * from reclass.settings import Settings from reclass.version import * def ext_pillar(minion_id, pillar, storage_type=OPT_STORAGE_TYPE, inventory_base_uri=OPT_INVENTORY_BASE_URI, nodes_uri=OPT_NODES_URI, classes_uri=OPT_CLASSES_URI, class_mappings=None, propagate_pillar_data_to_reclass=False, compose_node_name=OPT_COMPOSE_NODE_NAME, **kwargs): path_mangler = get_path_mangler(storage_type) nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) storage = get_storage(storage_type, nodes_uri, classes_uri, compose_node_name) input_data = None if propagate_pillar_data_to_reclass: input_data = pillar settings = Settings(kwargs) reclass = Core(storage, class_mappings, settings, input_data=input_data) data = reclass.nodeinfo(minion_id) params = data.get('parameters', {}) params['__reclass__'] = {} params['__reclass__']['nodename'] = minion_id params['__reclass__']['applications'] = data['applications'] params['__reclass__']['classes'] = data['classes'] params['__reclass__']['environment'] = data['environment'] return params def top(minion_id, storage_type=OPT_STORAGE_TYPE, inventory_base_uri=OPT_INVENTORY_BASE_URI, nodes_uri=OPT_NODES_URI, classes_uri=OPT_CLASSES_URI, class_mappings=None, compose_node_name=OPT_COMPOSE_NODE_NAME, **kwargs): path_mangler = get_path_mangler(storage_type) nodes_uri, classes_uri = path_mangler(inventory_base_uri, nodes_uri, classes_uri) storage = get_storage(storage_type, nodes_uri, classes_uri, 
compose_node_name) settings = Settings(kwargs) reclass = Core(storage, class_mappings, settings, input_data=None) # if the minion_id is not None, then return just the applications for the # specific minion, otherwise return the entire top data (which we need for # CLI invocations of the adapter): if minion_id is not None: data = reclass.nodeinfo(minion_id) applications = data.get('applications', []) env = data['environment'] return {env: applications} else: data = reclass.inventory() nodes = {} for (node_id, node_data) in iteritems(data['nodes']): env = node_data['environment'] if env not in nodes: nodes[env] = {} nodes[env][node_id] = node_data['applications'] return nodes def cli(): try: inventory_dir = os.path.abspath(os.path.dirname(sys.argv[0])) defaults = {'pretty_print' : True, 'no_refs' : False, 'output' : 'yaml', 'inventory_base_uri': inventory_dir } defaults.update(find_and_read_configfile()) options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, inventory_shortopt='-t', inventory_longopt='--top', inventory_help='output the state tops (inventory)', nodeinfo_shortopt='-p', nodeinfo_longopt='--pillar', nodeinfo_dest='nodename', nodeinfo_help='output pillar data for a specific node', defaults=defaults) class_mappings = defaults.get('class_mappings') defaults.update(vars(options)) defaults.pop("storage_type", None) defaults.pop("inventory_base_uri", None) defaults.pop("nodes_uri", None) defaults.pop("classes_uri", None) defaults.pop("class_mappings", None) if options.mode == MODE_NODEINFO: data = ext_pillar(options.nodename, {}, storage_type=options.storage_type, inventory_base_uri=options.inventory_base_uri, nodes_uri=options.nodes_uri, classes_uri=options.classes_uri, class_mappings=class_mappings, **defaults) else: data = top(minion_id=None, storage_type=options.storage_type, inventory_base_uri=options.inventory_base_uri, nodes_uri=options.nodes_uri, classes_uri=options.classes_uri, class_mappings=class_mappings, **defaults) print(output(data, 
options.output, options.pretty_print, options.no_refs)) except ReclassException as e: e.exit_with_message(sys.stderr) sys.exit(posix.EX_OK) if __name__ == '__main__': cli() reclass-1.7.0/reclass/cli.py000066400000000000000000000034231373565003400157400ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import sys, os, posix from reclass import get_storage, output from reclass.core import Core from reclass.settings import Settings from reclass.config import find_and_read_configfile, get_options from reclass.defaults import * from reclass.errors import ReclassException from reclass.constants import MODE_NODEINFO from reclass.version import * def main(): try: defaults = {'no_refs' : OPT_NO_REFS, 'pretty_print' : OPT_PRETTY_PRINT, 'output' : OPT_OUTPUT } defaults.update(find_and_read_configfile()) options = get_options(RECLASS_NAME, VERSION, DESCRIPTION, defaults=defaults) storage = get_storage(options.storage_type, options.nodes_uri, options.classes_uri, options.compose_node_name) class_mappings = defaults.get('class_mappings') defaults.update(vars(options)) settings = Settings(defaults) reclass = Core(storage, class_mappings, settings) if options.mode == MODE_NODEINFO: data = reclass.nodeinfo(options.nodename) else: data = reclass.inventory() print(output(data, options.output, options.pretty_print, options.no_refs)) except ReclassException as e: e.exit_with_message(sys.stderr) sys.exit(posix.EX_OK) if __name__ == '__main__': main() reclass-1.7.0/reclass/config.py000066400000000000000000000225001373565003400164330ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 
2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import yaml, os, optparse, posix, sys from . import errors, get_path_mangler from .defaults import * from .constants import MODE_NODEINFO, MODE_INVENTORY def make_db_options_group(parser, defaults={}): ret = optparse.OptionGroup(parser, 'Database options', 'Configure from where {0} collects data'.format(parser.prog)) ret.add_option('-s', '--storage-type', dest='storage_type', default=defaults.get('storage_type', OPT_STORAGE_TYPE), help='the type of storage backend to use [%default]') ret.add_option('-b', '--inventory-base-uri', dest='inventory_base_uri', default=defaults.get('inventory_base_uri', OPT_INVENTORY_BASE_URI), help='the base URI to prepend to nodes and classes [%default]'), ret.add_option('-u', '--nodes-uri', dest='nodes_uri', default=defaults.get('nodes_uri', OPT_NODES_URI), help='the URI to the nodes storage [%default]'), ret.add_option('-c', '--classes-uri', dest='classes_uri', default=defaults.get('classes_uri', OPT_CLASSES_URI), help='the URI to the classes storage [%default]'), ret.add_option('-z', '--ignore-class-notfound', dest='ignore_class_notfound', default=defaults.get('ignore_class_notfound', OPT_IGNORE_CLASS_NOTFOUND), help='decision for not found classes [%default]') ret.add_option('-a', '--compose-node-name', dest='compose_node_name', action="store_true", default=defaults.get('compose_node_name', OPT_COMPOSE_NODE_NAME), help='Add subdir when generating node names. 
[%default]') ret.add_option('-x', '--ignore-class-notfound-regexp', dest='ignore_class_notfound_regexp', default=defaults.get('ignore_class_notfound_regexp', OPT_IGNORE_CLASS_NOTFOUND_REGEXP), help='regexp for not found classes [%default]') return ret def make_output_options_group(parser, defaults={}): ret = optparse.OptionGroup(parser, 'Output options', 'Configure the way {0} prints data'.format(parser.prog)) ret.add_option('-o', '--output', dest='output', default=defaults.get('output', OPT_OUTPUT), help='output format (yaml or json) [%default]') ret.add_option('-y', '--pretty-print', dest='pretty_print', action="store_true", default=defaults.get('pretty_print', OPT_PRETTY_PRINT), help='try to make the output prettier [%default]') ret.add_option('-r', '--no-refs', dest='no_refs', action="store_true", default=defaults.get('no_refs', OPT_NO_REFS), help='output all key values do not use yaml references [%default]') ret.add_option('-1', '--single-error', dest='group_errors', action="store_false", default=defaults.get('group_errors', OPT_GROUP_ERRORS), help='throw errors immediately instead of grouping them together') ret.add_option('-0', '--multiple-errors', dest='group_errors', action="store_true", help='were possible report any errors encountered as a group') return ret def make_modes_options_group(parser, inventory_shortopt, inventory_longopt, inventory_help, nodeinfo_shortopt, nodeinfo_longopt, nodeinfo_dest, nodeinfo_help): def _mode_checker_cb(option, opt_str, value, parser): if hasattr(parser.values, 'mode'): raise optparse.OptionValueError('Cannot specify multiple modes') if option == parser.get_option(nodeinfo_longopt): setattr(parser.values, 'mode', MODE_NODEINFO) setattr(parser.values, nodeinfo_dest, value) else: setattr(parser.values, 'mode', MODE_INVENTORY) setattr(parser.values, nodeinfo_dest, None) ret = optparse.OptionGroup(parser, 'Modes', 'Specify one of these to determine what to do.') ret.add_option(inventory_shortopt, inventory_longopt, 
action='callback', callback=_mode_checker_cb, help=inventory_help) ret.add_option(nodeinfo_shortopt, nodeinfo_longopt, default=None, dest=nodeinfo_dest, type='string', action='callback', callback=_mode_checker_cb, help=nodeinfo_help) return ret def make_parser_and_checker(name, version, description, inventory_shortopt='-i', inventory_longopt='--inventory', inventory_help='output the entire inventory', nodeinfo_shortopt='-n', nodeinfo_longopt='--nodeinfo', nodeinfo_dest='nodename', nodeinfo_help='output information for a specific node', add_options_cb=None, defaults={}): parser = optparse.OptionParser(version=version) parser.prog = name parser.version = version parser.description = description.capitalize() parser.usage = '%prog [options] ( {0} | {1} {2} )'.format(inventory_longopt, nodeinfo_longopt, nodeinfo_dest.upper()) parser.epilog = 'Exactly one mode has to be specified.' db_group = make_db_options_group(parser, defaults) parser.add_option_group(db_group) output_group = make_output_options_group(parser, defaults) parser.add_option_group(output_group) if callable(add_options_cb): add_options_cb(parser, defaults) modes_group = make_modes_options_group(parser, inventory_shortopt, inventory_longopt, inventory_help, nodeinfo_shortopt, nodeinfo_longopt, nodeinfo_dest, nodeinfo_help) parser.add_option_group(modes_group) def option_checker(options, args): if len(args) > 0: parser.error('No arguments allowed') elif not hasattr(options, 'mode') \ or options.mode not in (MODE_NODEINFO, MODE_INVENTORY): parser.error('You need to specify exactly one mode '\ '({0} or {1})'.format(inventory_longopt, nodeinfo_longopt)) elif options.mode == MODE_NODEINFO \ and not getattr(options, nodeinfo_dest, None): parser.error('Mode {0} needs {1}'.format(nodeinfo_longopt, nodeinfo_dest.upper())) elif options.inventory_base_uri is None and options.nodes_uri is None: parser.error('Must specify --inventory-base-uri or --nodes-uri') elif options.inventory_base_uri is None and 
options.classes_uri is None: parser.error('Must specify --inventory-base-uri or --classes-uri') return parser, option_checker def get_options(name, version, description, inventory_shortopt='-i', inventory_longopt='--inventory', inventory_help='output the entire inventory', nodeinfo_shortopt='-n', nodeinfo_longopt='--nodeinfo', nodeinfo_dest='nodename', nodeinfo_help='output information for a specific node', add_options_cb=None, defaults={}): parser, checker = make_parser_and_checker(name, version, description, inventory_shortopt, inventory_longopt, inventory_help, nodeinfo_shortopt, nodeinfo_longopt, nodeinfo_dest, nodeinfo_help, add_options_cb, defaults=defaults) options, args = parser.parse_args() checker(options, args) path_mangler = get_path_mangler(options.storage_type) options.nodes_uri, options.classes_uri = path_mangler(options.inventory_base_uri, options.nodes_uri, options.classes_uri) return options def vvv(msg): #print(msg, file=sys.stderr) pass def find_and_read_configfile(filename=CONFIG_FILE_NAME, dirs=CONFIG_FILE_SEARCH_PATH): for d in dirs: f = os.path.join(d, filename) if os.access(f, os.R_OK): vvv('Using config file: {0}'.format(str(f))) return yaml.safe_load(open(f)) elif os.path.isfile(f): raise PermissionsError('cannot read %s' % f) return {} reclass-1.7.0/reclass/constants.py000066400000000000000000000011221373565003400171770ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals class _Constant(object): def __init__(self, displayname): self._repr = displayname __str__ = __repr__ = lambda self: self._repr MODE_NODEINFO = _Constant('NODEINFO') MODE_INVENTORY = _Constant('INVENTORY') reclass-1.7.0/reclass/core.py000066400000000000000000000305061373565003400161230ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import copy import time import re import fnmatch import shlex import string import sys import yaml from six import iteritems from reclass.settings import Settings from reclass.datatypes import Entity, Classes, Parameters, Exports from reclass.errors import MappingFormatError, ClassNameResolveError, ClassNotFound, InvQueryClassNameResolveError, InvQueryClassNotFound, InvQueryError, InterpolationError, ResolveError from reclass.values import NodeInventory from reclass.values.parser import Parser class Core(object): _parser = Parser() def __init__(self, storage, class_mappings, settings, input_data=None): self._storage = storage self._class_mappings = class_mappings self._settings = settings self._input_data = input_data if self._settings.ignore_class_notfound: self._cnf_r = re.compile( '|'.join(self._settings.ignore_class_notfound_regexp)) @staticmethod def _get_timestamp(): return time.strftime('%c') @staticmethod def _match_regexp(key, nodename): return re.search(key, nodename) @staticmethod def _match_glob(key, nodename): return fnmatch.fnmatchcase(nodename, key) @staticmethod def _shlex_split(instr): lexer = 
shlex.shlex(instr, posix=True) lexer.whitespace_split = True lexer.commenters = '' regexp = False if instr[0] == '/': lexer.quotes += '/' lexer.escapedquotes += '/' regexp = True try: key = lexer.get_token() except ValueError as e: raise MappingFormatError('Error in mapping "{0}": missing closing ' 'quote (or slash)'.format(instr)) if regexp: key = '/{0}/'.format(key) return key, list(lexer) def _get_class_mappings_entity(self, entity): if not self._class_mappings: return Entity(self._settings, name='empty (class mappings)') c = Classes() if self._settings.class_mappings_match_path: matchname = entity.pathname else: matchname = entity.name for mapping in self._class_mappings: matched = False key, klasses = Core._shlex_split(mapping) if key[0] == ('/'): matched = Core._match_regexp(key[1:-1], matchname) if matched: for klass in klasses: c.append_if_new(matched.expand(klass)) else: if Core._match_glob(key, matchname): for klass in klasses: c.append_if_new(klass) return Entity(self._settings, classes=c, name='class mappings for node {0}'.format(entity.name)) def _get_input_data_entity(self): if not self._input_data: return Entity(self._settings, name='empty (input data)') p = Parameters(self._input_data, self._settings) return Entity(self._settings, parameters=p, name='input data') def _recurse_entity(self, entity, merge_base=None, context=None, seen=None, nodename=None, environment=None): if seen is None: seen = {} if environment is None: environment = self._settings.default_environment if merge_base is None: merge_base = Entity(self._settings, name='empty (@{0})'.format(nodename)) if context is None: context = Entity(self._settings, name='empty (@{0})'.format(nodename)) for klass in entity.classes.as_list(): # class name contain reference num_references = klass.count(self._settings.reference_sentinels[0]) +\ klass.count(self._settings.export_sentinels[0]) if num_references > 0: try: klass = str(self._parser.parse(klass, 
self._settings).render(merge_base.parameters.as_dict(), {})) except ResolveError as e: try: klass = str(self._parser.parse(klass, self._settings).render(context.parameters.as_dict(), {})) except ResolveError as e: raise ClassNameResolveError(klass, nodename, entity.uri) if klass not in seen: try: class_entity = self._storage.get_class(klass, environment, self._settings) except ClassNotFound as e: if self._settings.ignore_class_notfound: if self._cnf_r.match(klass): if self._settings.ignore_class_notfound_warning: # TODO, add logging handler print("[WARNING] Reclass class not found: '%s'. Skipped!" % klass, file=sys.stderr) continue e.nodename = nodename e.uri = entity.uri raise descent = self._recurse_entity(class_entity, context=context, seen=seen, nodename=nodename, environment=environment) # on every iteration, we merge the result of the recursive # descent into what we have so far… merge_base.merge(descent) seen[klass] = True # … and finally, we merge what we have at this level into the # result of the iteration, so that elements at the current level # overwrite stuff defined by parents merge_base.merge(entity) return merge_base def _get_automatic_parameters(self, nodename, environment): if self._settings.automatic_parameters: pars = { '_reclass_': { 'name': { 'full': nodename, 'short': nodename.split('.')[0] }, 'environment': environment } } return Parameters(pars, self._settings, '__auto__') else: return Parameters({}, self._settings, '') def _get_scalar_parameters(self, node_parameters): if self._settings.scalar_parameters: scalars = node_parameters.as_dict().get( self._settings.scalar_parameters, {}) return Parameters( {self._settings.scalar_parameters: scalars}, self._settings, '__scalar__') else: return Parameters({}, self._settings, '') def _get_inventory(self, all_envs, environment, queries): ''' Returns a dictionary of NodeInventory objects, one per matching node. 
Exports which are required for the given queries (or all exports if the queries is None) are rendered, remaining exports are left unrendered. Args: all_envs - if True match export values from nodes in any environment else if False match only for nodes in the same environment as the environment parameter environment - node environment to match against if all_envs is False queries - list of inventory queries to determine required export values or if None match all exports defined by a node ''' inventory = {} for nodename in self._storage.enumerate_nodes(): try: node_base = self._storage.get_node(nodename, self._settings) if node_base.environment is None: node_base.environment = self._settings.default_environment except yaml.scanner.ScannerError as e: if self._settings.inventory_ignore_failed_node: continue raise if all_envs or node_base.environment == environment: try: node = self._node_entity(nodename) except ClassNotFound as e: raise InvQueryClassNotFound(e) except ClassNameResolveError as e: raise InvQueryClassNameResolveError(e) if queries is None: # This only happens if reclass is called with the --inventory option try: node.interpolate_exports() except InterpolationError as e: e.nodename = nodename else: node.initialise_interpolation() for p, q in queries: try: node.interpolate_single_export(q) except InterpolationError as e: e.nodename = nodename raise InvQueryError(q.contents, e, context=p, uri=q.uri) inventory[nodename] = NodeInventory(node.exports.as_dict(), node_base.environment == environment) return inventory def _node_entity(self, nodename): node_entity = self._storage.get_node(nodename, self._settings) if node_entity.environment == None: node_entity.environment = self._settings.default_environment base_entity = Entity(self._settings, name='base') base_entity.merge(self._get_class_mappings_entity(node_entity)) base_entity.merge(self._get_input_data_entity()) base_entity.merge_parameters(self._get_automatic_parameters(nodename, node_entity.environment)) 
base_entity.merge_parameters(self._get_scalar_parameters(node_entity.parameters)) seen = {} merge_base = self._recurse_entity(base_entity, seen=seen, nodename=nodename, environment=node_entity.environment) return self._recurse_entity(node_entity, merge_base=merge_base, context=merge_base, seen=seen, nodename=nodename, environment=node_entity.environment) def _nodeinfo(self, nodename, inventory): try: node = self._node_entity(nodename) node.initialise_interpolation() if node.parameters.has_inv_query and inventory is None: inventory = self._get_inventory(node.parameters.needs_all_envs, node.environment, node.parameters.get_inv_queries()) node.interpolate(inventory) return node except InterpolationError as e: e.nodename = nodename raise def _nodeinfo_as_dict(self, nodename, entity): ret = {'__reclass__' : {'node': entity.name, 'name': nodename, 'uri': entity.uri, 'environment': entity.environment, 'timestamp': Core._get_timestamp() }, } ret.update(entity.as_dict()) return ret def nodeinfo(self, nodename): return self._nodeinfo_as_dict(nodename, self._nodeinfo(nodename, None)) def inventory(self): query_nodes = set() entities = {} inventory = self._get_inventory(True, '', None) for n in self._storage.enumerate_nodes(): entities[n] = self._nodeinfo(n, inventory) if entities[n].parameters.has_inv_query: nodes.add(n) for n in query_nodes: entities[n] = self._nodeinfo(n, inventory) nodes = {} applications = {} classes = {} for (f, nodeinfo) in iteritems(entities): d = nodes[f] = self._nodeinfo_as_dict(f, nodeinfo) for a in d['applications']: if a in applications: applications[a].append(f) else: applications[a] = [f] for c in d['classes']: if c in classes: classes[c].append(f) else: classes[c] = [f] return {'__reclass__' : {'timestamp': Core._get_timestamp()}, 'nodes': nodes, 'classes': classes, 'applications': applications } 
reclass-1.7.0/reclass/datatypes/000077500000000000000000000000001373565003400166135ustar00rootroot00000000000000reclass-1.7.0/reclass/datatypes/__init__.py000066400000000000000000000010171373565003400207230ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .applications import Applications from .classes import Classes from .entity import Entity from .exports import Exports from .parameters import Parameters reclass-1.7.0/reclass/datatypes/applications.py000066400000000000000000000045471373565003400216650ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .classes import Classes class Applications(Classes): ''' Extends Classes with the possibility to let specially formatted items remove earlier occurences of the item. For instance, if the "negater" is '~', then "adding" an element "~foo" to a list causes a previous element "foo" to be removed. If no such element exists, nothing happens, but a reference of the negation is kept, in case the instance is later used to extend another instance, in which case the negations should apply to the instance to be extended. 
''' DEFAULT_NEGATION_PREFIX = '~' def __init__(self, iterable=None, negation_prefix=DEFAULT_NEGATION_PREFIX): self.negation_prefix = negation_prefix self._offset = len(negation_prefix) self._negations = [] super(Applications, self).__init__(iterable) def append_if_new(self, item): self._assert_is_string(item) if item.startswith(self.negation_prefix): item = item[self._offset:] self._negations.append(item) try: self._items.remove(item) except ValueError: pass else: super(Applications, self)._append_if_new(item) def merge_unique(self, iterable): if isinstance(iterable, self.__class__): # we might be extending ourselves to include negated applications, # in which case we need to remove our own content accordingly: for negation in iterable._negations: try: self._items.remove(negation) except ValueError: pass iterable = iterable.as_list() for i in iterable: self.append_if_new(i) def __repr__(self): contents = self._items + \ ['%s%s' % (self.negation_prefix, i) for i in self._negations] return "%s(%r, %r)" % (self.__class__.__name__, contents, str(self.negation_prefix)) reclass-1.7.0/reclass/datatypes/classes.py000066400000000000000000000044551373565003400206320ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import six import os from reclass.errors import InvalidClassnameError INVALID_CHARACTERS_FOR_CLASSNAMES = ' ' + os.sep class Classes(object): ''' A very limited ordered set of strings with O(n) uniqueness constraints. It is neither a proper list or a proper set, on purpose, to keep things simple. 
''' def __init__(self, iterable=None): self._items = [] if iterable is not None: self.merge_unique(iterable) def __len__(self): return len(self._items) def __eq__(self, rhs): if isinstance(rhs, list): return self._items == rhs else: try: return self._items == rhs._items except AttributeError as e: return False def __ne__(self, rhs): return not self.__eq__(rhs) def as_list(self): return self._items[:] def merge_unique(self, iterable): if isinstance(iterable, self.__class__): iterable = iterable.as_list() # Cannot just call list.extend here, as iterable's items might not # be unique by themselves, or in the context of self. for i in iterable: self.append_if_new(i) def _assert_is_string(self, item): if not isinstance(item, six.string_types): raise TypeError('%s instances can only contain strings, ' 'not %s' % (self.__class__.__name__, type(item))) def _assert_valid_characters(self, item): for c in INVALID_CHARACTERS_FOR_CLASSNAMES: if c in item: raise InvalidClassnameError(c, item) def _append_if_new(self, item): if item not in self._items: self._items.append(item) def append_if_new(self, item): self._assert_is_string(item) self._assert_valid_characters(item) self._append_if_new(item) def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self._items) reclass-1.7.0/reclass/datatypes/entity.py000066400000000000000000000106361373565003400205070ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .classes import Classes from .applications import Applications from .exports import Exports from .parameters import Parameters class Entity(object): ''' A collection of Classes, Parameters, and Applications, mainly as a wrapper for merging. 
The name and uri of an Entity will be updated to the name and uri of the Entity that is being merged. ''' def __init__(self, settings, classes=None, applications=None, parameters=None, exports=None, uri=None, name=None, pathname=None, environment=None): self._uri = '' if uri is None else uri self._name = '' if name is None else name self._pathname = '' if pathname is None else pathname self._classes = self._set_field(classes, Classes) self._applications = self._set_field(applications, Applications) pars = [None, settings, uri] self._parameters = self._set_field(parameters, Parameters, pars) self._exports = self._set_field(exports, Exports, pars) self._environment = environment name = property(lambda s: s._name) uri = property(lambda s: s._uri) pathname = property(lambda s: s._pathname) classes = property(lambda s: s._classes) applications = property(lambda s: s._applications) parameters = property(lambda s: s._parameters) exports = property(lambda s: s._exports) @property def environment(self): return self._environment @environment.setter def environment(self, value): self._environment = value def _set_field(self, received_value, expected_type, parameters=None): if parameters is None: parameters = [] if received_value is None: return expected_type(*parameters) if not isinstance(received_value, expected_type): raise TypeError('Entity.%s cannot be set to instance of type %s' % (type(expected_type), type(received_value))) return received_value def merge(self, other): self._classes.merge_unique(other.classes) self._applications.merge_unique(other.applications) self._parameters.merge(other.parameters) self._exports.merge(other.exports) self._name = other.name self._uri = other.uri self._parameters._uri = other.uri if other.environment != None: self._environment = other.environment def merge_parameters(self, params): self._parameters.merge(params) def interpolate(self, inventory): self._parameters.interpolate(inventory) self.interpolate_exports() def 
initialise_interpolation(self): self._parameters.initialise_interpolation() self._exports.initialise_interpolation() def interpolate_exports(self): self.initialise_interpolation() self._exports.interpolate_from_external(self._parameters) def interpolate_single_export(self, references): self._exports.interpolate_single_from_external(self._parameters, references) def __eq__(self, other): return isinstance(other, type(self)) \ and self._applications == other.applications \ and self._classes == other.classes \ and self._parameters == other.parameters \ and self._exports == other.exports \ and self._name == other.name \ and self._uri == other.uri def __ne__(self, other): return not self.__eq__(other) def __repr__(self): return "%s(%r, %r, %r, %r, uri=%r, name=%r, pathname=%r, environment=%r)" % ( self.__class__.__name__, self.classes, self.applications, self.parameters, self.exports, self.uri, self.name, self.pathname, self.environment) def as_dict(self): return {'classes': self._classes.as_list(), 'applications': self._applications.as_list(), 'parameters': self._parameters.as_dict(), 'exports': self._exports.as_dict(), 'environment': self._environment } reclass-1.7.0/reclass/datatypes/exports.py000066400000000000000000000070741373565003400207010ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import copy from six import iteritems, next from .parameters import Parameters from reclass.errors import ResolveError from reclass.values.value import Value from reclass.values.valuelist import ValueList from reclass.utils.dictpath import DictPath class Exports(Parameters): def __init__(self, mapping, settings, uri): super(Exports, self).__init__(mapping, settings, uri) def delete_key(self, key): self._base.pop(key, None) self._unrendered.pop(key, None) def 
overwrite(self, other): overdict = {'~' + key: value for (key, value) in iteritems(other)} self.merge(overdict) def interpolate_from_external(self, external): while len(self._unrendered) > 0: path, v = next(iteritems(self._unrendered)) value = path.get_value(self._base) if isinstance(value, (Value, ValueList)): external._interpolate_references(path, value, None) new = self._interpolate_render_from_external(external._base, path, value) path.set_value(self._base, new) del self._unrendered[path] else: # references to lists and dicts are only deepcopied when merged # together so it's possible a value with references in a referenced # list or dict has already been rendered del self._unrendered[path] def interpolate_single_from_external(self, external, query): for r in query.get_inv_references(): self._interpolate_single_path_from_external(r, external, query) def _interpolate_single_path_from_external(self, mainpath, external, query): required = self._get_required_paths(mainpath) while len(required) > 0: while len(required) > 0: path, v = next(iteritems(required)) value = path.get_value(self._base) if isinstance(value, (Value, ValueList)): try: external._interpolate_references(path, value, None) new = self._interpolate_render_from_external(external._base, path, value) path.set_value(self._base, new) except ResolveError as e: if query.ignore_failed_render(): path.delete(self._base) else: raise del required[path] del self._unrendered[path] required = self._get_required_paths(mainpath) def _get_required_paths(self, mainpath): paths = {} path = DictPath(self._settings.delimiter) for i in mainpath.key_parts(): path.add_subpath(i) if path in self._unrendered: paths[path] = True for i in self._unrendered: if mainpath.is_ancestor_of(i) or mainpath == i: paths[i] = True return paths def _interpolate_render_from_external(self, context, path, value): try: new = value.render(context, None) except ResolveError as e: e.context = path raise if isinstance(new, dict): new = 
self._render_simple_dict(new, path) elif isinstance(new, list): new = self._render_simple_list(new, path) return new reclass-1.7.0/reclass/datatypes/parameters.py000066400000000000000000000335551373565003400213430ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import copy import sys import types from six import iteritems, next from collections import namedtuple from reclass.utils.dictpath import DictPath from reclass.utils.parameterdict import ParameterDict from reclass.utils.parameterlist import ParameterList from reclass.values.value import Value from reclass.values.valuelist import ValueList from reclass.errors import InfiniteRecursionError, ResolveError from reclass.errors import ResolveErrorList, InterpolationError, ParseError from reclass.errors import BadReferencesError class Parameters(object): ''' A class to hold nested dictionaries with the following specialities: 1. "merging" a dictionary (the "new" dictionary) into the current Parameters causes a recursive walk of the new dict, during which - scalars (incl. tuples) are replaced with the value from the new dictionary; - lists are extended, not replaced; - dictionaries are updated (using dict.update), not replaced; 2. "interpolating" a dictionary means that values within the dictionary can reference other values in the same dictionary. Those references are collected during merging and then resolved during interpolation, which avoids having to walk the dictionary twice. If a referenced value contains references itself, those are resolved first, in topological order. Therefore, deep references work. Cyclical references cause an error. 
To support these specialities, this class only exposes very limited functionality and does not try to be a really mapping object. ''' def __init__(self, mapping, settings, uri, parse_strings=True): self._settings = settings self._uri = uri self._base = ParameterDict(uri=self._uri) self._unrendered = None self._inv_queries = [] self.resolve_errors = ResolveErrorList() self.needs_all_envs = False self._parse_strings = parse_strings if mapping is not None: # initialise by merging self.merge(mapping) def __len__(self): return len(self._base) def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self._base) def __eq__(self, other): return isinstance(other, type(self)) \ and self._base == other._base \ and self._settings == other._settings def __ne__(self, other): return not self.__eq__(other) @property def has_inv_query(self): return len(self._inv_queries) > 0 def get_inv_queries(self): return self._inv_queries def as_dict(self): return self._base.copy() def _wrap_value(self, value): if isinstance(value, (Value, ValueList)): return value elif isinstance(value, dict): return self._wrap_dict(value) elif isinstance(value, list): return self._wrap_list(value) else: try: return Value(value, self._settings, self._uri, parse_string=self._parse_strings) except InterpolationError as e: e.context = DictPath(self._settings.delimiter) raise def _get_wrapped(self, position, value): try: return self._wrap_value(value) except InterpolationError as e: e.context.add_ancestor(str(position)) raise def _wrap_list(self, source): l = ParameterList(uri=self._uri) for (k, v) in enumerate(source): l.append(self._get_wrapped(k, v)) return l def _wrap_dict(self, source): d = ParameterDict(uri=self._uri) for (k, v) in iteritems(source): d[k] = self._get_wrapped(k, v) return d def _update_value(self, cur, new): if isinstance(cur, Value): values = ValueList(cur, self._settings) elif isinstance(cur, ValueList): values = cur else: if isinstance(cur, (ParameterDict, ParameterList)): uri = 
cur.uri else: uri = self._uri values = ValueList(Value(cur, self._settings, uri), self._settings) if isinstance(new, Value): values.append(new) elif isinstance(new, ValueList): values.extend(new) else: if isinstance(new, (ParameterDict, ParameterList)): uri = new.uri else: uri = self._uri values.append(Value(new, self._settings, uri, parse_string=self._parse_strings)) return values def _merge_dict(self, cur, new): """Merge a dictionary with another dictionary. Iterate over keys in new. If this is not an initialization merge and the key begins with PARAMETER_DICT_KEY_OVERRIDE_PREFIX, override the value of the key in cur. Otherwise deeply merge the contents of the key in cur with the contents of the key in _merge_recurse over the item. Args: cur (dict): Current dictionary new (dict): Dictionary to be merged initmerge (bool): True if called as part of entity init Returns: dict: a merged dictionary """ for (key, value) in iteritems(new): # check key for "control" preffixes (~,=,...) key = str(key) if key[0] in self._settings.dict_key_prefixes: newkey = key[1:] if not isinstance(value, Value): value = Value(value, self._settings, self._uri, parse_string=self._parse_strings) if key[0] == self._settings.dict_key_override_prefix: value.overwrite = True elif key[0] == self._settings.dict_key_constant_prefix: value.constant = True value = self._merge_recurse(cur.get(newkey), value) key = newkey else: value = self._merge_recurse(cur.get(key), value) cur[key] = value cur.uri = new.uri return cur def _merge_recurse(self, cur, new): """Merge a parameter with another parameter. Iterate over keys in new. Call _merge_dict, _update_value depending on type. 
Args: cur: Current parameter new: Parameter to be merged Returns: merged parameter (Value or ValueList) """ if isinstance(new, dict): if cur is None: cur = ParameterDict(uri=self._uri) if isinstance(cur, dict): return self._merge_dict(cur, new) else: return self._update_value(cur, new) else: if cur is None: return new else: return self._update_value(cur, new) def merge(self, other): """Merge function (public edition). Call _merge_recurse on self with either another Parameter object or a dict (for initialization). Set initmerge if it's a dict. Args: other (dict or Parameter): Thing to merge with self._base Returns: None: Nothing """ self._unrendered = None if isinstance(other, dict): wrapped = self._wrap_dict(other) elif isinstance(other, self.__class__): wrapped = other._wrap_dict(other._base) else: raise TypeError('Cannot merge %s objects into %s' % (type(other), self.__class__.__name__)) self._base = self._merge_recurse(self._base, wrapped) def _render_simple_container(self, container, key, value, path): if isinstance(value, ValueList): if value.is_complex: p = path.new_subpath(key) self._unrendered[p] = True container[key] = value if value.has_inv_query: self._inv_queries.append((p, value)) if value.needs_all_envs: self.needs_all_envs = True return else: value = value.merge() if isinstance(value, Value) and value.is_container(): value = value.contents if isinstance(value, dict): container[key] = self._render_simple_dict(value, path.new_subpath(key)) elif isinstance(value, list): container[key] = self._render_simple_list(value, path.new_subpath(key)) elif isinstance(value, Value): if value.is_complex: p = path.new_subpath(key) self._unrendered[p] = True container[key] = value if value.has_inv_query: self._inv_queries.append((p, value)) if value.needs_all_envs: self.needs_all_envs = True else: container[key] = value.render(None, None) else: container[key] = value def _render_simple_dict(self, dictionary, path): new_dict = {} for (key, value) in 
iteritems(dictionary): self._render_simple_container(new_dict, key, value, path) return new_dict def _render_simple_list(self, item_list, path): new_list = [ None ] * len(item_list) for n, value in enumerate(item_list): self._render_simple_container(new_list, n, value, path) return new_list def interpolate(self, inventory=None): self._initialise_interpolate() while len(self._unrendered) > 0: # we could use a view here, but this is simple enough: # _interpolate_inner removes references from the refs hash after # processing them, so we cannot just iterate the dict path, v = next(iteritems(self._unrendered)) self._interpolate_inner(path, inventory) if self.resolve_errors.have_errors(): raise self.resolve_errors def initialise_interpolation(self): self._unrendered = None self._initialise_interpolate() def _initialise_interpolate(self): if self._unrendered is None: self._unrendered = {} self._inv_queries = [] self.needs_all_envs = False self.resolve_errors = ResolveErrorList() self._base = self._render_simple_dict(self._base, DictPath(self._settings.delimiter)) def _interpolate_inner(self, path, inventory): value = path.get_value(self._base) if not isinstance(value, (Value, ValueList)): # references to lists and dicts are only deepcopied when merged # together so it's possible a value with references in a referenced # list or dict has already been visited by _interpolate_inner del self._unrendered[path] return self._unrendered[path] = False self._interpolate_references(path, value, inventory) new = self._interpolate_render_value(path, value, inventory) path.set_value(self._base, new) del self._unrendered[path] def _interpolate_render_value(self, path, value, inventory): try: new = value.render(self._base, inventory) except ResolveError as e: e.context = path if self._settings.group_errors: self.resolve_errors.add(e) new = None else: raise except InterpolationError as e: e.context = path raise if isinstance(new, dict): new = self._render_simple_dict(new, path) elif 
isinstance(new, list): new = self._render_simple_list(new, path) return new def _interpolate_references(self, path, value, inventory): all_refs = False while not all_refs: for ref in value.get_references(): path_from_ref = DictPath(self._settings.delimiter, ref) if path_from_ref in self._unrendered: if self._unrendered[path_from_ref] is False: # every call to _interpolate_inner replaces the value of # self._unrendered[path] with False # Therefore, if we encounter False instead of True, # it means that we have already processed it and are now # faced with a cyclical reference. raise InfiniteRecursionError(path, ref, value.uri) else: self._interpolate_inner(path_from_ref, inventory) else: # ensure ancestor keys are already dereferenced ancestor = DictPath(self._settings.delimiter) for k in path_from_ref.key_parts(): ancestor = ancestor.new_subpath(k) if ancestor in self._unrendered: self._interpolate_inner(ancestor, inventory) if value.allRefs: all_refs = True else: # not all references in the value could be calculated previously so # try recalculating references with current context and recursively # call _interpolate_inner if the number of references has increased # Otherwise raise an error old = len(value.get_references()) value.assembleRefs(self._base) if old == len(value.get_references()): raise BadReferencesError(value.get_references(), str(path), value.uri) reclass-1.7.0/reclass/datatypes/tests/000077500000000000000000000000001373565003400177555ustar00rootroot00000000000000reclass-1.7.0/reclass/datatypes/tests/__init__.py000066400000000000000000000002511373565003400220640ustar00rootroot00000000000000# -*- coding: utf-8 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals reclass-1.7.0/reclass/datatypes/tests/test_applications.py000066400000000000000000000046631373565003400240650ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass 
(http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.datatypes import Applications, Classes import unittest try: import unittest.mock as mock except ImportError: import mock TESTLIST1 = ['one', 'two', 'three'] TESTLIST2 = ['red', 'green', '~two', '~three'] GOALLIST = ['one', 'red', 'green'] #TODO: mock out the underlying list class TestApplications(unittest.TestCase): def test_inheritance(self): a = Applications() self.assertIsInstance(a, Classes) def test_constructor_negate(self): a = Applications(TESTLIST1 + TESTLIST2) self.assertSequenceEqual(a, GOALLIST) def test_merge_unique_negate_list(self): a = Applications(TESTLIST1) a.merge_unique(TESTLIST2) self.assertSequenceEqual(a, GOALLIST) def test_merge_unique_negate_instance(self): a = Applications(TESTLIST1) a.merge_unique(Applications(TESTLIST2)) self.assertSequenceEqual(a, GOALLIST) def test_append_if_new_negate(self): a = Applications(TESTLIST1) a.append_if_new(TESTLIST2[2]) self.assertSequenceEqual(a, TESTLIST1[::2]) def test_repr_empty(self): negater = '%%' a = Applications(negation_prefix=negater) self.assertEqual('%r' % a, "%s(%r, '%s')" % (a.__class__.__name__, [], negater)) def test_repr_contents(self): negater = '%%' a = Applications(TESTLIST1, negation_prefix=negater) self.assertEqual('%r' % a, "%s(%r, '%s')" % (a.__class__.__name__, TESTLIST1, negater)) def test_repr_negations(self): negater = '~' a = Applications(TESTLIST2, negation_prefix=negater) self.assertEqual('%r' % a, "%s(%r, '%s')" % (a.__class__.__name__, TESTLIST2, negater)) def test_repr_negations_interspersed(self): l = ['a', '~b', 'a', '~d'] a = Applications(l) is_negation = lambda x: x.startswith(a.negation_prefix) GOAL = list(filter(lambda x: not is_negation(x), set(l))) + list(filter(is_negation, 
l)) self.assertEqual('%r' % a, "%s(%r, '~')" % (a.__class__.__name__, GOAL)) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/datatypes/tests/test_classes.py000066400000000000000000000077261373565003400230370ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.datatypes import Classes from reclass.datatypes.classes import INVALID_CHARACTERS_FOR_CLASSNAMES import unittest try: import unittest.mock as mock except ImportError: import mock from reclass.errors import InvalidClassnameError TESTLIST1 = ['one', 'two', 'three'] TESTLIST2 = ['red', 'green', 'blue'] #TODO: mock out the underlying list class TestClasses(unittest.TestCase): def test_len_empty(self): with mock.patch.object(Classes, 'merge_unique') as m: c = Classes() self.assertEqual(len(c), 0) self.assertFalse(m.called) def test_constructor(self): with mock.patch.object(Classes, 'merge_unique') as m: c = Classes(TESTLIST1) m.assert_called_once_with(TESTLIST1) def test_equality_list_empty(self): self.assertEqual(Classes(), []) def test_equality_list(self): self.assertEqual(Classes(TESTLIST1), TESTLIST1) def test_equality_instance_empty(self): self.assertEqual(Classes(), Classes()) def test_equality_instance(self): self.assertEqual(Classes(TESTLIST1), Classes(TESTLIST1)) def test_inequality(self): self.assertNotEqual(Classes(TESTLIST1), Classes(TESTLIST2)) def test_construct_duplicates(self): c = Classes(TESTLIST1 + TESTLIST1) self.assertSequenceEqual(c, TESTLIST1) def test_append_if_new(self): c = Classes() c.append_if_new(TESTLIST1[0]) self.assertEqual(len(c), 1) self.assertSequenceEqual(c, TESTLIST1[:1]) def test_append_if_new_duplicate(self): c = Classes(TESTLIST1) 
c.append_if_new(TESTLIST1[0]) self.assertEqual(len(c), len(TESTLIST1)) self.assertSequenceEqual(c, TESTLIST1) def test_append_if_new_nonstring(self): c = Classes() with self.assertRaises(TypeError): c.append_if_new(0) def test_append_invalid_characters(self): c = Classes() invalid_name = ' '.join(('foo', 'bar')) with self.assertRaises(InvalidClassnameError) as e: c.append_if_new(invalid_name) self.assertEqual(e.exception.message, "Invalid character ' ' in class name 'foo bar'.") def test_merge_unique(self): c = Classes(TESTLIST1) c.merge_unique(TESTLIST2) self.assertSequenceEqual(c, TESTLIST1 + TESTLIST2) def test_merge_unique_duplicate1_list(self): c = Classes(TESTLIST1) c.merge_unique(TESTLIST1) self.assertSequenceEqual(c, TESTLIST1) def test_merge_unique_duplicate1_instance(self): c = Classes(TESTLIST1) c.merge_unique(Classes(TESTLIST1)) self.assertSequenceEqual(c, TESTLIST1) def test_merge_unique_duplicate2_list(self): c = Classes(TESTLIST1) c.merge_unique(TESTLIST2 + TESTLIST2) self.assertSequenceEqual(c, TESTLIST1 + TESTLIST2) def test_merge_unique_duplicate2_instance(self): c = Classes(TESTLIST1) c.merge_unique(Classes(TESTLIST2 + TESTLIST2)) self.assertSequenceEqual(c, TESTLIST1 + TESTLIST2) def test_merge_unique_nonstring(self): c = Classes() with self.assertRaises(TypeError): c.merge_unique([0,1,2]) def test_repr_empty(self): c = Classes() self.assertEqual('%r' % c, '%s(%r)' % (c.__class__.__name__, [])) def test_repr_contents(self): c = Classes(TESTLIST1) self.assertEqual('%r' % c, '%s(%r)' % (c.__class__.__name__, TESTLIST1)) def test_as_list(self): c = Classes(TESTLIST1) self.assertListEqual(c.as_list(), TESTLIST1) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/datatypes/tests/test_entity.py000066400000000000000000000412311373565003400227030ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from six import iteritems from reclass.settings import Settings from reclass.datatypes import Entity, Classes, Parameters, Applications, Exports from reclass.errors import ResolveError from reclass.values import NodeInventory import unittest try: import unittest.mock as mock except ImportError: import mock SETTINGS = Settings() @mock.patch.multiple('reclass.datatypes', autospec=True, Classes=mock.DEFAULT, Applications=mock.DEFAULT, Parameters=mock.DEFAULT, Exports=mock.DEFAULT) class TestEntity(unittest.TestCase): def _make_instances(self, Classes, Applications, Parameters, Exports): return Classes(), Applications(), Parameters({}, SETTINGS, ""), Exports({}, SETTINGS, "") def test_constructor_default(self, **mocks): # Actually test the real objects by calling the default constructor, # all other tests shall pass instances to the constructor e = Entity(SETTINGS) self.assertEqual(e.name, '') self.assertEqual(e.uri, '') self.assertIsInstance(e.classes, Classes) self.assertIsInstance(e.applications, Applications) self.assertIsInstance(e.parameters, Parameters) self.assertIsInstance(e.exports, Exports) def test_constructor_empty(self, **types): instances = self._make_instances(**types) e = Entity(SETTINGS, *instances) self.assertEqual(e.name, '') self.assertEqual(e.uri, '') cl, al, pl, ex = [getattr(i, '__len__') for i in instances] self.assertEqual(len(e.classes), cl.return_value) cl.assert_called_once_with() self.assertEqual(len(e.applications), al.return_value) al.assert_called_once_with() self.assertEqual(len(e.parameters), pl.return_value) pl.assert_called_once_with() self.assertEqual(len(e.exports), pl.return_value) ex.assert_called_once_with() def test_constructor_empty_named(self, **types): name = 'empty' e = Entity(SETTINGS, 
*self._make_instances(**types), name=name) self.assertEqual(e.name, name) def test_constructor_empty_uri(self, **types): uri = 'test://uri' e = Entity(SETTINGS, *self._make_instances(**types), uri=uri) self.assertEqual(e.uri, uri) def test_constructor_empty_env(self, **types): env = 'not base' e = Entity(SETTINGS, *self._make_instances(**types), environment=env) self.assertEqual(e.environment, env) def test_equal_empty(self, **types): instances = self._make_instances(**types) self.assertEqual(Entity(SETTINGS, *instances), Entity(SETTINGS, *instances)) for i in instances: i.__eq__.assert_called_once_with(i) def test_equal_empty_named(self, **types): instances = self._make_instances(**types) self.assertEqual(Entity(SETTINGS, *instances), Entity(SETTINGS, *instances)) name = 'empty' self.assertEqual(Entity(SETTINGS, *instances, name=name), Entity(SETTINGS, *instances, name=name)) def test_unequal_empty_uri(self, **types): instances = self._make_instances(**types) uri = 'test://uri' self.assertNotEqual(Entity(SETTINGS, *instances, uri=uri), Entity(SETTINGS, *instances, uri=uri[::-1])) for i in instances: i.__eq__.assert_called_once_with(i) def test_unequal_empty_named(self, **types): instances = self._make_instances(**types) name = 'empty' self.assertNotEqual(Entity(SETTINGS, *instances, name=name), Entity(SETTINGS, *instances, name=name[::-1])) for i in instances: i.__eq__.assert_called_once_with(i) def test_unequal_types(self, **types): instances = self._make_instances(**types) self.assertNotEqual(Entity(SETTINGS, *instances, name='empty'), None) for i in instances: self.assertEqual(i.__eq__.call_count, 0) def _test_constructor_wrong_types(self, which_replace, **types): instances = self._make_instances(**types) instances[which_replace] = 'Invalid type' e = Entity(SETTINGS, *instances) def test_constructor_wrong_type_classes(self, **types): self.assertRaises(TypeError, self._test_constructor_wrong_types, 0) def test_constructor_wrong_type_applications(self, **types): 
self.assertRaises(TypeError, self._test_constructor_wrong_types, 1) def test_constructor_wrong_type_parameters(self, **types): self.assertRaises(TypeError, self._test_constructor_wrong_types, 2) def test_merge(self, **types): instances = self._make_instances(**types) e = Entity(SETTINGS, *instances) e.merge(e) for i, fn in zip(instances, ('merge_unique', 'merge_unique', 'merge')): getattr(i, fn).assert_called_once_with(i) def test_merge_newname(self, **types): instances = self._make_instances(**types) newname = 'newname' e1 = Entity(SETTINGS, *instances, name='oldname') e2 = Entity(SETTINGS, *instances, name=newname) e1.merge(e2) self.assertEqual(e1.name, newname) def test_merge_newuri(self, **types): instances = self._make_instances(**types) newuri = 'test://uri2' e1 = Entity(SETTINGS, *instances, uri='test://uri1') e2 = Entity(SETTINGS, *instances, uri=newuri) e1.merge(e2) self.assertEqual(e1.uri, newuri) def test_merge_newenv(self, **types): instances = self._make_instances(**types) newenv = 'new env' e1 = Entity(SETTINGS, *instances, environment='env') e2 = Entity(SETTINGS, *instances, environment=newenv) e1.merge(e2) self.assertEqual(e1.environment, newenv) def test_as_dict(self, **types): instances = self._make_instances(**types) entity = Entity(SETTINGS, *instances, name='test', environment='test') comp = {} comp['classes'] = instances[0].as_list() comp['applications'] = instances[1].as_list() comp['parameters'] = instances[2].as_dict() comp['exports'] = instances[3].as_dict() comp['environment'] = 'test' d = entity.as_dict() self.assertDictEqual(d, comp) class TestEntityNoMock(unittest.TestCase): def _make_inventory(self, nodes): return { name: NodeInventory(node, True) for name, node in iteritems(nodes) } def test_interpolate_list_types(self): node1_exports = Exports({'exps': [ '${one}' ] }, SETTINGS, 'first') node1_parameters = Parameters({'alpha': [ '${two}', '${three}' ], 'one': 1, 'two': 2, 'three': 3 }, SETTINGS, 'first') node1_entity = 
Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) node2_exports = Exports({'exps': '${alpha}' }, SETTINGS, 'second') node2_parameters = Parameters({}, SETTINGS, 'second') node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) result = {'exps': [ 1, 2, 3 ]} node1_entity.merge(node2_entity) node1_entity.interpolate(None) self.assertIs(type(node1_entity.exports.as_dict()['exps']), list) self.assertDictEqual(node1_entity.exports.as_dict(), result) def test_exports_with_refs(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) node3_exports = Exports({'a': '${a}', 'b': '${b}'}, SETTINGS, '') node3_parameters = Parameters({'name': 'node3', 'a': '${c}', 'b': 5}, SETTINGS, '') node3_parameters.merge({'c': 3}) node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) node3_entity.interpolate_exports() inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) result = { 'node1': NodeInventory({'a': 1, 'b': 2}, True), 'node2': NodeInventory({'a': 3, 'b': 4}, True), 'node3': NodeInventory({'a': 3, 'b': 5}, True) } self.assertDictEqual(inventory, result) def test_reference_to_an_export(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) node3_exports = Exports({'a': '${a}', 'b': '${b}'}, SETTINGS, '') node3_parameters = Parameters({'name': 'node3', 'ref': '${exp}', 'a': '${c}', 'b': 5}, SETTINGS, '') node3_parameters.merge({'c': 3, 'exp': '$[ exports:a ]'}) node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) node3_entity.interpolate_exports() inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) node3_entity.interpolate(inventory) res_inv = { 'node1': NodeInventory({'a': 1, 'b': 2}, True), 'node2': NodeInventory({'a': 3, 'b': 
4}, True), 'node3': NodeInventory({'a': 3, 'b': 5}, True) } res_params = {'a': 3, 'c': 3, 'b': 5, 'name': 'node3', 'exp': {'node1': 1, 'node3': 3, 'node2': 3}, 'ref': {'node1': 1, 'node3': 3, 'node2': 3}} self.assertDictEqual(node3_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) def test_exports_multiple_nodes(self): node1_exports = Exports({'a': '${a}'}, SETTINGS, '') node1_parameters = Parameters({'name': 'node1', 'a': { 'test': '${b}' }, 'b': 1, 'exp': '$[ exports:a ]'}, SETTINGS, '') node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) node2_exports = Exports({'a': '${a}'}, SETTINGS, '') node2_parameters = Parameters({'name': 'node2', 'a': { 'test': '${b}' }, 'b': 2 }, SETTINGS, '') node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) node1_entity.initialise_interpolation() node2_entity.initialise_interpolation() queries = node1_entity.parameters.get_inv_queries() for p, q in queries: node1_entity.interpolate_single_export(q) node2_entity.interpolate_single_export(q) res_inv = { 'node1': NodeInventory({'a': {'test': 1}}, True), 'node2': NodeInventory({'a': {'test': 2}}, True) } res_params = { 'name': 'node1', 'a': {'test': 1}, 'b': 1, 'exp': {'node1': {'test': 1}, 'node2': {'test': 2}} } inventory = self._make_inventory({'node1': node1_entity.exports.as_dict(), 'node2': node2_entity.exports.as_dict()}) node1_entity.interpolate(inventory) self.assertDictEqual(node1_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) def test_exports_with_ancestor_references(self): inventory = self._make_inventory({'node1': {'alpha' : {'beta': {'a': 1, 'b': 2}}}, 'node2': {'alpha' : {'beta': {'a': 3, 'b': 4}}}}) node3_exports = Exports({'alpha': '${alpha}'}, SETTINGS, '') node3_parameters = Parameters({'name': 'node3', 'alpha': {'beta' : {'a': 5, 'b': 6}}, 'exp': '$[ exports:alpha:beta ]'}, SETTINGS, '') 
node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) res_params = { 'name': 'node3', 'exp': {'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}, 'node3': {'a': 5, 'b': 6}}, 'alpha': {'beta': {'a': 5, 'b': 6}} } res_inv = { 'node1': NodeInventory({'alpha' : {'beta': {'a': 1, 'b': 2}}}, True), 'node2': NodeInventory({'alpha' : {'beta': {'a': 3, 'b': 4}}}, True), 'node3': NodeInventory({'alpha' : {'beta': {'a': 5, 'b': 6}}}, True) } node3_entity.initialise_interpolation() queries = node3_entity.parameters.get_inv_queries() for p, q in queries: node3_entity.interpolate_single_export(q) inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) node3_entity.interpolate(inventory) self.assertDictEqual(node3_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) def test_exports_with_nested_references(self): inventory = self._make_inventory({'node1': {'alpha': {'a': 1, 'b': 2}}, 'node2': {'alpha': {'a': 3, 'b': 4}}}) node3_exports = Exports({'alpha': '${alpha}'}, SETTINGS, '') node3_parameters = Parameters({ 'name': 'node3', 'alpha': {'a': '${one}', 'b': '${two}'}, 'beta': '$[ exports:alpha ]', 'one': '111', 'two': '${three}', 'three': '123'}, SETTINGS, '') node3_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node3_parameters, exports=node3_exports) res_params = { 'name': 'node3', 'alpha': { 'a': '111', 'b': '123' }, 'beta': { 'node1': {'a': 1, 'b': 2 }, 'node2': { 'a': 3, 'b': 4}, 'node3': { 'a': '111', 'b': '123' } }, 'one': '111', 'two': '123', 'three': '123' } res_inv = { 'node1': NodeInventory({'alpha': {'a': 1, 'b': 2}}, True), 'node2': NodeInventory({'alpha': {'a': 3, 'b': 4}}, True), 'node3': NodeInventory({'alpha': {'a': '111', 'b': '123'}}, True) } node3_entity.interpolate_exports() inventory['node3'] = NodeInventory(node3_entity.exports.as_dict(), True) node3_entity.interpolate(inventory) self.assertDictEqual(node3_parameters.as_dict(), 
res_params) self.assertDictEqual(inventory, res_inv) def test_exports_failed_render(self): node1_exports = Exports({'a': '${a}'}, SETTINGS, '') node1_parameters = Parameters({'name': 'node1', 'a': 1, 'exp': '$[ exports:a ]'}, SETTINGS, '') node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) node2_exports = Exports({'a': '${b}'}, SETTINGS, '') node2_parameters = Parameters({'name': 'node2', 'a': 2}, SETTINGS, '') node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) node1_entity.initialise_interpolation() node2_entity.initialise_interpolation() queries = node1_entity.parameters.get_inv_queries() with self.assertRaises(ResolveError) as e: for p, q in queries: node1_entity.interpolate_single_export(q) node2_entity.interpolate_single_export(q) self.assertEqual(e.exception.message, "-> \n Cannot resolve ${b}, at a") def test_exports_failed_render_ignore(self): node1_exports = Exports({'a': '${a}'}, SETTINGS, '') node1_parameters = Parameters({'name': 'node1', 'a': 1, 'exp': '$[ +IgnoreErrors exports:a ]'}, SETTINGS, '') node1_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node1_parameters, exports=node1_exports) node2_exports = Exports({'a': '${b}'}, SETTINGS, '') node2_parameters = Parameters({'name': 'node1', 'a': 2}, SETTINGS, '') node2_entity = Entity(SETTINGS, classes=None, applications=None, parameters=node2_parameters, exports=node2_exports) node1_entity.initialise_interpolation() node2_entity.initialise_interpolation() queries = node1_entity.parameters.get_inv_queries() for p, q in queries: node1_entity.interpolate_single_export(q) node2_entity.interpolate_single_export(q) res_inv = { 'node1': NodeInventory({'a': 1}, True), 'node2': NodeInventory({}, True) } res_params = { 'name': 'node1', 'a': 1, 'exp': {'node1': 1} } inventory = self._make_inventory({'node1': node1_entity.exports.as_dict(), 'node2': 
node2_entity.exports.as_dict()}) node1_entity.interpolate(inventory) self.assertDictEqual(node1_parameters.as_dict(), res_params) self.assertDictEqual(inventory, res_inv) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/datatypes/tests/test_exports.py000066400000000000000000000212551373565003400230770ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from six import iteritems from reclass.utils.parameterdict import ParameterDict from reclass.utils.parameterlist import ParameterList from reclass.settings import Settings from reclass.datatypes import Exports, Parameters from reclass.errors import ParseError from reclass.values import NodeInventory import unittest SETTINGS = Settings() class TestInvQuery(unittest.TestCase): def _make_inventory(self, nodes): return { name: NodeInventory(node, True) for name, node in iteritems(nodes) } def test_overwrite_method(self): exports = Exports({'alpha': { 'one': 1, 'two': 2}}, SETTINGS, '') data = {'alpha': { 'three': 3, 'four': 4}} exports.overwrite(data) exports.interpolate() self.assertEqual(exports.as_dict(), data) def test_interpolate_types(self): exports = Exports({'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]}, SETTINGS, '') result = {'alpha': { 'one': 1, 'two': 2}, 'beta': [ 1, 2 ]} self.assertIs(type(exports.as_dict()['alpha']), ParameterDict) self.assertIs(type(exports.as_dict()['beta']), ParameterList) exports.interpolate() self.assertIs(type(exports.as_dict()['alpha']), dict) self.assertIs(type(exports.as_dict()['beta']), list) self.assertEqual(exports.as_dict(), result) def test_malformed_invquery(self): with self.assertRaises(ParseError): p = Parameters({'exp': '$[ exports:a exports:b == self:test_value ]'}, SETTINGS, '') with self.assertRaises(ParseError): p = 
Parameters({'exp': '$[ exports:a if exports:b self:test_value ]'}, SETTINGS, '') with self.assertRaises(ParseError): p = Parameters({'exp': '$[ exports:a if exports:b == ]'}, SETTINGS, '') with self.assertRaises(ParseError): p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value and exports:c = self:test_value2 ]'}, SETTINGS, '') with self.assertRaises(ParseError): p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value or exports:c == ]'}, SETTINGS, '') with self.assertRaises(ParseError): p = Parameters({'exp': '$[ exports:a if exports:b == self:test_value anddd exports:c == self:test_value2 ]'}, SETTINGS, '') def test_value_expr_invquery(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) parameters = Parameters({'exp': '$[ exports:a ]'}, SETTINGS, '') result = {'exp': {'node1': 1, 'node2': 3}} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) parameters = Parameters({'exp': '$[ exports:a if exports:b == 4 ]'}, SETTINGS, '') result = {'exp': {'node2': 3}} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery_with_refs(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 4}}) parameters = Parameters({'exp': '$[ exports:a if exports:b == self:test_value ]', 'test_value': 2}, SETTINGS, '') result = {'exp': {'node1': 1}, 'test_value': 2} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_list_if_expr_invquery(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}}) parameters = Parameters({'exp': '$[ if exports:b == 2 ]'}, SETTINGS, '') result = {'exp': ['node1', 'node3']} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) 
def test_if_expr_invquery_wth_and(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 4, 'c': False}, 'node2': {'a': 3, 'b': 4, 'c': True}}) parameters = Parameters({'exp': '$[ exports:a if exports:b == 4 and exports:c == True ]'}, SETTINGS, '') result = {'exp': {'node2': 3}} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_if_expr_invquery_wth_or(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 4}, 'node2': {'a': 3, 'b': 3}}) parameters = Parameters({'exp': '$[ exports:a if exports:b == 4 or exports:b == 3 ]'}, SETTINGS, '') result = {'exp': {'node1': 1, 'node2': 3}} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_list_if_expr_invquery_with_and(self): inventory = self._make_inventory( { 'node1': {'a': 1, 'b': 2, 'c': 'green'}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2, 'c': 'red'} }) parameters = Parameters({'exp': '$[ if exports:b == 2 and exports:c == green ]'}, SETTINGS, '') result = {'exp': ['node1']} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_list_if_expr_invquery_with_and_missing(self): inventory = self._make_inventory({'node1': {'a': 1, 'b': 2, 'c': 'green'}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 2}}) mapping = {'exp': '$[ if exports:b == 2 and exports:c == green ]'} expected = {'exp': ['node1']} parameterss = Parameters(mapping, SETTINGS, '') parameterss.interpolate(inventory) self.assertEqual(parameterss.as_dict(), expected) def test_list_if_expr_invquery_with_or(self): inventory = self._make_inventory( { 'node1': {'a': 1, 'b': 2}, 'node2': {'a': 3, 'b': 3}, 'node3': {'a': 3, 'b': 4} }) parameters = Parameters({'exp': '$[ if exports:b == 2 or exports:b == 4 ]'}, SETTINGS, '') result = {'exp': ['node1', 'node3']} parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_merging_inv_queries(self): inventory = self._make_inventory({'node1': {'a': 1}, 
'node2': {'a': 1}, 'node3': {'a': 2}}) pars1 = Parameters({'exp': '$[ if exports:a == 1 ]'}, SETTINGS, '') pars2 = Parameters({'exp': '$[ if exports:a == 2 ]'}, SETTINGS, '') result = { 'exp': [ 'node1', 'node2', 'node3' ] } pars1.merge(pars2) pars1.interpolate(inventory) self.assertEqual(pars1.as_dict(), result) def test_same_expr_invquery_different_flags(self): inventory = { 'node1': NodeInventory({'a': 1}, True), 'node2': NodeInventory({'a': 2}, True), 'node3': NodeInventory({'a': 3}, False) } parameters = Parameters({'alpha': '$[ exports:a ]', 'beta': '$[ +AllEnvs exports:a ]'}, SETTINGS, '') result = { 'alpha': { 'node1': 1, 'node2': 2 }, 'beta': { 'node1': 1 , 'node2': 2, 'node3': 3 } } parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_same_if_expr_invquery_different_flags(self): inventory = { 'node1': NodeInventory({'a': 1, 'b': 1}, True), 'node2': NodeInventory({'a': 2, 'b': 2}, True), 'node3': NodeInventory({'a': 3, 'b': 2}, False) } parameters = Parameters( { 'alpha': '$[ exports:a if exports:b == 2 ]', 'beta': '$[ +AllEnvs exports:a if exports:b == 2]' }, SETTINGS, '') result = { 'alpha': { 'node2': 2 }, 'beta': { 'node2': 2, 'node3': 3 } } parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) def test_same_list_if_expr_invquery_different_flags(self): inventory = { 'node1': NodeInventory({'a': 1}, True), 'node2': NodeInventory({'a': 2}, True), 'node3': NodeInventory({'a': 2}, False) } parameters = Parameters( { 'alpha': '$[ if exports:a == 2 ]', 'beta': '$[ +AllEnvs if exports:a == 2]' }, SETTINGS, '') result = { 'alpha': [ 'node2' ], 'beta': [ 'node2', 'node3' ] } parameters.interpolate(inventory) self.assertEqual(parameters.as_dict(), result) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/datatypes/tests/test_parameters.py000066400000000000000000001050411373565003400235320ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass 
(http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import copy from six import iteritems from reclass.settings import Settings from reclass.datatypes import Parameters from reclass.utils.parameterdict import ParameterDict from reclass.utils.parameterlist import ParameterList from reclass.values.value import Value from reclass.values.valuelist import ValueList from reclass.values.scaitem import ScaItem from reclass.errors import ChangedConstantError, InfiniteRecursionError, InterpolationError, ResolveError, ResolveErrorList, TypeMergeError import unittest try: import unittest.mock as mock except ImportError: import mock SIMPLE = {'one': 1, 'two': 2, 'three': 3} SETTINGS = Settings() class MockDevice(object): def __init__(self): self._text = '' def write(self, s): self._text += s return def text(self): return self._text class TestParameters(unittest.TestCase): def _construct_mocked_params(self, iterable=None, settings=SETTINGS): p = Parameters(iterable, settings, '') self._base = base = p._base p._base = mock.MagicMock(spec_set=ParameterDict, wraps=base) p._base.__repr__ = mock.MagicMock(autospec=dict.__repr__, return_value=repr(base)) p._base.__getitem__.side_effect = base.__getitem__ p._base.__setitem__.side_effect = base.__setitem__ return p, p._base def test_len_empty(self): p, b = self._construct_mocked_params() l = 0 b.__len__.return_value = l self.assertEqual(len(p), l) b.__len__.assert_called_with() def test_constructor(self): p, b = self._construct_mocked_params(SIMPLE) l = len(SIMPLE) b.__len__.return_value = l self.assertEqual(len(p), l) b.__len__.assert_called_with() def test_repr_empty(self): p, b = self._construct_mocked_params() b.__repr__.return_value = repr({}) self.assertEqual('%r' % p, '%s(%r)' % (p.__class__.__name__, 
{})) b.__repr__.assert_called_once_with() def test_repr(self): p, b = self._construct_mocked_params(SIMPLE) b.__repr__.return_value = repr(SIMPLE) self.assertEqual('%r' % p, '%s(%r)' % (p.__class__.__name__, SIMPLE)) b.__repr__.assert_called_once_with() def test_equal_empty(self): p1, b1 = self._construct_mocked_params() p2, b2 = self._construct_mocked_params() b1.__eq__.return_value = True self.assertEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) def test_equal_default_delimiter(self): p1, b1 = self._construct_mocked_params(SIMPLE) p2, b2 = self._construct_mocked_params(SIMPLE, SETTINGS) b1.__eq__.return_value = True self.assertEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) def test_equal_contents(self): p1, b1 = self._construct_mocked_params(SIMPLE) p2, b2 = self._construct_mocked_params(SIMPLE) b1.__eq__.return_value = True self.assertEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) def test_unequal_content(self): p1, b1 = self._construct_mocked_params() p2, b2 = self._construct_mocked_params(SIMPLE) b1.__eq__.return_value = False self.assertNotEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) def test_unequal_delimiter(self): settings1 = Settings({'delimiter': ':'}) settings2 = Settings({'delimiter': '%'}) p1, b1 = self._construct_mocked_params(settings=settings1) p2, b2 = self._construct_mocked_params(settings=settings2) b1.__eq__.return_value = False self.assertNotEqual(p1, p2) b1.__eq__.assert_called_once_with(b2) def test_unequal_types(self): p1, b1 = self._construct_mocked_params() self.assertNotEqual(p1, None) self.assertEqual(b1.__eq__.call_count, 0) def test_construct_wrong_type(self): with self.assertRaises(TypeError) as e: self._construct_mocked_params(str('wrong type')) self.assertIn(str(e.exception), [ "Cannot merge objects into Parameters", # python 2 "Cannot merge objects into Parameters" ]) # python 3 def test_merge_wrong_type(self): p, b = self._construct_mocked_params() with self.assertRaises(TypeError) as e: p.merge(str('wrong 
type')) self.assertIn(str(e.exception), [ "Cannot merge objects into Parameters", # python 2 "Cannot merge objects into Parameters"]) # python 3 def test_get_dict(self): p, b = self._construct_mocked_params(SIMPLE) p.initialise_interpolation() self.assertDictEqual(p.as_dict(), SIMPLE) def test_merge_scalars(self): p1, b1 = self._construct_mocked_params(SIMPLE) mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)} p2, b2 = self._construct_mocked_params(mergee) p1.merge(p2) self.assertEqual(b1.get.call_count, 4) self.assertEqual(b1.__setitem__.call_count, 4) def test_stray_occurrence_overwrites_during_interpolation(self): p1 = Parameters({'r' : mock.sentinel.ref, 'b': '${r}'}, SETTINGS, '') p2 = Parameters({'b' : mock.sentinel.goal}, SETTINGS, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict()['b'], mock.sentinel.goal) class TestParametersNoMock(unittest.TestCase): def test_merge_scalars(self): p = Parameters(SIMPLE, SETTINGS, '') mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)} p.merge(mergee) p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), goal) def test_merge_scalars_overwrite(self): p = Parameters(SIMPLE, SETTINGS, '') mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)} p.merge(mergee) p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), goal) def test_merge_lists(self): l1 = [1,2,3] l2 = [2,3,4] p1 = Parameters(dict(list=l1[:]), SETTINGS, '') p2 = Parameters(dict(list=l2), SETTINGS, '') p1.merge(p2) p1.initialise_interpolation() self.assertListEqual(p1.as_dict()['list'], l1+l2) def test_merge_list_into_scalar(self): l = ['foo', 1, 2] p1 = Parameters(dict(key=l[0]), SETTINGS, '') p2 = Parameters(dict(key=l[1:]), SETTINGS, '') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge list over scalar, at key, in ; ") def 
test_merge_list_into_scalar_allow(self): settings = Settings({'allow_list_over_scalar': True}) l = ['foo', 1, 2] p1 = Parameters(dict(key=l[0]), settings, '') p2 = Parameters(dict(key=l[1:]), settings, '') p1.merge(p2) p1.interpolate() self.assertListEqual(p1.as_dict()['key'], l) def test_merge_scalar_over_list(self): l = ['foo', 1, 2] p1 = Parameters(dict(key=l[:2]), SETTINGS, '') p2 = Parameters(dict(key=l[2]), SETTINGS, '') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over list, at key, in ; ") def test_merge_scalar_over_list_allow(self): l = ['foo', 1, 2] settings = Settings({'allow_scalar_over_list': True}) p1 = Parameters(dict(key=l[:2]), settings, '') p2 = Parameters(dict(key=l[2]), settings, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict()['key'], l[2]) def test_merge_none_over_list(self): l = ['foo', 1, 2] settings = Settings({'allow_none_override': False}) p1 = Parameters(dict(key=l[:2]), settings, '') p2 = Parameters(dict(key=None), settings, '') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over list, at key, in ; ") def test_merge_none_over_list_allow(self): l = ['foo', 1, 2] settings = Settings({'allow_none_override': True}) p1 = Parameters(dict(key=l[:2]), settings, '') p2 = Parameters(dict(key=None), settings, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict()['key'], None) def test_merge_dict_over_scalar(self): d = { 'one': 1, 'two': 2 } p1 = Parameters({ 'a': 1 }, SETTINGS, '') p2 = Parameters({ 'a': d }, SETTINGS, '') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge dictionary over scalar, at a, in ; ") def test_merge_dict_over_scalar_allow(self): settings = Settings({'allow_dict_over_scalar': True}) d = { 'one': 1, 'two': 2 } p1 = Parameters({ 'a': 1 
}, settings, '') p2 = Parameters({ 'a': d }, settings, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), { 'a': d }) def test_merge_scalar_over_dict(self): d = { 'one': 1, 'two': 2} p1 = Parameters({ 'a': d }, SETTINGS, '') p2 = Parameters({ 'a': 1 }, SETTINGS, '') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over dictionary, at a, in ; ") def test_merge_scalar_over_dict_allow(self): d = { 'one': 1, 'two': 2} settings = Settings({'allow_scalar_over_dict': True}) p1 = Parameters({ 'a': d }, settings, '') p2 = Parameters({ 'a': 1 }, settings, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), { 'a': 1}) def test_merge_none_over_dict(self): p1 = Parameters(dict(key=SIMPLE), SETTINGS, '') p2 = Parameters(dict(key=None), SETTINGS, '') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge scalar over dictionary, at key, in ; ") def test_merge_none_over_dict_allow(self): settings = Settings({'allow_none_override': True}) p1 = Parameters(dict(key=SIMPLE), settings, '') p2 = Parameters(dict(key=None), settings, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict()['key'], None) def test_merge_list_over_dict(self): p1 = Parameters({}, SETTINGS, '') p2 = Parameters({'one': { 'a': { 'b': 'c' } } }, SETTINGS, 'second') p3 = Parameters({'one': { 'a': [ 'b' ] } }, SETTINGS, 'third') with self.assertRaises(TypeMergeError) as e: p1.merge(p2) p1.merge(p3) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Cannot merge list over dictionary, at one:a, in second; third") # def test_merge_bare_dict_over_dict(self): # settings = Settings({'allow_bare_override': True}) # p1 = Parameters(dict(key=SIMPLE), settings, '') # p2 = Parameters(dict(key=dict()), settings, '') # p1.merge(p2) # p1.initialise_interpolation() # self.assertEqual(p1.as_dict()['key'], {}) # def 
test_merge_bare_list_over_list(self): # l = ['foo', 1, 2] # settings = Settings({'allow_bare_override': True}) # p1 = Parameters(dict(key=l), settings, '') # p2 = Parameters(dict(key=list()), settings, '') # p1.merge(p2) # p1.initialise_interpolation() # self.assertEqual(p1.as_dict()['key'], []) def test_merge_dicts(self): mergee = {'five':5,'four':4,'None':None,'tuple':(1,2,3)} p = Parameters(dict(dict=SIMPLE), SETTINGS, '') p2 = Parameters(dict(dict=mergee), SETTINGS, '') p.merge(p2) p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), dict(dict=goal)) def test_merge_dicts_overwrite(self): mergee = {'two':5,'four':4,'three':None,'one':(1,2,3)} p = Parameters(dict(dict=SIMPLE), SETTINGS, '') p2 = Parameters(dict(dict=mergee), SETTINGS, '') p.merge(p2) p.initialise_interpolation() goal = SIMPLE.copy() goal.update(mergee) self.assertDictEqual(p.as_dict(), dict(dict=goal)) def test_merge_dicts_override(self): """Validate that tilde merge overrides function properly.""" mergee = {'~one': {'a': 'alpha'}, '~two': ['gamma']} base = {'one': {'b': 'beta'}, 'two': ['delta']} goal = {'one': {'a': 'alpha'}, 'two': ['gamma']} p = Parameters(dict(dict=base), SETTINGS, '') p2 = Parameters(dict(dict=mergee), SETTINGS, '') p.merge(p2) p.interpolate() self.assertDictEqual(p.as_dict(), dict(dict=goal)) def test_interpolate_single(self): v = 42 d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), 'bar': v} p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_multiple(self): v = '42' d = {'foo': 'bar'.join(SETTINGS.reference_sentinels) + 'meep'.join(SETTINGS.reference_sentinels), 'bar': v[0], 'meep': v[1]} p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_multilevel(self): v = 42 d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), 'bar': 'meep'.join(SETTINGS.reference_sentinels), 'meep': v} p = Parameters(d, SETTINGS, 
'') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_list(self): l = [41, 42, 43] d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), 'bar': l} p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], l) def test_interpolate_infrecursion(self): v = 42 d = {'foo': 'bar'.join(SETTINGS.reference_sentinels), 'bar': 'foo'.join(SETTINGS.reference_sentinels)} p = Parameters(d, SETTINGS, '') with self.assertRaises(InfiniteRecursionError) as e: p.interpolate() # interpolation can start with foo or bar self.assertIn(e.exception.message, [ "-> \n Infinite recursion: ${foo}, at bar", "-> \n Infinite recursion: ${bar}, at foo"]) def test_nested_references(self): d = {'a': '${${z}}', 'b': 2, 'z': 'b'} r = {'a': 2, 'b': 2, 'z': 'b'} p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict(), r) def test_nested_deep_references(self): d = {'one': { 'a': 1, 'b': '${one:${one:c}}', 'c': 'a' } } r = {'one': { 'a': 1, 'b': 1, 'c': 'a'} } p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict(), r) def test_stray_occurrence_overwrites_during_interpolation(self): p1 = Parameters({'r' : 1, 'b': '${r}'}, SETTINGS, '') p2 = Parameters({'b' : 2}, SETTINGS, '') p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict()['b'], 2) def test_referenced_dict_deep_overwrite(self): p1 = Parameters({'alpha': {'one': {'a': 1, 'b': 2} } }, SETTINGS, '') p2 = Parameters({'beta': '${alpha}'}, SETTINGS, '') p3 = Parameters({'alpha': {'one': {'c': 3, 'd': 4} }, 'beta': {'one': {'a': 99} } }, SETTINGS, '') r = {'alpha': {'one': {'a':1, 'b': 2, 'c': 3, 'd':4} }, 'beta': {'one': {'a':99, 'b': 2, 'c': 3, 'd':4} } } p1.merge(p2) p1.merge(p3) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_complex_reference_overwriting(self): p1 = Parameters({'one': 'abc_123_${two}_${three}', 'two': 'XYZ', 'four': 4}, SETTINGS, '') p2 = Parameters({'one': 'QWERTY_${three}_${four}', 'three': '999'}, SETTINGS, '') r = 
{'one': 'QWERTY_999_4', 'two': 'XYZ', 'three': '999', 'four': 4} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_nested_reference_with_overwriting(self): p1 = Parameters({'one': {'a': 1, 'b': 2, 'z': 'a'}, 'two': '${one:${one:z}}' }, SETTINGS, '') p2 = Parameters({'one': {'z': 'b'} }, SETTINGS, '') r = {'one': {'a': 1, 'b':2, 'z': 'b'}, 'two': 2} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_merge_referenced_lists(self): p1 = Parameters({'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6 ], 'three': '${one}'}, SETTINGS, '') p2 = Parameters({'three': '${two}'}, SETTINGS, '') r = {'one': [ 1, 2, 3 ], 'two': [ 4, 5, 6], 'three': [ 1, 2, 3, 4, 5, 6 ]} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_merge_referenced_dicts(self): p1 = Parameters({'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': '${one}'}, SETTINGS, '') p2 = Parameters({'three': '${two}'}, SETTINGS, '') r = {'one': {'a': 1, 'b': 2}, 'two': {'c': 3, 'd': 4}, 'three': {'a': 1, 'b': 2, 'c': 3, 'd': 4}} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_deep_refs_in_referenced_dicts(self): p = Parameters({'A': '${C:a}', 'B': {'a': 1, 'b': 2}, 'C': '${B}'}, SETTINGS, '') r = {'A': 1, 'B': {'a': 1, 'b': 2}, 'C': {'a': 1, 'b': 2}} p.interpolate() self.assertEqual(p.as_dict(), r) def test_overwrite_none(self): p1 = Parameters({'A': None, 'B': None, 'C': None, 'D': None, 'E': None, 'F': None}, SETTINGS, '') p2 = Parameters({'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': '${A}', 'E': '${B}', 'F': '${C}'}, SETTINGS, '') r = {'A': 'abc', 'B': [1, 2, 3], 'C': {'a': 'aaa', 'b': 'bbb'}, 'D': 'abc', 'E': [1, 2, 3], 'F': {'a': 'aaa', 'b': 'bbb'}} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_overwrite_dict(self): p1 = Parameters({'a': { 'one': 1, 'two': 2 }}, SETTINGS, '') p2 = Parameters({'~a': { 'three': 3, 'four': 4 }}, SETTINGS, '') r = {'a': { 'three': 3, 'four': 4 }} p1.merge(p2) 
p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_overwrite_list(self): p1 = Parameters({'a': [1, 2]}, SETTINGS, '') p2 = Parameters({'~a': [3, 4]}, SETTINGS, '') r = {'a': [3, 4]} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_interpolate_escaping(self): v = 'bar'.join(SETTINGS.reference_sentinels) d = {'foo': SETTINGS.escape_character + 'bar'.join(SETTINGS.reference_sentinels), 'bar': 'unused'} p = Parameters(d, SETTINGS, '') p.initialise_interpolation() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_double_escaping(self): v = SETTINGS.escape_character + 'meep' d = {'foo': SETTINGS.escape_character + SETTINGS.escape_character + 'bar'.join(SETTINGS.reference_sentinels), 'bar': 'meep'} p = Parameters(d, SETTINGS, '') p.interpolate() self.assertEqual(p.as_dict()['foo'], v) def test_interpolate_escaping_backwards_compatibility(self): """In all following cases, escaping should not happen and the escape character needs to be printed as-is, to ensure backwards compatibility to older versions.""" v = ' '.join([ # Escape character followed by unescapable character '1', SETTINGS.escape_character, # Escape character followed by escape character '2', SETTINGS.escape_character + SETTINGS.escape_character, # Escape character followed by interpolation end sentinel '3', SETTINGS.escape_character + SETTINGS.reference_sentinels[1], # Escape character at the end of the string '4', SETTINGS.escape_character ]) d = {'foo': v} p = Parameters(d, SETTINGS, '') p.initialise_interpolation() self.assertEqual(p.as_dict()['foo'], v) def test_escape_close_in_ref(self): p1 = Parameters({'one}': 1, 'two': '${one\\}}'}, SETTINGS, '') r = {'one}': 1, 'two': 1} p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_double_escape_in_ref(self): d = {'one\\': 1, 'two': '${one\\\\}'} p1 = Parameters(d, SETTINGS, '') r = {'one\\': 1, 'two': 1} p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_merging_for_multiple_nodes(self): p1 = 
Parameters({ 'alpha': { 'one': 111 }}, SETTINGS, '') p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '') p3 = Parameters({ 'beta': {'two': 222 }}, SETTINGS, '') n1 = Parameters({ 'name': 'node1'}, SETTINGS, '') r1 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 111 }, 'name': 'node1' } r2 = { 'alpha': { 'one': 111 }, 'beta': { 'two': 222 }, 'name': 'node2' } n1.merge(p1) n1.merge(p2) n1.interpolate() n2 = Parameters({'name': 'node2'}, SETTINGS, '') n2.merge(p1) n2.merge(p2) n2.merge(p3) n2.interpolate() self.assertEqual(n1.as_dict(), r1) self.assertEqual(n2.as_dict(), r2) def test_list_merging_for_multiple_nodes(self): p1 = Parameters({ 'alpha': { 'one': [1, 2] }}, SETTINGS, '') p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '') p3 = Parameters({ 'beta': {'two': [3] }}, SETTINGS, '') n1 = Parameters({ 'name': 'node1'}, SETTINGS, '') r1 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2] }, 'name': 'node1' } r2 = { 'alpha': { 'one': [1, 2] }, 'beta': { 'two': [1, 2, 3] }, 'name': 'node2' } n1.merge(p1) n1.merge(p2) n1.interpolate() n2 = Parameters({'name': 'node2'}, SETTINGS, '') n2.merge(p1) n2.merge(p2) n2.merge(p3) n2.interpolate() self.assertEqual(n1.as_dict(), r1) self.assertEqual(n2.as_dict(), r2) def test_dict_merging_for_multiple_nodes(self): p1 = Parameters({ 'alpha': { 'one': { 'a': 'aa', 'b': 'bb' }}}, SETTINGS, '') p2 = Parameters({ 'beta': {'two': '${alpha:one}' }}, SETTINGS, '') p3 = Parameters({ 'beta': {'two': {'c': 'cc' }}}, SETTINGS, '') n1 = Parameters({ 'name': 'node1'}, SETTINGS, '') r1 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb'} }, 'name': 'node1' } r2 = { 'alpha': { 'one': {'a': 'aa', 'b': 'bb'} }, 'beta': { 'two': {'a': 'aa', 'b': 'bb', 'c': 'cc'} }, 'name': 'node2' } n1.merge(p1) n1.merge(p2) n1.interpolate() n2 = Parameters({'name': 'node2'}, SETTINGS, '') n2.merge(p1) n2.merge(p2) n2.merge(p3) n2.interpolate() self.assertEqual(n1.as_dict(), r1) 
self.assertEqual(n2.as_dict(), r2) def test_list_merging_with_refs_for_multiple_nodes(self): p1 = Parameters({ 'alpha': { 'one': [1, 2], 'two': [3, 4] }}, SETTINGS, '') p2 = Parameters({ 'beta': { 'three': '${alpha:one}' }}, SETTINGS, '') p3 = Parameters({ 'beta': { 'three': '${alpha:two}' }}, SETTINGS, '') p4 = Parameters({ 'beta': { 'three': '${alpha:one}' }}, SETTINGS, '') n1 = Parameters({ 'name': 'node1' }, SETTINGS, '') r1 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2]}, 'name': 'node1'} r2 = {'alpha': {'one': [1, 2], 'two': [3, 4]}, 'beta': {'three': [1, 2, 3, 4, 1, 2]}, 'name': 'node2'} n2 = Parameters({ 'name': 'node2' }, SETTINGS, '') n2.merge(p1) n2.merge(p2) n2.merge(p3) n2.merge(p4) n2.interpolate() n1.merge(p1) n1.merge(p2) n1.interpolate() self.assertEqual(n1.as_dict(), r1) self.assertEqual(n2.as_dict(), r2) def test_nested_refs_with_multiple_nodes(self): p1 = Parameters({ 'alpha': { 'one': 1, 'two': 2 } }, SETTINGS, '') p2 = Parameters({ 'beta': { 'three': 'one' } }, SETTINGS, '') p3 = Parameters({ 'beta': { 'three': 'two' } }, SETTINGS, '') p4 = Parameters({ 'beta': { 'four': '${alpha:${beta:three}}' } }, SETTINGS, '') n1 = Parameters({ 'name': 'node1' }, SETTINGS, '') r1 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'one', 'four': 1}, 'name': 'node1'} r2 = {'alpha': {'one': 1, 'two': 2}, 'beta': {'three': 'two', 'four': 2}, 'name': 'node2'} n1.merge(p1) n1.merge(p4) n1.merge(p2) n1.interpolate() n2 = Parameters({ 'name': 'node2' }, SETTINGS, '') n2.merge(p1) n2.merge(p4) n2.merge(p3) n2.interpolate() self.assertEqual(n1.as_dict(), r1) self.assertEqual(n2.as_dict(), r2) def test_nested_refs_error_message(self): # beta is missing, oops p1 = Parameters({'alpha': {'one': 1, 'two': 2}, 'gamma': '${alpha:${beta}}'}, SETTINGS, '') with self.assertRaises(InterpolationError) as error: p1.interpolate() self.assertEqual(error.exception.message, "-> \n Bad references, at gamma\n ${beta}") def test_multiple_resolve_errors(self): p1 
= Parameters({'alpha': '${gamma}', 'beta': '${gamma}'}, SETTINGS, '') with self.assertRaises(ResolveErrorList) as error: p1.interpolate() # interpolation can start with either alpha or beta self.assertIn(error.exception.message, [ "-> \n Cannot resolve ${gamma}, at alpha\n Cannot resolve ${gamma}, at beta", "-> \n Cannot resolve ${gamma}, at beta\n Cannot resolve ${gamma}, at alpha"]) def test_force_single_resolve_error(self): settings = copy.deepcopy(SETTINGS) settings.group_errors = False p1 = Parameters({'alpha': '${gamma}', 'beta': '${gamma}'}, settings, '') with self.assertRaises(ResolveError) as error: p1.interpolate() # interpolation can start with either alpha or beta self.assertIn(error.exception.message, [ "-> \n Cannot resolve ${gamma}, at alpha", "-> \n Cannot resolve ${gamma}, at beta"]) def test_ignore_overwriten_missing_reference(self): settings = copy.deepcopy(SETTINGS) settings.ignore_overwritten_missing_references = True p1 = Parameters({'alpha': '${beta}'}, settings, '') p2 = Parameters({'alpha': '${gamma}'}, settings, '') p3 = Parameters({'gamma': 3}, settings, '') r1 = {'alpha': 3, 'gamma': 3} p1.merge(p2) p1.merge(p3) err1 = "[WARNING] Reference '${beta}' undefined\n" with mock.patch('sys.stderr', new=MockDevice()) as std_err: p1.interpolate() self.assertEqual(p1.as_dict(), r1) self.assertEqual(std_err.text(), err1) def test_ignore_overwriten_missing_reference_last_value(self): # an error should be raised if the last reference to be merged # is missing even if ignore_overwritten_missing_references is true settings = copy.deepcopy(SETTINGS) settings.ignore_overwritten_missing_references = True p1 = Parameters({'alpha': '${gamma}'}, settings, '') p2 = Parameters({'alpha': '${beta}'}, settings, '') p3 = Parameters({'gamma': 3}, settings, '') p1.merge(p2) p1.merge(p3) with self.assertRaises(InterpolationError) as error: p1.interpolate() self.assertEqual(error.exception.message, "-> \n Cannot resolve ${beta}, at alpha") def 
test_ignore_overwriten_missing_reference_dict(self): # setting ignore_overwritten_missing_references to true should # not change the behaviour for dicts settings = copy.deepcopy(SETTINGS) settings.ignore_overwritten_missing_references = True p1 = Parameters({'alpha': '${beta}'}, settings, '') p2 = Parameters({'alpha': '${gamma}'}, settings, '') p3 = Parameters({'gamma': {'one': 1, 'two': 2}}, settings, '') err1 = "[WARNING] Reference '${beta}' undefined\n" p1.merge(p2) p1.merge(p3) with self.assertRaises(InterpolationError) as error, mock.patch('sys.stderr', new=MockDevice()) as std_err: p1.interpolate() self.assertEqual(error.exception.message, "-> \n Cannot resolve ${beta}, at alpha") self.assertEqual(std_err.text(), err1) def test_escaped_string_in_ref_dict_1(self): # test with escaped string in first dict to be merged p1 = Parameters({'a': { 'one': '${a_ref}' }, 'b': { 'two': '\${not_a_ref}' }, 'c': '${b}', 'a_ref': 123}, SETTINGS, '') p2 = Parameters({'c': '${a}'}, SETTINGS, '') r = { 'a': { 'one': 123 }, 'b': { 'two': '${not_a_ref}' }, 'c': { 'one': 123, 'two': '${not_a_ref}' }, 'a_ref': 123} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_escaped_string_in_ref_dict_2(self): # test with escaped string in second dict to be merged p1 = Parameters({'a': { 'one': '${a_ref}' }, 'b': { 'two': '\${not_a_ref}' }, 'c': '${a}', 'a_ref': 123}, SETTINGS, '') p2 = Parameters({'c': '${b}'}, SETTINGS, '') r = { 'a': { 'one': 123 }, 'b': { 'two': '${not_a_ref}' }, 'c': { 'one': 123, 'two': '${not_a_ref}' }, 'a_ref': 123} p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_complex_overwrites_1(self): # find a better name for this test p1 = Parameters({ 'test': { 'dict': { 'a': '${values:one}', 'b': '${values:two}' } }, 'values': { 'one': 1, 'two': 2, 'three': { 'x': 'X', 'y': 'Y' } } }, SETTINGS, '') p2 = Parameters({ 'test': { 'dict': { 'c': '${values:two}' } } }, SETTINGS, '') p3 = Parameters({ 'test': { 'dict': { '~b': 
'${values:three}' } } }, SETTINGS, '') r = {'test': {'dict': {'a': 1, 'b': {'x': 'X', 'y': 'Y'}, 'c': 2}}, 'values': {'one': 1, 'three': {'x': 'X', 'y': 'Y'}, 'two': 2} } p2.merge(p3) p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_escaped_string_overwrites(self): p1 = Parameters({ 'test': '\${not_a_ref}' }, SETTINGS, '') p2 = Parameters({ 'test': '\${also_not_a_ref}' }, SETTINGS, '') r = { 'test': '${also_not_a_ref}' } p1.merge(p2) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_escaped_string_in_ref_dict_overwrite(self): p1 = Parameters({'a': { 'one': '\${not_a_ref}' }, 'b': { 'two': '\${also_not_a_ref}' }}, SETTINGS, '') p2 = Parameters({'c': '${a}'}, SETTINGS, '') p3 = Parameters({'c': '${b}'}, SETTINGS, '') p4 = Parameters({'c': { 'one': '\${again_not_a_ref}' } }, SETTINGS, '') r = {'a': {'one': '${not_a_ref}'}, 'b': {'two': '${also_not_a_ref}'}, 'c': {'one': '${again_not_a_ref}', 'two': '${also_not_a_ref}'}} p1.merge(p2) p1.merge(p3) p1.merge(p4) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_strict_constant_parameter(self): p1 = Parameters({'one': { 'a': 1} }, SETTINGS, 'first') p2 = Parameters({'one': { '=a': 2} }, SETTINGS, 'second') p3 = Parameters({'one': { 'a': 3} }, SETTINGS, 'third') with self.assertRaises(ChangedConstantError) as e: p1.merge(p2) p1.merge(p3) p1.interpolate() self.assertEqual(e.exception.message, "-> \n Attempt to change constant value, at one:a, in second; third") def test_constant_parameter(self): settings = Settings({'strict_constant_parameters': False}) p1 = Parameters({'one': { 'a': 1} }, settings, 'first') p2 = Parameters({'one': { '=a': 2} }, settings, 'second') p3 = Parameters({'one': { 'a': 3} }, settings, 'third') r = {'one': { 'a': 2 } } p1.merge(p2) p1.merge(p3) p1.interpolate() self.assertEqual(p1.as_dict(), r) def test_interpolated_list_type(self): p1 = Parameters({'a': [ 1, 2, 3 ]}, SETTINGS, 'first') r = {'a': [ 1, 2, 3 ]} self.assertIs(type(p1.as_dict()['a']), 
ParameterList) p1.interpolate() self.assertIs(type(p1.as_dict()['a']), list) self.assertEqual(p1.as_dict(), r) def test_interpolated_dict_type(self): p1 = Parameters({'a': { 'one': 1, 'two': 2, 'three': 3 }}, SETTINGS, 'first') r = {'a': { 'one': 1, 'two': 2, 'three': 3 }} self.assertIs(type(p1.as_dict()['a']), ParameterDict) p1.interpolate() self.assertIs(type(p1.as_dict()['a']), dict) self.assertEqual(p1.as_dict(), r) def test_merged_interpolated_list_type(self): p1 = Parameters({'a': [ 1, 2, 3 ]}, SETTINGS, 'first') p2 = Parameters({'a': [ 4, 5, 6 ]}, SETTINGS, 'second') r = {'a': [ 1, 2, 3, 4, 5, 6 ]} self.assertIs(type(p1.as_dict()['a']), ParameterList) self.assertIs(type(p2.as_dict()['a']), ParameterList) p1.merge(p2) self.assertIs(type(p1.as_dict()['a']), ValueList) p1.interpolate() self.assertIs(type(p1.as_dict()['a']), list) self.assertEqual(p1.as_dict(), r) def test_merged_interpolated_dict_type(self): p1 = Parameters({'a': { 'one': 1, 'two': 2, 'three': 3 }}, SETTINGS, 'first') p2 = Parameters({'a': { 'four': 4, 'five': 5, 'six': 6 }}, SETTINGS, 'second') r = {'a': { 'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}} self.assertIs(type(p1.as_dict()['a']), ParameterDict) self.assertIs(type(p2.as_dict()['a']), ParameterDict) p1.merge(p2) self.assertIs(type(p1.as_dict()['a']), ParameterDict) p1.interpolate() self.assertIs(type(p1.as_dict()['a']), dict) self.assertEqual(p1.as_dict(), r) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/defaults.py000066400000000000000000000034141373565003400170000ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os, sys from .version import RECLASS_NAME # defaults for the command-line options OPT_STORAGE_TYPE = 'yaml_fs' OPT_INVENTORY_BASE_URI = os.path.join('/etc', RECLASS_NAME) OPT_NODES_URI = 'nodes' OPT_CLASSES_URI = 'classes' OPT_PRETTY_PRINT = True OPT_GROUP_ERRORS = True OPT_COMPOSE_NODE_NAME = False OPT_NO_REFS = False OPT_OUTPUT = 'yaml' OPT_IGNORE_CLASS_NOTFOUND = False OPT_IGNORE_CLASS_NOTFOUND_REGEXP = ['.*'] OPT_IGNORE_CLASS_NOTFOUND_WARNING = True OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES = True OPT_STRICT_CONSTANT_PARAMETERS = True OPT_ALLOW_SCALAR_OVER_DICT = False OPT_ALLOW_SCALAR_OVER_LIST = False OPT_ALLOW_LIST_OVER_SCALAR = False OPT_ALLOW_DICT_OVER_SCALAR = False OPT_ALLOW_NONE_OVERRIDE = False OPT_INVENTORY_IGNORE_FAILED_NODE = False OPT_INVENTORY_IGNORE_FAILED_RENDER = False CONFIG_FILE_SEARCH_PATH = [os.getcwd(), os.path.expanduser('~'), OPT_INVENTORY_BASE_URI, os.path.dirname(sys.argv[0]) ] CONFIG_FILE_NAME = RECLASS_NAME + '-config.yml' REFERENCE_SENTINELS = ('${', '}') EXPORT_SENTINELS = ('$[', ']') PARAMETER_INTERPOLATION_DELIMITER = ':' PARAMETER_DICT_KEY_OVERRIDE_PREFIX = '~' PARAMETER_DICT_KEY_CONSTANT_PREFIX = '=' ESCAPE_CHARACTER = '\\' AUTOMATIC_RECLASS_PARAMETERS = True SCALAR_RECLASS_PARAMETERS = False DEFAULT_ENVIRONMENT = 'base' CLASS_MAPPINGS_MATCH_PATH = False reclass-1.7.0/reclass/errors.py000066400000000000000000000270011373565003400165030ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import posix, sys import traceback from reclass.defaults import REFERENCE_SENTINELS, EXPORT_SENTINELS from reclass.utils.dictpath import DictPath class ReclassException(Exception): def __init__(self, rc=posix.EX_SOFTWARE, msg=None, tbFlag=True): super(ReclassException, self).__init__() self._rc = rc self._msg = msg if tbFlag: self._traceback = traceback.format_exc() else: self._traceback = None self._full_traceback = False message = property(lambda self: self._get_message()) rc = property(lambda self: self._rc) def __str__(self): return self.message + '\n' + super(ReclassException, self).__str__() def _get_message(self): if self._msg: return self._msg else: return 'No error message provided.' def exit_with_message(self, out=sys.stderr): if self._full_traceback: t, v, tb = sys.exc_info() print('Full Traceback', file=out) for l in traceback.format_tb(tb): print(l, file=out) if self._traceback: print(self._traceback, file=out) print(self.message, file=out) sys.exit(self.rc) class PermissionError(ReclassException): def __init__(self, msg, rc=posix.EX_NOPERM): super(PermissionError, self).__init__(rc=rc, msg=msg) class InvocationError(ReclassException): def __init__(self, msg, rc=posix.EX_USAGE): super(InvocationError, self).__init__(rc=rc, msg=msg) class ConfigError(ReclassException): def __init__(self, msg, rc=posix.EX_CONFIG): super(ConfigError, self).__init__(rc=rc, msg=msg) class DuplicateUriError(ConfigError): def __init__(self, nodes_uri, classes_uri): super(DuplicateUriError, self).__init__(msg=None) self._nodes_uri = nodes_uri self._classes_uri = classes_uri def _get_message(self): return "The inventory URIs must not be the same " \ "for nodes and classes: {0}".format(self._nodes_uri) class UriOverlapError(ConfigError): def __init__(self, nodes_uri, 
classes_uri): super(UriOverlapError, self).__init__(msg=None) self._nodes_uri = nodes_uri self._classes_uri = classes_uri def _get_message(self): msg = "The URIs for the nodes and classes inventories must not " \ "overlap, but {0} and {1} do." return msg.format(self._nodes_uri, self._classes_uri) class NotFoundError(ReclassException): def __init__(self, msg, rc=posix.EX_IOERR): super(NotFoundError, self).__init__(rc=rc, msg=msg) class NodeNotFound(NotFoundError): def __init__(self, storage, nodename, uri): super(NodeNotFound, self).__init__(msg=None) self.storage = storage self.name = nodename self.uri = uri def _get_message(self): msg = "Node '{0}' not found under {1}://{2}" return msg.format(self.name, self.storage, self.uri) class InterpolationError(ReclassException): def __init__(self, msg=None, rc=posix.EX_DATAERR, nodename='', uri=None, context=None, tbFlag=True): super(InterpolationError, self).__init__(rc=rc, msg=msg, tbFlag=tbFlag) self.nodename = nodename self.uri = uri self.context = context def _get_message(self): msg = '-> {0}\n'.format(self.nodename) msg += self._render_error_message(self._get_error_message(), 1) msg = msg[:-1] return msg def _render_error_message(self, message_list, indent): msg = '' for l in message_list: if isinstance(l, list): msg += self._render_error_message(l, indent + 1) else: msg += (' ' * indent * 3) + l + '\n' return msg def _add_context_and_uri(self): msg = '' if self.context: msg += ', at %s' % str(self.context) if self.uri: msg += ', in %s' % self.uri return msg class ClassNotFound(InterpolationError): def __init__(self, storage, classname, path, nodename='', uri=None): super(ClassNotFound, self).__init__(msg=None, uri=uri, nodename=nodename) self.storage = storage self.name = classname self.path = path def _get_error_message(self): msg = [ 'In {0}'.format(self.uri), 'Class {0} not found under {1}://{2}'.format(self.name, self.storage, self.path) ] return msg class ClassNameResolveError(InterpolationError): def 
__init__(self, classname, nodename, uri): super(ClassNameResolveError, self).__init__(msg=None, uri=uri, nodename=nodename) self.name = classname def _get_error_message(self): msg = [ 'In {0}'.format(self.uri), 'Class name {0} not resolvable'.format(self.name) ] return msg class InvQueryClassNotFound(InterpolationError): def __init__(self, classNotFoundError, nodename=''): super(InvQueryClassNotFound, self).__init__(msg=None, nodename=nodename) self.classNotFoundError = classNotFoundError self._traceback = self.classNotFoundError._traceback def _get_error_message(self): msg = [ 'Inventory Queries:', '-> {0}'.format(self.classNotFoundError.nodename) ] msg.append(self.classNotFoundError._get_error_message()) return msg class InvQueryClassNameResolveError(InterpolationError): def __init__(self, classNameResolveError, nodename=''): super(InvQueryClassNameResolveError, self).__init__(msg=None, nodename=nodename) self.classNameResolveError = classNameResolveError self._traceback = self.classNameResolveError._traceback def _get_error_message(self): msg = [ 'Inventory Queries:', '-> {0}'.format(self.classNameResolveError.nodename) ] msg.append(self.classNameResolveError._get_error_message()) return msg class ResolveError(InterpolationError): def __init__(self, reference, uri=None, context=None): super(ResolveError, self).__init__(msg=None) self.reference = reference def _get_error_message(self): msg = 'Cannot resolve {0}'.format(self.reference.join(REFERENCE_SENTINELS)) + self._add_context_and_uri() return [ msg ] class ResolveErrorList(InterpolationError): def __init__(self): super(ResolveErrorList, self).__init__(msg=None) self.resolve_errors = [] self._traceback = False def add(self, resolve_error): self.resolve_errors.append(resolve_error) def have_errors(self): return len(self.resolve_errors) > 0 def _get_error_message(self): msgs = [] for e in self.resolve_errors: msgs.extend(e._get_error_message()) return msgs class InvQueryError(InterpolationError): def 
__init__(self, query, resolveError, uri=None, context=None): super(InvQueryError, self).__init__(msg=None) self.query = query self.resolveError = resolveError self._traceback = self.resolveError._traceback def _get_error_message(self): msg1 = 'Failed inv query {0}'.format(self.query.join(EXPORT_SENTINELS)) + self._add_context_and_uri() msg2 = '-> {0}'.format(self.resolveError.nodename) msg3 = self.resolveError._get_error_message() return [ msg1, msg2, msg3 ] class ParseError(InterpolationError): def __init__(self, msg, line, col, lineno, rc=posix.EX_DATAERR): super(ParseError, self).__init__(rc=rc, msg=None) self._err = msg self._line = line self._col = col self._lineno = lineno def _get_error_message(self): msg = [ 'Parse error: {0}'.format(self._line.join(EXPORT_SENTINELS)) + self._add_context_and_uri() ] msg.append('{0} at char {1}'.format(self._err, self._col - 1)) return msg class InfiniteRecursionError(InterpolationError): def __init__(self, context, ref, uri): super(InfiniteRecursionError, self).__init__(msg=None, tbFlag=False, uri=uri) self.context = context self.ref = ref def _get_error_message(self): msg = [ 'Infinite recursion: {0}'.format(self.ref.join(REFERENCE_SENTINELS)) + self._add_context_and_uri() ] return msg class BadReferencesError(InterpolationError): def __init__(self, refs, context, uri): super(BadReferencesError, self).__init__(msg=None, context=context, uri=uri, tbFlag=False) self.refs = [ r.join(REFERENCE_SENTINELS) for r in refs ] def _get_error_message(self): msg = [ 'Bad references' + self._add_context_and_uri(), ' ' + ', '.join(self.refs) ] return msg class TypeMergeError(InterpolationError): def __init__(self, value1, value2, uri): super(TypeMergeError, self).__init__(msg=None, uri=uri, tbFlag=False) self.type1 = value1.item_type_str() self.type2 = value2.item_type_str() def _get_error_message(self): msg = [ 'Cannot merge {0} over {1}'.format(self.type1, self.type2) + self._add_context_and_uri() ] return msg class 
ExpressionError(InterpolationError): def __init__(self, msg, rc=posix.EX_DATAERR, tbFlag=True): super(ExpressionError, self).__init__(rc=rc, msg=None, tbFlag=tbFlag) self._error_msg = msg def _get_error_message(self): msg = [ 'Expression error: {0}'.format(self._error_msg) + self._add_context_and_uri() ] return msg class ChangedConstantError(InterpolationError): def __init__(self, uri): super(ChangedConstantError, self).__init__(msg=None, uri=uri, tbFlag=False) def _get_error_message(self): msg = [ 'Attempt to change constant value' + self._add_context_and_uri() ] return msg class MappingError(ReclassException): def __init__(self, msg, rc=posix.EX_DATAERR): super(MappingError, self).__init__(rc=rc, msg=msg) class MappingFormatError(MappingError): def __init__(self, msg): super(MappingFormatError, self).__init__(msg) class NameError(ReclassException): def __init__(self, msg=None, rc=posix.EX_DATAERR): super(NameError, self).__init__(rc=rc, msg=msg) class InvalidClassnameError(NameError): def __init__(self, invalid_character, classname): super(InvalidClassnameError, self).__init__(msg=None) self._char = invalid_character self._classname = classname def _get_message(self): msg = "Invalid character '{0}' in class name '{1}'." return msg.format(self._char, self._classname) class DuplicateNodeNameError(NameError): def __init__(self, storage, name, uri1, uri2): super(DuplicateNodeNameError, self).__init__(msg=None) self._storage = storage self._name = name self._uris = (uri1, uri2) def _get_message(self): msg = "{0}: Definition of node '{1}' in '{2}' collides with " \ "definition in '{3}'. Nodes can only be defined once " \ "per inventory." 
return msg.format(self._storage, self._name, self._uris[1], self._uris[0]) class MissingModuleError(ReclassException): def __init__(self, modname): msg = "Module %s is missing" % modname super(MissingModuleError, self).__init__(rc=posix.EX_DATAERR, msg=msg) reclass-1.7.0/reclass/output/000077500000000000000000000000001373565003400161555ustar00rootroot00000000000000reclass-1.7.0/reclass/output/__init__.py000066400000000000000000000020761373565003400202730ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals class OutputterBase(object): def __init__(self): pass def dump(self, data, pretty_print=False): raise NotImplementedError("dump() method not implemented.") class OutputLoader(object): def __init__(self, outputter): self._name = 'reclass.output.' + outputter + '_outputter' try: self._module = __import__(self._name, globals(), locals(), self._name) except ImportError: raise NotImplementedError() def load(self, attr='Outputter'): klass = getattr(self._module, attr, None) if klass is None: raise AttributeError('Outputter class {0} does not export "{1}"'.format(self._name, klass)) return klass reclass-1.7.0/reclass/output/json_outputter.py000066400000000000000000000012711373565003400216340ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.output import OutputterBase import json class Outputter(OutputterBase): def dump(self, data, pretty_print=False, no_refs=False): separators = (',', ': ') if pretty_print else (',', ':') indent = 2 if pretty_print else None return json.dumps(data, indent=indent, separators=separators) reclass-1.7.0/reclass/output/yaml_outputter.py000066400000000000000000000017131373565003400216260ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.output import OutputterBase import yaml _SafeDumper = yaml.CSafeDumper if yaml.__with_libyaml__ else yaml.SafeDumper class Outputter(OutputterBase): def dump(self, data, pretty_print=False, no_refs=False): if (no_refs): return yaml.dump(data, default_flow_style=not pretty_print, Dumper=ExplicitDumper) else: return yaml.dump(data, default_flow_style=not pretty_print, Dumper=_SafeDumper) class ExplicitDumper(_SafeDumper): """ A dumper that will never emit aliases. 
""" def ignore_aliases(self, data): return True reclass-1.7.0/reclass/settings.py000066400000000000000000000057271373565003400170420ustar00rootroot00000000000000# -*- coding: utf-8 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import reclass.defaults as defaults from six import string_types, iteritems class Settings(object): known_opts = { 'allow_scalar_over_dict': defaults.OPT_ALLOW_SCALAR_OVER_DICT, 'allow_scalar_over_list': defaults.OPT_ALLOW_SCALAR_OVER_LIST, 'allow_list_over_scalar': defaults.OPT_ALLOW_LIST_OVER_SCALAR, 'allow_dict_over_scalar': defaults.OPT_ALLOW_DICT_OVER_SCALAR, 'allow_none_override': defaults.OPT_ALLOW_NONE_OVERRIDE, 'automatic_parameters': defaults.AUTOMATIC_RECLASS_PARAMETERS, 'class_mappings_match_path': defaults.CLASS_MAPPINGS_MATCH_PATH, 'scalar_parameters': defaults.SCALAR_RECLASS_PARAMETERS, 'default_environment': defaults.DEFAULT_ENVIRONMENT, 'delimiter': defaults.PARAMETER_INTERPOLATION_DELIMITER, 'dict_key_override_prefix': defaults.PARAMETER_DICT_KEY_OVERRIDE_PREFIX, 'dict_key_constant_prefix': defaults.PARAMETER_DICT_KEY_CONSTANT_PREFIX, 'escape_character': defaults.ESCAPE_CHARACTER, 'export_sentinels': defaults.EXPORT_SENTINELS, 'inventory_ignore_failed_node': defaults.OPT_INVENTORY_IGNORE_FAILED_NODE, 'inventory_ignore_failed_render': defaults.OPT_INVENTORY_IGNORE_FAILED_RENDER, 'reference_sentinels': defaults.REFERENCE_SENTINELS, 'ignore_class_notfound': defaults.OPT_IGNORE_CLASS_NOTFOUND, 'strict_constant_parameters': defaults.OPT_STRICT_CONSTANT_PARAMETERS, 'ignore_class_notfound_regexp': defaults.OPT_IGNORE_CLASS_NOTFOUND_REGEXP, 'ignore_class_notfound_warning': defaults.OPT_IGNORE_CLASS_NOTFOUND_WARNING, 'ignore_overwritten_missing_references': defaults.OPT_IGNORE_OVERWRITTEN_MISSING_REFERENCES, 'group_errors': defaults.OPT_GROUP_ERRORS, 'compose_node_name': defaults.OPT_COMPOSE_NODE_NAME, } def __init__(self, 
options={}): for opt_name, opt_value in iteritems(self.known_opts): setattr(self, opt_name, options.get(opt_name, opt_value)) self.dict_key_prefixes = [str(self.dict_key_override_prefix), str(self.dict_key_constant_prefix)] if isinstance(self.ignore_class_notfound_regexp, string_types): self.ignore_class_notfound_regexp = [ self.ignore_class_notfound_regexp] def __eq__(self, other): if isinstance(other, type(self)): return all(getattr(self, opt) == getattr(other, opt) for opt in self.known_opts) return False def __copy__(self): cls = self.__class__ result = cls.__new__(cls) result.__dict__.update(self.__dict__) return result def __deepcopy__(self, memo): return self.__copy__() reclass-1.7.0/reclass/storage/000077500000000000000000000000001373565003400162615ustar00rootroot00000000000000reclass-1.7.0/reclass/storage/__init__.py000066400000000000000000000031131373565003400203700ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.storage.common import NameMangler class NodeStorageBase(object): def __init__(self, name): self._name = name name = property(lambda self: self._name) def get_node(self, name, settings): msg = "Storage class '{0}' does not implement node entity retrieval." raise NotImplementedError(msg.format(self.name)) def get_class(self, name, environment, settings): msg = "Storage class '{0}' does not implement class entity retrieval." raise NotImplementedError(msg.format(self.name)) def enumerate_nodes(self): msg = "Storage class '{0}' does not implement node enumeration." raise NotImplementedError(msg.format(self.name)) def path_mangler(self): msg = "Storage class '{0}' does not implement path_mangler." 
raise NotImplementedError(msg.format(self.name)) class ExternalNodeStorageBase(NodeStorageBase): def __init__(self, name, compose_node_name): super(ExternalNodeStorageBase, self).__init__(name) self.class_name_mangler = NameMangler.classes if compose_node_name: self.node_name_mangler = NameMangler.composed_nodes else: self.node_name_mangler = NameMangler.nodes reclass-1.7.0/reclass/storage/common.py000066400000000000000000000024161373565003400201260ustar00rootroot00000000000000# -*- coding: utf-8 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os class NameMangler: @staticmethod def nodes(relpath, name): # nodes are identified just by their basename, so # no mangling required return relpath, name @staticmethod def composed_nodes(relpath, name): if relpath == '.' or relpath == '': # './' is converted to None return None, name parts = relpath.split(os.path.sep) if parts[0].startswith("_"): return relpath, name parts.append(name) return relpath, '.'.join(parts) @staticmethod def classes(relpath, name): if relpath == '.' or relpath == '': # './' is converted to None return None, name parts = relpath.split(os.path.sep) if name != 'init': # "init" is the directory index, so only append the basename # to the path parts for all other filenames. This has the # effect that data in file "foo/init.yml" will be registered # as data for class "foo", not "foo.init" parts.append(name) return relpath, '.'.join(parts) reclass-1.7.0/reclass/storage/loader.py000066400000000000000000000024041373565003400201010ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from importlib import import_module class StorageBackendLoader(object): def __init__(self, storage_name): self._name = str('reclass.storage.' + storage_name) try: self._module = import_module(self._name) except ImportError as e: raise NotImplementedError def load(self, klassname='ExternalNodeStorage'): klass = getattr(self._module, klassname, None) if klass is None: raise AttributeError('Storage backend class {0} does not export ' '"{1}"'.format(self._name, klassname)) return klass def path_mangler(self, name='path_mangler'): function = getattr(self._module, name, None) if function is None: raise AttributeError('Storage backend class {0} does not export ' '"{1}"'.format(self._name, name)) return function reclass-1.7.0/reclass/storage/memcache_proxy.py000066400000000000000000000043661373565003400216470ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.storage import NodeStorageBase STORAGE_NAME = 'memcache_proxy' class MemcacheProxy(NodeStorageBase): def __init__(self, real_storage, cache_classes=True, cache_nodes=True, cache_nodelist=True): name = '{0}({1})'.format(STORAGE_NAME, real_storage.name) super(MemcacheProxy, self).__init__(name) self._real_storage = real_storage self._cache_classes = cache_classes if cache_classes: self._classes_cache = {} self._cache_nodes = cache_nodes if cache_nodes: self._nodes_cache = {} self._cache_nodelist = cache_nodelist if cache_nodelist: self._nodelist_cache = None name = property(lambda self: self._real_storage.name) def get_node(self, name, settings): if not self._cache_nodes: return self._real_storage.get_node(name, settings) try: return self._nodes_cache[name] except KeyError as e: ret = self._real_storage.get_node(name, settings) self._nodes_cache[name] = ret return ret def get_class(self, name, environment, settings): if not self._cache_classes: return self._real_storage.get_class(name, environment, settings) try: return self._classes_cache[environment][name] except KeyError as e: if environment not in self._classes_cache: self._classes_cache[environment] = dict() ret = self._real_storage.get_class(name, environment, settings) self._classes_cache[environment][name] = ret return ret def enumerate_nodes(self): if not self._cache_nodelist: return self._real_storage.enumerate_nodes() elif self._nodelist_cache is None: self._nodelist_cache = self._real_storage.enumerate_nodes() return self._nodelist_cache reclass-1.7.0/reclass/storage/mixed/000077500000000000000000000000001373565003400173675ustar00rootroot00000000000000reclass-1.7.0/reclass/storage/mixed/__init__.py000066400000000000000000000045631373565003400215100ustar00rootroot00000000000000# # -*- 
coding: utf-8 -*- # # This file is part of reclass from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import collections import copy from six import iteritems import reclass.errors from reclass import get_storage from reclass.storage import ExternalNodeStorageBase def path_mangler(inventory_base_uri, nodes_uri, classes_uri): if nodes_uri == classes_uri: raise errors.DuplicateUriError(nodes_uri, classes_uri) return nodes_uri, classes_uri STORAGE_NAME = 'mixed' class ExternalNodeStorage(ExternalNodeStorageBase): MixedUri = collections.namedtuple('MixedURI', 'storage_type options') def __init__(self, nodes_uri, classes_uri, compose_node_name): super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) self._nodes_uri = self._uri(nodes_uri) self._nodes_storage = get_storage(self._nodes_uri.storage_type, self._nodes_uri.options, None, compose_node_name) self._classes_default_uri = self._uri(classes_uri) self._classes_default_storage = get_storage(self._classes_default_uri.storage_type, None, self._classes_default_uri.options, compose_node_name) self._classes_storage = dict() if 'env_overrides' in classes_uri: for override in classes_uri['env_overrides']: for (env, options) in iteritems(override): uri = copy.deepcopy(classes_uri) uri.update(options) uri = self._uri(uri) self._classes_storage[env] = get_storage(uri.storage_type, None, uri.options, compose_node_name) def _uri(self, uri): ret = copy.deepcopy(uri) ret['storage_type'] = uri['storage_type'] if 'env_overrides' in ret: del ret['env_overrides'] if uri['storage_type'] == 'yaml_fs': ret = ret['uri'] return self.MixedUri(uri['storage_type'], ret) def get_node(self, name, settings): return self._nodes_storage.get_node(name, settings) def get_class(self, name, environment, settings): storage = self._classes_storage.get(environment, self._classes_default_storage) return storage.get_class(name, environment, 
settings) def enumerate_nodes(self): return self._nodes_storage.enumerate_nodes() reclass-1.7.0/reclass/storage/tests/000077500000000000000000000000001373565003400174235ustar00rootroot00000000000000reclass-1.7.0/reclass/storage/tests/__init__.py000066400000000000000000000002511373565003400215320ustar00rootroot00000000000000# -*- coding: utf-8 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals reclass-1.7.0/reclass/storage/tests/test_loader.py000066400000000000000000000013251373565003400223030ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.storage.loader import StorageBackendLoader import unittest class TestLoader(unittest.TestCase): def test_load(self): loader = StorageBackendLoader('yaml_fs') from reclass.storage.yaml_fs import ExternalNodeStorage as YamlFs self.assertEqual(loader.load(), YamlFs) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/storage/tests/test_memcache_proxy.py000066400000000000000000000077461373565003400240550ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.settings import Settings from reclass.storage.memcache_proxy import MemcacheProxy from reclass.storage import NodeStorageBase import unittest try: import unittest.mock as mock except ImportError: import mock class TestMemcacheProxy(unittest.TestCase): def setUp(self): self._storage = mock.MagicMock(spec_set=NodeStorageBase) def test_no_nodes_caching(self): p = MemcacheProxy(self._storage, cache_nodes=False) NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_node.return_value = RET self.assertEqual(p.get_node(NAME, SETTINGS), RET) self.assertEqual(p.get_node(NAME, SETTINGS), RET) self.assertEqual(p.get_node(NAME2, SETTINGS), RET) self.assertEqual(p.get_node(NAME2, SETTINGS), RET) expected = [mock.call(NAME, SETTINGS), mock.call(NAME, SETTINGS), mock.call(NAME2, SETTINGS), mock.call(NAME2, SETTINGS)] self.assertListEqual(self._storage.get_node.call_args_list, expected) def test_nodes_caching(self): p = MemcacheProxy(self._storage, cache_nodes=True) NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_node.return_value = RET self.assertEqual(p.get_node(NAME, SETTINGS), RET) self.assertEqual(p.get_node(NAME, SETTINGS), RET) self.assertEqual(p.get_node(NAME2, SETTINGS), RET) self.assertEqual(p.get_node(NAME2, SETTINGS), RET) expected = [mock.call(NAME, SETTINGS), mock.call(NAME2, SETTINGS)] # called once each self.assertListEqual(self._storage.get_node.call_args_list, expected) def test_no_classes_caching(self): p = MemcacheProxy(self._storage, cache_classes=False) NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_class.return_value = RET self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) 
self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) expected = [mock.call(NAME, None, SETTINGS), mock.call(NAME, None, SETTINGS), mock.call(NAME2, None, SETTINGS), mock.call(NAME2, None, SETTINGS)] self.assertListEqual(self._storage.get_class.call_args_list, expected) def test_classes_caching(self): p = MemcacheProxy(self._storage, cache_classes=True) NAME = 'foo'; NAME2 = 'bar'; RET = 'baz'; SETTINGS = Settings() self._storage.get_class.return_value = RET self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) self.assertEqual(p.get_class(NAME, None, SETTINGS), RET) self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) self.assertEqual(p.get_class(NAME2, None, SETTINGS), RET) expected = [mock.call(NAME, None, SETTINGS), mock.call(NAME2, None, SETTINGS)] # called once each self.assertListEqual(self._storage.get_class.call_args_list, expected) def test_nodelist_no_caching(self): p = MemcacheProxy(self._storage, cache_nodelist=False) p.enumerate_nodes() p.enumerate_nodes() expected = [mock.call(), mock.call()] self.assertListEqual(self._storage.enumerate_nodes.call_args_list, expected) def test_nodelist_caching(self): p = MemcacheProxy(self._storage, cache_nodelist=True) p.enumerate_nodes() p.enumerate_nodes() expected = [mock.call()] # once only self.assertListEqual(self._storage.enumerate_nodes.call_args_list, expected) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/storage/tests/test_yamldata.py000066400000000000000000000023461373565003400226350ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.storage.yamldata import YamlData import unittest class TestYamlData(unittest.TestCase): def setUp(self): lines = [ 'classes:', ' - testdir.test1', ' - 
testdir.test2', ' - test3', '', 'environment: base', '', 'parameters:', ' _TEST_:', ' alpha: 1', ' beta: two' ] self.data = '\n'.join(lines) self.yamldict = { 'classes': [ 'testdir.test1', 'testdir.test2', 'test3' ], 'environment': 'base', 'parameters': { '_TEST_': { 'alpha': 1, 'beta': 'two' } } } def test_yaml_from_string(self): res = YamlData.from_string(self.data, 'testpath') self.assertEqual(res.uri, 'testpath') self.assertEqual(res.get_data(), self.yamldict) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/storage/yaml_fs/000077500000000000000000000000001373565003400177135ustar00rootroot00000000000000reclass-1.7.0/reclass/storage/yaml_fs/__init__.py000066400000000000000000000077361373565003400220410ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os, sys import yaml from reclass.output.yaml_outputter import ExplicitDumper from reclass.storage import ExternalNodeStorageBase from reclass.storage.yamldata import YamlData from .directory import Directory from reclass.datatypes import Entity import reclass.errors FILE_EXTENSION = ('.yml', '.yaml') STORAGE_NAME = 'yaml_fs' def vvv(msg): #print(msg, file=sys.stderr) pass def path_mangler(inventory_base_uri, nodes_uri, classes_uri): if inventory_base_uri is None: # if inventory_base is not given, default to current directory inventory_base_uri = os.getcwd() nodes_uri = nodes_uri or 'nodes' classes_uri = classes_uri or 'classes' def _path_mangler_inner(path): ret = os.path.join(inventory_base_uri, path) ret = os.path.expanduser(ret) return os.path.abspath(ret) n, c = map(_path_mangler_inner, (nodes_uri, classes_uri)) if n == c: raise errors.DuplicateUriError(n, c) common = 
os.path.commonprefix((n, c)) if common == n or common == c: raise errors.UriOverlapError(n, c) return n, c class ExternalNodeStorage(ExternalNodeStorageBase): def __init__(self, nodes_uri, classes_uri, compose_node_name): super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) if nodes_uri is not None: self._nodes_uri = nodes_uri self._nodes = self._enumerate_inventory(nodes_uri, self.node_name_mangler) if classes_uri is not None: self._classes_uri = classes_uri self._classes = self._enumerate_inventory(classes_uri, self.class_name_mangler) nodes_uri = property(lambda self: self._nodes_uri) classes_uri = property(lambda self: self._classes_uri) def _enumerate_inventory(self, basedir, name_mangler): ret = {} def register_fn(dirpath, filenames): filenames = [f for f in filenames if f.endswith(FILE_EXTENSION)] vvv('REGISTER {0} in path {1}'.format(filenames, dirpath)) for f in filenames: name = os.path.splitext(f)[0] relpath = os.path.relpath(dirpath, basedir) if callable(name_mangler): relpath, name = name_mangler(relpath, name) uri = os.path.join(dirpath, f) if name in ret: E = reclass.errors.DuplicateNodeNameError raise E(self.name, name, os.path.join(basedir, ret[name]), uri) if relpath: f = os.path.join(relpath, f) ret[name] = f d = Directory(basedir) d.walk(register_fn) return ret def get_node(self, name, settings): vvv('GET NODE {0}'.format(name)) try: relpath = self._nodes[name] path = os.path.join(self.nodes_uri, relpath) pathname = os.path.splitext(relpath)[0] except KeyError as e: raise reclass.errors.NodeNotFound(self.name, name, self.nodes_uri) entity = YamlData.from_file(path).get_entity(name, pathname, settings) return entity def get_class(self, name, environment, settings): vvv('GET CLASS {0}'.format(name)) try: path = os.path.join(self.classes_uri, self._classes[name]) pathname = os.path.splitext(self._classes[name])[0] except KeyError as e: raise reclass.errors.ClassNotFound(self.name, name, self.classes_uri) entity = 
YamlData.from_file(path).get_entity(name, pathname, settings) return entity def enumerate_nodes(self): return self._nodes.keys() reclass-1.7.0/reclass/storage/yaml_fs/directory.py000066400000000000000000000043221373565003400222720ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os from reclass.errors import NotFoundError SKIPDIRS = ('CVS', 'SCCS') FILE_EXTENSION = ('.yml', '.yaml') def vvv(msg): #print(msg, file=sys.stderr) pass class Directory(object): def __init__(self, path, fileclass=None): ''' Initialise a directory object ''' if not os.path.isdir(path): raise NotFoundError('No such directory: %s' % path) if not os.access(path, os.R_OK|os.X_OK): raise NotFoundError('Cannot change to or read directory: %s' % path) self._path = path self._fileclass = fileclass self._files = {} def _register_files(self, dirpath, filenames): for f in filter(lambda f: f.endswith(FILE_EXTENSION), filenames): vvv('REGISTER {0}'.format(f)) f = os.path.join(dirpath, f) ptr = None if not self._fileclass else self._fileclass(f) self._files[f] = ptr files = property(lambda self: self._files) def walk(self, register_fn=None): if not callable(register_fn): register_fn = self._register_files def _error(exc): raise(exc) for dirpath, dirnames, filenames in os.walk(self._path, topdown=True, onerror=_error, followlinks=True): vvv('RECURSE {0}, {1} files, {2} subdirectories'.format( dirpath.replace(os.getcwd(), '.'), len(filenames), len(dirnames))) for d in dirnames: if d.startswith('.') or d in SKIPDIRS: vvv(' SKIP subdirectory {0}'.format(d)) dirnames.remove(d) register_fn(dirpath, filenames) def __repr__(self): return '<{0} {1}>'.format(self.__class__.__name__, 
self._path) reclass-1.7.0/reclass/storage/yaml_git/000077500000000000000000000000001373565003400200665ustar00rootroot00000000000000reclass-1.7.0/reclass/storage/yaml_git/__init__.py000066400000000000000000000310031373565003400221740ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import collections import distutils.version import errno import fcntl import os import time # Squelch warning on centos7 due to upgrading cffi # see https://github.com/saltstack/salt/pull/39871 import warnings with warnings.catch_warnings(): warnings.simplefilter('ignore') try: # NOTE: in some distros pygit2 could require special effort to acquire. # It is not a problem per se, but it breaks tests for no real reason. # This try block is for keeping tests sane. import pygit2 except ImportError: pygit2 = None from six import iteritems import reclass.errors from reclass.storage import ExternalNodeStorageBase from reclass.storage.yamldata import YamlData FILE_EXTENSION = ('.yml', '.yaml') STORAGE_NAME = 'yaml_git' def path_mangler(inventory_base_uri, nodes_uri, classes_uri): if nodes_uri == classes_uri: raise errors.DuplicateUriError(nodes_uri, classes_uri) return nodes_uri, classes_uri GitMD = collections.namedtuple('GitMD', ['name', 'path', 'id'], rename=False) class GitURI(object): def __init__(self, dictionary): self.repo = None self.branch = None self.root = None self.cache_dir = None self.lock_dir = None self.pubkey = None self.privkey = None self.password = None self.update(dictionary) def update(self, dictionary): if 'repo' in dictionary: self.repo = dictionary['repo'] if 'branch' in dictionary: self.branch = dictionary['branch'] if 'cache_dir' in dictionary: self.cache_dir = dictionary['cache_dir'] if 'lock_dir' in dictionary: self.lock_dir = dictionary['lock_dir'] if 'pubkey' in dictionary: 
self.pubkey = dictionary['pubkey'] if 'privkey' in dictionary: self.privkey = dictionary['privkey'] if 'password' in dictionary: self.password = dictionary['password'] if 'root' in dictionary: if dictionary['root'] is None: self.root = None else: self.root = dictionary['root'].replace('/', '.') def __repr__(self): return '<{0}: {1} {2} {3}>'.format(self.__class__.__name__, self.repo, self.branch, self.root) class LockFile(): def __init__(self, file): self._file = file def __enter__(self): self._fd = open(self._file, 'w+') start = time.time() while True: if (time.time() - start) > 120: raise IOError('Timeout waiting to lock file: {0}'.format(self._file)) try: fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB) break except IOError as e: # raise on unrelated IOErrors if e.errno != errno.EAGAIN: raise else: time.sleep(0.1) def __exit__(self, type, value, traceback): self._fd.close() class GitRepo(object): def __init__(self, uri, node_name_mangler, class_name_mangler): if pygit2 is None: raise errors.MissingModuleError('pygit2') self.transport, _, self.url = uri.repo.partition('://') self.name = self.url.replace('/', '_') self.credentials = None self.remotecallbacks = None if uri.cache_dir is None: self.cache_dir = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/git', self.name) else: self.cache_dir = '{0}/{1}'.format(uri.cache_dir, self.name) if uri.lock_dir is None: self.lock_file = '{0}/{1}/{2}'.format(os.path.expanduser("~"), '.reclass/cache/lock', self.name) else: self.lock_file = '{0}/{1}'.format(uri.lock_dir, self.name) lock_dir = os.path.dirname(self.lock_file) if not os.path.exists(lock_dir): os.makedirs(lock_dir) self._node_name_mangler = node_name_mangler self._class_name_mangler = class_name_mangler with LockFile(self.lock_file): self._init_repo(uri) self._fetch() self.branches = self.repo.listall_branches() self.files = self.files_in_repo() def _init_repo(self, uri): if os.path.exists(self.cache_dir): self.repo = 
pygit2.Repository(self.cache_dir) else: os.makedirs(self.cache_dir) self.repo = pygit2.init_repository(self.cache_dir, bare=True) self.repo.create_remote('origin', self.url) if 'ssh' in self.transport: if '@' in self.url: user, _, _ = self.url.partition('@') else: user = 'gitlab' if uri.pubkey is not None: creds = pygit2.Keypair(user, uri.pubkey, uri.privkey, uri.password) else: creds = pygit2.KeypairFromAgent(user) pygit2_version = pygit2.__version__ if distutils.version.LooseVersion(pygit2_version) >= distutils.version.LooseVersion('0.23.2'): self.remotecallbacks = pygit2.RemoteCallbacks(credentials=creds) self.credentials = None else: self.remotecallbacks = None self.credentials = creds def _fetch(self): origin = self.repo.remotes[0] fetch_kwargs = {} if self.remotecallbacks is not None: fetch_kwargs['callbacks'] = self.remotecallbacks if self.credentials is not None: origin.credentials = self.credentials fetch_results = origin.fetch(**fetch_kwargs) remote_branches = self.repo.listall_branches(pygit2.GIT_BRANCH_REMOTE) local_branches = self.repo.listall_branches() for remote_branch_name in remote_branches: _, _, local_branch_name = remote_branch_name.partition('/') remote_branch = self.repo.lookup_branch(remote_branch_name, pygit2.GIT_BRANCH_REMOTE) if local_branch_name not in local_branches: local_branch = self.repo.create_branch(local_branch_name, self.repo[remote_branch.target.hex]) local_branch.upstream = remote_branch else: local_branch = self.repo.lookup_branch(local_branch_name) if local_branch.target != remote_branch.target: local_branch.set_target(remote_branch.target) local_branches = self.repo.listall_branches() for local_branch_name in local_branches: remote_branch_name = '{0}/{1}'.format(origin.name, local_branch_name) if remote_branch_name not in remote_branches: local_branch = self.repo.lookup_branch(local_branch_name) local.branch.delete() def get(self, id): return self.repo.get(id) def files_in_tree(self, tree, path): files = [] for entry in 
tree: if entry.filemode == pygit2.GIT_FILEMODE_TREE: subtree = self.repo.get(entry.id) if path == '': subpath = entry.name else: subpath = '/'.join([path, entry.name]) files.extend(self.files_in_tree(subtree, subpath)) else: if path == '': relpath = entry.name else: relpath = '/'.join([path, entry.name]) files.append(GitMD(entry.name, relpath, entry.id)) return files def files_in_branch(self, branch): tree = self.repo.revparse_single(branch).tree return self.files_in_tree(tree, '') def files_in_repo(self): ret = {} for bname in self.branches: branch = {} files = self.files_in_branch(bname) for file in files: if file.name.endswith(FILE_EXTENSION): name = os.path.splitext(file.name)[0] relpath = os.path.dirname(file.path) if callable(self._class_name_mangler): relpath, name = self._class_name_mangler(relpath, name) if name in ret: raise reclass.errors.DuplicateNodeNameError(self.url + ' - ' + bname, name, ret[name], file) else: branch[name] = file ret[bname] = branch return ret def nodes(self, branch, subdir): ret = {} for (name, file) in iteritems(self.files[branch]): if subdir is None or name.startswith(subdir): node_name = os.path.splitext(file.name)[0] relpath = os.path.dirname(file.path) if callable(self._node_name_mangler): relpath, node_name = self._node_name_mangler(relpath, node_name) if node_name in ret: raise reclass.errors.DuplicateNodeNameError(self.url, name, ret[node_name].path, file.path) else: ret[node_name] = file return ret class ExternalNodeStorage(ExternalNodeStorageBase): def __init__(self, nodes_uri, classes_uri, compose_node_name): super(ExternalNodeStorage, self).__init__(STORAGE_NAME, compose_node_name) self._repos = dict() if nodes_uri is not None: self._nodes_uri = GitURI({ 'branch': 'master' }) self._nodes_uri.update(nodes_uri) self._load_repo(self._nodes_uri) self._nodes = self._repos[self._nodes_uri.repo].nodes(self._nodes_uri.branch, self._nodes_uri.root) if classes_uri is not None: self._classes_default_uri = GitURI({ 'branch': 
'__env__' }) self._classes_default_uri.update(classes_uri) self._load_repo(self._classes_default_uri) self._classes_uri = [] if 'env_overrides' in classes_uri: for override in classes_uri['env_overrides']: for (env, options) in iteritems(override): uri = GitURI(self._classes_default_uri) uri.update({ 'branch': env }) uri.update(options) self._classes_uri.append((env, uri)) self._load_repo(uri) self._classes_uri.append(('*', self._classes_default_uri)) nodes_uri = property(lambda self: self._nodes_uri) classes_uri = property(lambda self: self._classes_uri) def get_node(self, name, settings): file = self._nodes[name] blob = self._repos[self._nodes_uri.repo].get(file.id) uri = 'git_fs://{0} {1} {2}'.format(self._nodes_uri.repo, self._nodes_uri.branch, file.path) pathname = os.path.splitext(file.path)[0] entity = YamlData.from_string(blob.data, uri).get_entity(name, pathname, settings) return entity def get_class(self, name, environment, settings): uri = self._env_to_uri(environment) if uri.root is not None: name = '{0}.{1}'.format(uri.root, name) if uri.repo not in self._repos: raise reclass.errors.NotFoundError("Repo " + uri.repo + " unknown or missing") if uri.branch not in self._repos[uri.repo].files: raise reclass.errors.NotFoundError("Branch " + uri.branch + " missing from " + uri.repo) if name not in self._repos[uri.repo].files[uri.branch]: raise reclass.errors.NotFoundError("File " + name + " missing from " + uri.repo + " branch " + uri.branch) file = self._repos[uri.repo].files[uri.branch][name] blob = self._repos[uri.repo].get(file.id) uri = 'git_fs://{0} {1} {2}'.format(uri.repo, uri.branch, file.path) pathname = os.path.splitext(file.path)[0] entity = YamlData.from_string(blob.data, uri).get_entity(name, pathname, settings) return entity def enumerate_nodes(self): return self._nodes.keys() def _load_repo(self, uri): if uri.repo not in self._repos: self._repos[uri.repo] = GitRepo(uri, self.node_name_mangler, self.class_name_mangler) def _env_to_uri(self, 
environment): ret = None if environment is None: ret = self._classes_default_uri else: for env, uri in self._classes_uri: if env == environment: ret = uri break if ret is None: ret = self._classes_default_uri if ret.branch == '__env__': ret.branch = environment if ret.branch == None: ret.branch = 'master' return ret reclass-1.7.0/reclass/storage/yamldata.py000066400000000000000000000072111373565003400204300ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass import datatypes import yaml import os from reclass.errors import NotFoundError _SafeLoader = yaml.CSafeLoader if yaml.__with_libyaml__ else yaml.SafeLoader class YamlData(object): @classmethod def from_file(cls, path): ''' Initialise yaml data from a local file ''' abs_path = os.path.abspath(path) if not os.path.isfile(abs_path): raise NotFoundError('No such file: %s' % abs_path) if not os.access(abs_path, os.R_OK): raise NotFoundError('Cannot open: %s' % abs_path) y = cls('yaml_fs://{0}'.format(abs_path)) with open(abs_path) as fp: data = yaml.load(fp, Loader=_SafeLoader) if data is not None: y._data = data return y @classmethod def from_string(cls, string, uri): ''' Initialise yaml data from a string ''' y = cls(uri) data = yaml.load(string, Loader=_SafeLoader) if data is not None: y._data = data return y def __init__(self, uri): self._uri = uri self._data = dict() uri = property(lambda self: self._uri) def get_data(self): return self._data def set_absolute_names(self, name, names): new_names = [] for n in names: if n[0] == '.': dots = self.count_dots(n) levels_up = (dots * (-1)) parent = '.'.join(name.split('.')[0:levels_up]) if parent == '': n = n[dots:] else: n = parent + n[dots - 
1:] new_names.append(n) return new_names def yield_dots(self, value): try: idx = value.index('.') except ValueError: return if idx == 0: yield '.' for dot in self.yield_dots(value[1:]): yield dot def count_dots(self, value): return len(list(self.yield_dots(value))) def get_entity(self, name, pathname, settings): classes = self._data.get('classes') if classes is None: classes = [] classes = self.set_absolute_names(name, classes) classes = datatypes.Classes(classes) applications = self._data.get('applications') if applications is None: applications = [] applications = datatypes.Applications(applications) parameters = self._data.get('parameters') if parameters is None: parameters = {} parameters = datatypes.Parameters(parameters, settings, self._uri) exports = self._data.get('exports') if exports is None: exports = {} exports = datatypes.Exports(exports, settings, self._uri) env = self._data.get('environment', None) return datatypes.Entity(settings, classes=classes, applications=applications, parameters=parameters, exports=exports, name=name, pathname=pathname, environment=env, uri=self.uri) def __str__(self): return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri, self._data) def __repr__(self): return '<{0} {1}, {2}>'.format(self.__class__.__name__, self._uri, self._data.keys()) reclass-1.7.0/reclass/tests/000077500000000000000000000000001373565003400157575ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/__init__.py000066400000000000000000000002511373565003400200660ustar00rootroot00000000000000# -*- coding: utf-8 from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals 
reclass-1.7.0/reclass/tests/data/000077500000000000000000000000001373565003400166705ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/01/000077500000000000000000000000001373565003400171105ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/01/classes/000077500000000000000000000000001373565003400205455ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/01/classes/standard.yml000066400000000000000000000000601373565003400230640ustar00rootroot00000000000000parameters: int: 1 string: '1' bool: True reclass-1.7.0/reclass/tests/data/01/nodes/000077500000000000000000000000001373565003400202205ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/01/nodes/class_notfound.yml000066400000000000000000000000771373565003400237700ustar00rootroot00000000000000classes: - missing parameters: node_test: class not found reclass-1.7.0/reclass/tests/data/01/nodes/data_types.yml000066400000000000000000000000261373565003400230760ustar00rootroot00000000000000classes: - standard reclass-1.7.0/reclass/tests/data/02/000077500000000000000000000000001373565003400171115ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/02/classes/000077500000000000000000000000001373565003400205465ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/02/classes/four.yml000066400000000000000000000000341373565003400222410ustar00rootroot00000000000000parameters: four_alpha: 3 reclass-1.7.0/reclass/tests/data/02/classes/init.yml000066400000000000000000000000331373565003400222300ustar00rootroot00000000000000parameters: alpha_init: 5reclass-1.7.0/reclass/tests/data/02/classes/one/000077500000000000000000000000001373565003400213275ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/02/classes/one/alpha.yml000066400000000000000000000002671373565003400231440ustar00rootroot00000000000000classes: - .beta - two.beta - ..four - ..two.gamma - ..init parameters: test1: ${one_beta} test2: ${two_beta} test3: ${four_alpha} test4: ${two_gamma} test5: ${alpha_init} 
reclass-1.7.0/reclass/tests/data/02/classes/one/beta.yml000066400000000000000000000000321373565003400227600ustar00rootroot00000000000000parameters: one_beta: 1 reclass-1.7.0/reclass/tests/data/02/classes/three.yml000066400000000000000000000000261373565003400223760ustar00rootroot00000000000000classes: - .one.alpha reclass-1.7.0/reclass/tests/data/02/classes/two/000077500000000000000000000000001373565003400213575ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/02/classes/two/beta.yml000066400000000000000000000000321373565003400230100ustar00rootroot00000000000000parameters: two_beta: 2 reclass-1.7.0/reclass/tests/data/02/classes/two/gamma.yml000066400000000000000000000000331373565003400231600ustar00rootroot00000000000000parameters: two_gamma: 4 reclass-1.7.0/reclass/tests/data/02/nodes/000077500000000000000000000000001373565003400202215ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/02/nodes/relative.yml000066400000000000000000000000271373565003400225560ustar00rootroot00000000000000classes: - one.alpha reclass-1.7.0/reclass/tests/data/02/nodes/top_relative.yml000066400000000000000000000000231373565003400234340ustar00rootroot00000000000000classes: - three reclass-1.7.0/reclass/tests/data/03/000077500000000000000000000000001373565003400171125ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/03/classes/000077500000000000000000000000001373565003400205475ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/03/classes/a.yml000066400000000000000000000000711373565003400215100ustar00rootroot00000000000000parameters: a: 1 alpha: - ${a} beta: a: ${a} reclass-1.7.0/reclass/tests/data/03/classes/b.yml000066400000000000000000000000711373565003400215110ustar00rootroot00000000000000parameters: b: 2 alpha: - ${b} beta: b: ${b} reclass-1.7.0/reclass/tests/data/03/classes/c.yml000066400000000000000000000000711373565003400215120ustar00rootroot00000000000000parameters: c: 3 alpha: - ${c} beta: c: ${c} 
reclass-1.7.0/reclass/tests/data/03/classes/d.yml000066400000000000000000000000711373565003400215130ustar00rootroot00000000000000parameters: d: 4 alpha: - ${d} beta: d: ${d} reclass-1.7.0/reclass/tests/data/03/nodes/000077500000000000000000000000001373565003400202225ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/03/nodes/alpha/000077500000000000000000000000001373565003400213075ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/03/nodes/alpha/one.yml000066400000000000000000000000211373565003400226040ustar00rootroot00000000000000classes: - a - b reclass-1.7.0/reclass/tests/data/03/nodes/alpha/two.yml000066400000000000000000000000211373565003400226340ustar00rootroot00000000000000classes: - a - c reclass-1.7.0/reclass/tests/data/03/nodes/beta/000077500000000000000000000000001373565003400211355ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/03/nodes/beta/one.yml000066400000000000000000000000211373565003400224320ustar00rootroot00000000000000classes: - b - c reclass-1.7.0/reclass/tests/data/03/nodes/beta/two.yml000066400000000000000000000000211373565003400224620ustar00rootroot00000000000000classes: - c - d reclass-1.7.0/reclass/tests/data/04/000077500000000000000000000000001373565003400171135ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/04/classes/000077500000000000000000000000001373565003400205505ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/04/classes/one.yml000066400000000000000000000000271373565003400220530ustar00rootroot00000000000000parameters: test1: 1 reclass-1.7.0/reclass/tests/data/04/classes/three.yml000066400000000000000000000000271373565003400224010ustar00rootroot00000000000000parameters: test3: 3 reclass-1.7.0/reclass/tests/data/04/classes/two.yml000066400000000000000000000000271373565003400221030ustar00rootroot00000000000000parameters: test2: 2 
reclass-1.7.0/reclass/tests/data/04/nodes/000077500000000000000000000000001373565003400202235ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/04/nodes/alpha/000077500000000000000000000000001373565003400213105ustar00rootroot00000000000000reclass-1.7.0/reclass/tests/data/04/nodes/alpha/node1.yml000066400000000000000000000000211373565003400230320ustar00rootroot00000000000000classes: - one reclass-1.7.0/reclass/tests/test_core.py000066400000000000000000000131441373565003400203230ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os from reclass import get_storage, get_path_mangler from reclass.core import Core from reclass.settings import Settings from reclass.errors import ClassNotFound import unittest try: import unittest.mock as mock except ImportError: import mock class TestCore(unittest.TestCase): def _core(self, dataset, opts={}, class_mappings=[]): inventory_uri = os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset path_mangler = get_path_mangler('yaml_fs') nodes_uri, classes_uri = path_mangler(inventory_uri, 'nodes', 'classes') settings = Settings(opts) storage = get_storage('yaml_fs', nodes_uri, classes_uri, settings.compose_node_name) return Core(storage, class_mappings, settings) def test_type_conversion(self): reclass = self._core('01') node = reclass.nodeinfo('data_types') params = { 'int': 1, 'bool': True, 'string': '1', '_reclass_': { 'environment': 'base', 'name': {'full': 'data_types', 'short': 'data_types' } } } self.assertEqual(node['parameters'], params) def test_raise_class_notfound(self): reclass = self._core('01') with self.assertRaises(ClassNotFound): node = reclass.nodeinfo('class_notfound') def test_ignore_class_notfound(self): reclass = self._core('01', opts={ 'ignore_class_notfound': True, 
'ignore_class_notfound_warning': False }) node = reclass.nodeinfo('class_notfound') params = { 'node_test': 'class not found', '_reclass_': { 'environment': 'base', 'name': {'full': 'class_notfound', 'short': 'class_notfound' } } } self.assertEqual(node['parameters'], params) def test_raise_class_notfound_with_regexp(self): reclass = self._core('01', opts={ 'ignore_class_notfound': True, 'ignore_class_notfound_warning': False, 'ignore_class_notfound_regexp': 'notmatched.*' }) with self.assertRaises(ClassNotFound): node = reclass.nodeinfo('class_notfound') def test_ignore_class_notfound_with_regexp(self): reclass = self._core('01', opts={ 'ignore_class_notfound': True, 'ignore_class_notfound_warning': False, 'ignore_class_notfound_regexp': 'miss.*' }) node = reclass.nodeinfo('class_notfound') params = { 'node_test': 'class not found', '_reclass_': { 'environment': 'base', 'name': {'full': 'class_notfound', 'short': 'class_notfound' } } } self.assertEqual(node['parameters'], params) def test_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('relative') params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'test5': 5, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, 'alpha_init': 5, '_reclass_': { 'environment': 'base', 'name': { 'full': 'relative', 'short': 'relative' } } } self.assertEqual(node['parameters'], params) def test_top_relative_class_names(self): reclass = self._core('02') node = reclass.nodeinfo('top_relative') params = { 'test1': 1, 'test2': 2, 'test3': 3, 'test4': 4, 'test5': 5, 'one_beta': 1, 'two_beta': 2, 'four_alpha': 3, 'two_gamma': 4, 'alpha_init': 5, '_reclass_': { 'environment': 'base', 'name': { 'full': 'top_relative', 'short': 'top_relative' } } } self.assertEqual(node['parameters'], params) def test_compose_node_names(self): reclass = self._core('03', opts={'compose_node_name': True}) alpha_one_node = reclass.nodeinfo('alpha.one') alpha_one_res = {'a': 1, 'alpha': [1, 2], 'beta': {'a': 1, 'b': 
2}, 'b': 2, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.one', 'short': 'alpha'}}} alpha_two_node = reclass.nodeinfo('alpha.two') alpha_two_res = {'a': 1, 'alpha': [1, 3], 'beta': {'a': 1, 'c': 3}, 'c': 3, '_reclass_': {'environment': 'base', 'name': {'full': 'alpha.two', 'short': 'alpha'}}} beta_one_node = reclass.nodeinfo('beta.one') beta_one_res = {'alpha': [2, 3], 'beta': {'c': 3, 'b': 2}, 'b': 2, 'c': 3, '_reclass_': {'environment': 'base', 'name': {'full': 'beta.one', 'short': 'beta'}}} beta_two_node = reclass.nodeinfo('beta.two') beta_two_res = {'alpha': [3, 4], 'c': 3, 'beta': {'c': 3, 'd': 4}, 'd': 4, '_reclass_': {'environment': u'base', 'name': {'full': u'beta.two', 'short': u'beta'}}} self.assertEqual(alpha_one_node['parameters'], alpha_one_res) self.assertEqual(alpha_two_node['parameters'], alpha_two_res) self.assertEqual(beta_one_node['parameters'], beta_one_res) self.assertEqual(beta_two_node['parameters'], beta_two_res) def test_class_mappings_match_path_false(self): reclass = self._core('04', opts={'class_mappings_match_path': False}, class_mappings=['node* two', 'alpha/node* three']) node = reclass.nodeinfo('node1') params = { 'test1': 1, 'test2': 2, '_reclass_': {'environment': u'base', 'name': {'full': 'node1', 'short': 'node1'}}} self.assertEqual(node['parameters'], params) def test_class_mappings_match_path_true(self): reclass = self._core('04', opts={'class_mappings_match_path': True}, class_mappings=['node* two', 'alpha/node* three']) node = reclass.nodeinfo('node1') params = { 'test1': 1, 'test3': 3, '_reclass_': {'environment': u'base', 'name': {'full': 'node1', 'short': 'node1'}}} self.assertEqual(node['parameters'], params) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/utils/000077500000000000000000000000001373565003400157555ustar00rootroot00000000000000reclass-1.7.0/reclass/utils/__init__.py000066400000000000000000000002511373565003400200640ustar00rootroot00000000000000# -*- coding: utf-8 from 
__future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals reclass-1.7.0/reclass/utils/dictpath.py000066400000000000000000000122511373565003400201300ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import six import re class DictPath(object): ''' Represents a path into a nested dictionary. Given a dictionary like d['foo']['bar'] = 42 it can be desirable to obtain a reference to the value stored in the sub-levels, allowing that value to be accessed and changed. Unfortunately, Python provides no easy way to do this, since ref = d['foo']['bar'] does become a reference to the integer 42, but that reference is overwritten when one assigns to it. Hence, DictPath represents the path into a nested dictionary, and can be "applied to" a dictionary to obtain and set values, using a list of keys, or a string representation using a delimiter (which can be escaped): p = DictPath(':', 'foo:bar') p.get_value(d) p.set_value(d, 43) This is a bit backwards, but the right way around would require support by the dict() type. The primary purpose of this class within reclass is to cater for parameter interpolation, so that a reference such as ${foo:bar} in a parameter value may be resolved in the context of the Parameter collections (a nested dict). If the value is a list, then the "key" is assumed to be and interpreted as an integer index: d = {'list': [{'one':1},{'two':2}]} p = DictPath(':', 'list:1:two') p.get_value(d) → 2 This heuristic is okay within reclass, because dictionary keys (parameter names) will always be strings. 
Therefore it is okay to interpret each component of the path as a string, unless one finds a list at the current level down the nested dictionary. ''' def __init__(self, delim, contents=None): self._delim = delim if contents is None: self._parts = [] elif isinstance(contents, list): self._parts = contents elif isinstance(contents, six.string_types): self._parts = self._split_string(contents) elif isinstance(contents, tuple): self._parts = list(contents) else: raise TypeError('DictPath() takes string or list, '\ 'not %s' % type(contents)) def __repr__(self): return "DictPath(%r, %r)" % (self._delim, str(self)) def __str__(self): return self._delim.join(str(i) for i in self._parts) def __eq__(self, other): if not (isinstance(other, six.string_types) or isinstance(other, self.__class__)): return False if isinstance(other, six.string_types): other = DictPath(self._delim, other) return self._parts == other._parts and self._delim == other._delim def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return hash(str(self)) @property def path(self): return self._parts def _get_key(self): if len(self._parts) == 0: return None return self._parts[-1] def _get_innermost_container(self, base): container = base for i in self.path[:-1]: if isinstance(container, (list, tuple)): container = container[int(i)] else: container = container[i] return container def _split_string(self, string): return re.split(r'(? 
# Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.utils.dictpath import DictPath import unittest class TestDictPath(unittest.TestCase): def test_constructor0(self): p = DictPath(':') self.assertListEqual(p._parts, []) def test_constructor_list(self): l = ['a', 'b', 'c'] p = DictPath(':', l) self.assertListEqual(p._parts, l) def test_constructor_str(self): delim = ':' s = 'a{0}b{0}c'.format(delim) l = ['a', 'b', 'c'] p = DictPath(delim, s) self.assertListEqual(p._parts, l) def test_constructor_str_escaped(self): delim = ':' s = 'a{0}b\{0}b{0}c'.format(delim) l = ['a', 'b\\{0}b'.format(delim), 'c'] p = DictPath(delim, s) self.assertListEqual(p._parts, l) def test_constructor_invalid_type(self): with self.assertRaises(TypeError): p = DictPath(':', 5) def test_equality(self): delim = ':' s = 'a{0}b{0}c'.format(delim) l = ['a', 'b', 'c'] p1 = DictPath(delim, s) p2 = DictPath(delim, l) self.assertEqual(p1, p2) def test_inequality_content(self): delim = ':' s = 'a{0}b{0}c'.format(delim) l = ['d', 'e', 'f'] p1 = DictPath(delim, s) p2 = DictPath(delim, l) self.assertNotEqual(p1, p2) def test_inequality_delimiter(self): l = ['a', 'b', 'c'] p1 = DictPath(':', l) p2 = DictPath('%', l) self.assertNotEqual(p1, p2) def test_repr(self): delim = '%' s = 'a:b\:b:c' p = DictPath(delim, s) self.assertEqual('%r' % p, "DictPath(%r, %r)" % (delim, str(s))) def test_str(self): s = 'a:b\:b:c' p = DictPath(':', s) self.assertEqual(str(p), s) def test_path_accessor(self): l = ['a', 'b', 'c'] p = DictPath(':', l) self.assertListEqual(p.path, l) def test_new_subpath(self): l = ['a', 'b', 'c'] p = DictPath(':', l[:-1]) p = p.new_subpath(l[-1]) self.assertListEqual(p.path, l) def test_get_value(self): v = 42 l = ['a', 'b', 'c'] d = {'a':{'b':{'c':v}}} p = DictPath(':', l) self.assertEqual(p.get_value(d), v) def 
test_get_value_escaped(self): v = 42 l = ['a', 'b:b', 'c'] d = {'a':{'b:b':{'c':v}}} p = DictPath(':', l) self.assertEqual(p.get_value(d), v) def test_get_value_listindex_list(self): v = 42 l = ['a', 1, 'c'] d = {'a':[None, {'c':v}, None]} p = DictPath(':', l) self.assertEqual(p.get_value(d), v) def test_get_value_listindex_str(self): v = 42 s = 'a:1:c' d = {'a':[None, {'c':v}, None]} p = DictPath(':', s) self.assertEqual(p.get_value(d), v) def test_set_value(self): v = 42 l = ['a', 'b', 'c'] d = {'a':{'b':{'c':v}}} p = DictPath(':', l) p.set_value(d, v+1) self.assertEqual(d['a']['b']['c'], v+1) def test_set_value_escaped(self): v = 42 l = ['a', 'b:b', 'c'] d = {'a':{'b:b':{'c':v}}} p = DictPath(':', l) p.set_value(d, v+1) self.assertEqual(d['a']['b:b']['c'], v+1) def test_set_value_escaped_listindex_list(self): v = 42 l = ['a', 1, 'c'] d = {'a':[None, {'c':v}, None]} p = DictPath(':', l) p.set_value(d, v+1) self.assertEqual(d['a'][1]['c'], v+1) def test_set_value_escaped_listindex_str(self): v = 42 s = 'a:1:c' d = {'a':[None, {'c':v}, None]} p = DictPath(':', s) p.set_value(d, v+1) self.assertEqual(d['a'][1]['c'], v+1) def test_get_nonexistent_value(self): l = ['a', 'd'] p = DictPath(':', l) with self.assertRaises(KeyError): p.get_value(dict()) def test_set_nonexistent_value(self): l = ['a', 'd'] p = DictPath(':', l) with self.assertRaises(KeyError): p.set_value(dict(), 42) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/000077500000000000000000000000001373565003400161145ustar00rootroot00000000000000reclass-1.7.0/reclass/values/__init__.py000066400000000000000000000004421373565003400202250ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import collections NodeInventory = collections.namedtuple('NodeInventory', ['items', 'env_matches'], rename=False) 
reclass-1.7.0/reclass/values/compitem.py000066400000000000000000000015201373565003400203010ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from reclass.settings import Settings from reclass.values import item class CompItem(item.ItemWithReferences): type = item.ItemTypes.COMPOSITE def merge_over(self, other): if (other.type == item.ItemTypes.SCALAR or other.type == item.ItemTypes.COMPOSITE): return self raise RuntimeError('Failed to merge %s over %s' % (self, other)) def render(self, context, inventory): # Preserve type if only one item if len(self.contents) == 1: return self.contents[0].render(context, inventory) # Multiple items strings = [str(i.render(context, inventory)) for i in self.contents] return "".join(strings) def __str__(self): return ''.join([str(i) for i in self.contents]) reclass-1.7.0/reclass/values/dictitem.py000066400000000000000000000002521373565003400202670ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from reclass.values import item class DictItem(item.ContainerItem): type = item.ItemTypes.DICTIONARY reclass-1.7.0/reclass/values/invitem.py000066400000000000000000000223431373565003400201450ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import copy import itertools as it import operator import pyparsing as pp from six import iteritems from six import string_types from reclass.values import item, parser_funcs from reclass.settings import Settings from reclass.utils.dictpath import DictPath from reclass.errors import ExpressionError, ParseError, ResolveError # TODO: generalize expression handling. 
class BaseTestExpression(object): known_operators = {} def __init__(self, delimiter): self._delimiter = delimiter self.refs = [] self.inv_refs = [] class EqualityTest(BaseTestExpression): known_operators = { parser_funcs.EQUAL: operator.eq, parser_funcs.NOT_EQUAL: operator.ne} def __init__(self, expression, delimiter): # expression is a list of at least three tuples, of which first element # is a string tag, second is subelement value; other tuples apparently # are not used. # expression[0][1] effectively contains export path and apparently must # be treated as such, also left hand operand in comparison # expression[1][1] appa holds commparison operator == or != # expression[2][1] is the righhand operand super(EqualityTest, self).__init__(delimiter) # TODO: this double sommersault must be cleaned _ = self._get_vars(expression[2][1], *self._get_vars(expression[0][1])) self._export_path, self._parameter_path, self._parameter_value = _ try: self._export_path.drop_first() except AttributeError: raise ExpressionError('No export') try: self._compare = self.known_operators[expression[1][1]] except KeyError as e: msg = 'Unknown test {0}'.format(expression[1][1]) raise ExpressionError(msg, tbFlag=False) self.inv_refs = [self._export_path] if self._parameter_path is not None: self._parameter_path.drop_first() self.refs = [str(self._parameter_path)] def value(self, context, items): if self._parameter_path is not None: self._parameter_value = self._resolve(self._parameter_path, context) if self._parameter_value is None: raise ExpressionError('Failed to render %s' % str(self), tbFlag=False) if self._export_path.exists_in(items): export_value = self._resolve(self._export_path, items) return self._compare(export_value, self._parameter_value) return False def _resolve(self, path, dictionary): try: return path.get_value(dictionary) except KeyError as e: raise ResolveError(str(path)) def _get_vars(self, var, export=None, parameter=None, value=None): if isinstance(var, string_types): 
path = DictPath(self._delimiter, var) if path.path[0].lower() == 'exports': export = path elif path.path[0].lower() == 'self': parameter = path elif path.path[0].lower() == 'true': value = True elif path.path[0].lower() == 'false': value = False else: value = var else: value = var return export, parameter, value class LogicTest(BaseTestExpression): known_operators = { parser_funcs.AND: operator.and_, parser_funcs.OR: operator.or_} def __init__(self, expr, delimiter): super(LogicTest, self).__init__(delimiter) subtests = list(it.compress(expr, it.cycle([1, 1, 1, 0]))) self._els = [EqualityTest(subtests[j:j+3], self._delimiter) for j in range(0, len(subtests), 3)] for x in self._els: self.refs.extend(x.refs) self.inv_refs.extend(x.inv_refs) try: self._ops = [self.known_operators[x[1]] for x in expr[3::4]] except KeyError as e: msg = 'Unknown operator {0} {1}'.format(e.messsage, self._els) raise ExpressionError(msg, tbFlag=False) def value(self, context, items): if len(self._els) == 0: # NOTE: possible logic error return True result = self._els[0].value(context, items) for op, next_el in zip(self._ops, self._els[1:]): result = op(result, next_el.value(context, items)) return result class InvItem(item.Item): type = item.ItemTypes.INV_QUERY def __init__(self, newitem, settings): super(InvItem, self).__init__(newitem.render(None, None), settings) self.needs_all_envs = False self.has_inv_query = True self.ignore_failed_render = ( self._settings.inventory_ignore_failed_render) self._parse_expression(self.contents) def _parse_expression(self, expr): parser = parser_funcs.get_expression_parser() try: tokens = parser.parseString(expr).asList() except pp.ParseException as e: raise ParseError(e.msg, e.line, e.col, e.lineno) if len(tokens) == 2: # options are set passed_opts = [x[1] for x in tokens.pop(0)] self.ignore_failed_render = parser_funcs.IGNORE_ERRORS in passed_opts self.needs_all_envs = parser_funcs.ALL_ENVS in passed_opts elif len(tokens) > 2: raise 
ExpressionError('Failed to parse %s' % str(tokens), tbFlag=False) self._expr_type = tokens[0][0] self._expr = list(tokens[0][1]) if self._expr_type == parser_funcs.VALUE: self._value_path = DictPath(self._settings.delimiter, self._expr[0][1]).drop_first() self._question = LogicTest([], self._settings.delimiter) self.refs = [] self.inv_refs = [self._value_path] elif self._expr_type == parser_funcs.TEST: self._value_path = DictPath(self._settings.delimiter, self._expr[0][1]).drop_first() self._question = LogicTest(self._expr[2:], self._settings.delimiter) self.refs = self._question.refs self.inv_refs = self._question.inv_refs self.inv_refs.append(self._value_path) elif self._expr_type == parser_funcs.LIST_TEST: self._value_path = None self._question = LogicTest(self._expr[1:], self._settings.delimiter) self.refs = self._question.refs self.inv_refs = self._question.inv_refs else: msg = 'Unknown expression type: %s' raise ExpressionError(msg % self._expr_type, tbFlag=False) @property def has_references(self): return len(self._question.refs) > 0 def get_references(self): return self._question.refs def assembleRefs(self, context): return def get_inv_references(self): return self.inv_refs def _resolve(self, path, dictionary): try: return path.get_value(dictionary) except KeyError as e: raise ResolveError(str(path)) def _value_expression(self, inventory): results = {} for name, node in iteritems(inventory): if self.needs_all_envs or node.env_matches: if self._value_path.exists_in(node.items): answer = self._resolve(self._value_path, node.items) results[name] = copy.deepcopy(answer) return results def _test_expression(self, context, inventory): if self._value_path is None: msg = 'Failed to render %s' raise ExpressionError(msg % str(self), tbFlag=False) results = {} for name, node in iteritems(inventory): if self.needs_all_envs or node.env_matches: if (self._question.value(context, node.items) and self._value_path.exists_in(node.items)): answer = 
self._resolve(self._value_path, node.items) results[name] = copy.deepcopy(answer) return results def _list_test_expression(self, context, inventory): results = [] for name, node in iteritems(inventory): if self.needs_all_envs or node.env_matches: if self._question.value(context, node.items): results.append(name) results.sort() return results def render(self, context, inventory): if self._expr_type == parser_funcs.VALUE: return self._value_expression(inventory) elif self._expr_type == parser_funcs.TEST: return self._test_expression(context, inventory) elif self._expr_type == parser_funcs.LIST_TEST: return self._list_test_expression(context, inventory) raise ExpressionError('Failed to render %s' % str(self), tbFlag=False) def __str__(self): return ' '.join(str(j) for i,j in self._expr) def __repr__(self): # had to leave it here for now as the behaviour differs from basic return 'InvItem(%r)' % self._expr reclass-1.7.0/reclass/values/item.py000066400000000000000000000044631373565003400174330ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from enum import Enum from reclass.utils.dictpath import DictPath ItemTypes = Enum('ItemTypes', ['COMPOSITE', 'DICTIONARY', 'INV_QUERY', 'LIST', 'REFERENCE', 'SCALAR']) class Item(object): def __init__(self, item, settings): self._settings = settings self.contents = item self.has_inv_query = False def allRefs(self): return True @property def has_references(self): return False def is_container(self): return False @property def is_complex(self): return (self.has_references | self.has_inv_query) def merge_over(self, item): msg = "Item class {0} does not implement merge_over()" raise NotImplementedError(msg.format(self.__class__.__name__)) def render(self, context, exports): msg = "Item class {0} does not implement render()" raise 
NotImplementedError(msg.format(self.__class__.__name__)) def type_str(self): return self.type.name.lower() def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self.contents) class ItemWithReferences(Item): def __init__(self, items, settings): super(ItemWithReferences, self).__init__(items, settings) try: iter(self.contents) except TypeError: self.contents = [self.contents] self.assembleRefs() @property def has_references(self): return len(self._refs) > 0 def get_references(self): return self._refs # NOTE: possibility of confusion. Looks like 'assemble' should be either # 'gather' or 'extract'. def assembleRefs(self, context={}): self._refs = [] self.allRefs = True for item in self.contents: if item.has_references: item.assembleRefs(context) self._refs.extend(item.get_references()) if item.allRefs is False: self.allRefs = False class ContainerItem(Item): def is_container(self): return True def render(self, context, inventory): return self.contents reclass-1.7.0/reclass/values/listitem.py000066400000000000000000000006101373565003400203150ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from reclass.values import item class ListItem(item.ContainerItem): type = item.ItemTypes.LIST def merge_over(self, other): if other.type == item.ItemTypes.LIST: other.contents.extend(self.contents) return other raise RuntimeError('Failed to merge %s over %s' % (self, other)) reclass-1.7.0/reclass/values/parser.py000066400000000000000000000057271373565003400177750ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import pyparsing as pp from .compitem import CompItem from .invitem import InvItem from .refitem import RefItem from .scaitem import ScaItem from reclass.errors import ParseError from reclass.values.parser_funcs import tags import 
reclass.values.parser_funcs as parsers import collections import six class Parser(object): def __init__(self): self._ref_parser = None self._simple_parser = None self._old_settings = None @property def ref_parser(self): if self._ref_parser is None or self._settings != self._old_settings: self._ref_parser = parsers.get_ref_parser(self._settings) self._old_settings = self._settings return self._ref_parser @property def simple_ref_parser(self): if self._simple_parser is None or self._settings != self._old_settings: self._simple_parser = parsers.get_simple_ref_parser(self._settings) self._old_settings = self._settings return self._simple_parser def parse(self, value, settings): def full_parse(): try: return self.ref_parser.parseString(value) except pp.ParseException as e: raise ParseError(e.msg, e.line, e.col, e.lineno) self._settings = settings sentinel_count = (value.count(settings.reference_sentinels[0]) + value.count(settings.export_sentinels[0])) if sentinel_count == 0: # speed up: only use pyparsing if there are sentinels in the value return ScaItem(value, self._settings) elif sentinel_count == 1: # speed up: try a simple reference try: tokens = self.simple_ref_parser.parseString(value) except pp.ParseException: tokens = full_parse() # fall back on the full parser else: tokens = full_parse() # use the full parser tokens = parsers.listify(tokens) items = self._create_items(tokens) if len(items) == 1: return items[0] return CompItem(items, self._settings) _item_builders = {tags.STR: (lambda s, v: ScaItem(v, s._settings)), tags.REF: (lambda s, v: s._create_ref(v)), tags.INV: (lambda s, v: s._create_inv(v)) } def _create_items(self, tokens): return [self._item_builders[t](self, v) for t, v in tokens ] def _create_ref(self, tokens): items = [ self._item_builders[t](self, v) for t, v in tokens ] return RefItem(items, self._settings) def _create_inv(self, tokens): items = [ScaItem(v, self._settings) for t, v in tokens] if len(items) == 1: return InvItem(items[0], 
self._settings) return InvItem(CompItem(items), self._settings) reclass-1.7.0/reclass/values/parser_funcs.py000066400000000000000000000157201373565003400211650ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import collections import enum import functools import pyparsing as pp import six tags = enum.Enum('Tags', ['STR', 'REF', 'INV']) _OBJ = 'OBJ' _LOGICAL = 'LOGICAL' _OPTION = 'OPTION' _IF = 'IF' TEST = 'TEST' LIST_TEST = 'LIST_TEST' VALUE = 'VALUE' AND = 'AND' OR = 'OR' EQUAL = '==' NOT_EQUAL = '!=' IGNORE_ERRORS = '+IgnoreErrors' ALL_ENVS = '+AllEnvs' s_end = pp.StringEnd() def _tag_with(tag, transform=lambda x:x): def inner(tag, string, location, tokens): token = transform(tokens[0]) tokens[0] = (tag, token) return functools.partial(inner, tag) def _asList(x): if isinstance(x, pp.ParseResults): return x.asList() return x def listify(w, modifier=_asList): if (isinstance(w, collections.Iterable) and not isinstance(w, six.string_types)): cls = type(w) if cls == pp.ParseResults: cls = list return cls([listify(x) for x in w]) return modifier(w) def get_expression_parser(): sign = pp.Optional(pp.Literal('-')) number = pp.Word(pp.nums) dpoint = pp.Literal('.') ignore_errors = pp.CaselessLiteral(IGNORE_ERRORS) all_envs = pp.CaselessLiteral(ALL_ENVS) eq, neq = pp.Literal(EQUAL), pp.Literal(NOT_EQUAL) eand, eor = pp.CaselessLiteral(AND), pp.CaselessLiteral(OR) option = (ignore_errors | all_envs).setParseAction(_tag_with(_OPTION)) options = pp.Group(pp.ZeroOrMore(option)) operator_test = (eq | neq).setParseAction(_tag_with(TEST)) operator_logical = (eand | eor).setParseAction(_tag_with(_LOGICAL)) begin_if = pp.CaselessLiteral(_IF).setParseAction(_tag_with(_IF)) obj = pp.Word(pp.printables).setParseAction(_tag_with(_OBJ)) integer = pp.Combine(sign + number + 
pp.WordEnd()).setParseAction( _tag_with(_OBJ, int)) real = pp.Combine(sign + ((number + dpoint + number) | (dpoint + number) | (number + dpoint)) ).setParseAction(_tag_with(_OBJ, float)) expritem = integer | real | obj single_test = expritem + operator_test + expritem additional_test = operator_logical + single_test expr_var = pp.Group(obj + s_end).setParseAction(_tag_with(VALUE)) expr_test = pp.Group(obj + begin_if + single_test + pp.ZeroOrMore(additional_test) + s_end).setParseAction(_tag_with(TEST)) expr_list_test = pp.Group(begin_if + single_test + pp.ZeroOrMore(additional_test) + s_end).setParseAction(_tag_with(LIST_TEST)) expr = expr_test | expr_var | expr_list_test line = options + expr + s_end return line def get_ref_parser(settings): _ESCAPE = settings.escape_character _DOUBLE_ESCAPE = _ESCAPE + _ESCAPE _REF_OPEN, _REF_CLOSE = settings.reference_sentinels _REF_CLOSE_FIRST = _REF_CLOSE[0] _REF_ESCAPE_OPEN = _ESCAPE + _REF_OPEN _REF_ESCAPE_CLOSE = _ESCAPE + _REF_CLOSE _REF_DOUBLE_ESCAPE_OPEN = _DOUBLE_ESCAPE + _REF_OPEN _REF_DOUBLE_ESCAPE_CLOSE = _DOUBLE_ESCAPE + _REF_CLOSE _REF_EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE _INV_OPEN, _INV_CLOSE = settings.export_sentinels _INV_CLOSE_FIRST = _INV_CLOSE[0] _INV_ESCAPE_OPEN = _ESCAPE + _INV_OPEN _INV_ESCAPE_CLOSE = _ESCAPE + _INV_CLOSE _INV_DOUBLE_ESCAPE_OPEN = _DOUBLE_ESCAPE + _INV_OPEN _INV_DOUBLE_ESCAPE_CLOSE = _DOUBLE_ESCAPE + _INV_CLOSE _INV_EXCLUDES = _ESCAPE + _INV_OPEN + _INV_CLOSE _EXCLUDES = _ESCAPE + _REF_OPEN + _REF_CLOSE + _INV_OPEN + _INV_CLOSE double_escape = pp.Combine(pp.Literal(_DOUBLE_ESCAPE) + pp.MatchFirst([pp.FollowedBy(_REF_OPEN), pp.FollowedBy(_REF_CLOSE), pp.FollowedBy(_INV_OPEN), pp.FollowedBy(_INV_CLOSE)])).setParseAction( pp.replaceWith(_ESCAPE)) ref_open = pp.Literal(_REF_OPEN).suppress() ref_close = pp.Literal(_REF_CLOSE).suppress() ref_not_open = ~pp.Literal(_REF_OPEN) + ~pp.Literal(_REF_ESCAPE_OPEN) + ~pp.Literal(_REF_DOUBLE_ESCAPE_OPEN) ref_not_close = ~pp.Literal(_REF_CLOSE) + 
~pp.Literal(_REF_ESCAPE_CLOSE) + ~pp.Literal(_REF_DOUBLE_ESCAPE_CLOSE) ref_escape_open = pp.Literal(_REF_ESCAPE_OPEN).setParseAction(pp.replaceWith(_REF_OPEN)) ref_escape_close = pp.Literal(_REF_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_REF_CLOSE)) ref_text = pp.CharsNotIn(_REF_EXCLUDES) | pp.CharsNotIn(_REF_CLOSE_FIRST, exact=1) ref_content = pp.Combine(pp.OneOrMore(ref_not_open + ref_not_close + ref_text)) ref_string = pp.MatchFirst([double_escape, ref_escape_open, ref_escape_close, ref_content]).setParseAction(_tag_with(tags.STR)) ref_item = pp.Forward() ref_items = pp.OneOrMore(ref_item) reference = (ref_open + pp.Group(ref_items) + ref_close).setParseAction(_tag_with(tags.REF)) ref_item << (reference | ref_string) inv_open = pp.Literal(_INV_OPEN).suppress() inv_close = pp.Literal(_INV_CLOSE).suppress() inv_not_open = ~pp.Literal(_INV_OPEN) + ~pp.Literal(_INV_ESCAPE_OPEN) + ~pp.Literal(_INV_DOUBLE_ESCAPE_OPEN) inv_not_close = ~pp.Literal(_INV_CLOSE) + ~pp.Literal(_INV_ESCAPE_CLOSE) + ~pp.Literal(_INV_DOUBLE_ESCAPE_CLOSE) inv_escape_open = pp.Literal(_INV_ESCAPE_OPEN).setParseAction(pp.replaceWith(_INV_OPEN)) inv_escape_close = pp.Literal(_INV_ESCAPE_CLOSE).setParseAction(pp.replaceWith(_INV_CLOSE)) inv_text = pp.CharsNotIn(_INV_CLOSE_FIRST) inv_content = pp.Combine(pp.OneOrMore(inv_not_close + inv_text)) inv_string = pp.MatchFirst( [double_escape, inv_escape_open, inv_escape_close, inv_content] ).setParseAction(_tag_with(tags.STR)) inv_items = pp.OneOrMore(inv_string) export = (inv_open + pp.Group(inv_items) + inv_close).setParseAction(_tag_with(tags.INV)) text = pp.CharsNotIn(_EXCLUDES) | pp.CharsNotIn('', exact=1) content = pp.Combine(pp.OneOrMore(ref_not_open + inv_not_open + text)) string = pp.MatchFirst( [double_escape, ref_escape_open, inv_escape_open, content] ).setParseAction(_tag_with(tags.STR)) item = reference | export | string line = pp.OneOrMore(item) + s_end return line.leaveWhitespace() def get_simple_ref_parser(settings): ESCAPE = 
settings.escape_character REF_OPEN, REF_CLOSE = settings.reference_sentinels INV_OPEN, INV_CLOSE = settings.export_sentinels EXCLUDES = ESCAPE + REF_OPEN + REF_CLOSE + INV_OPEN + INV_CLOSE string = pp.CharsNotIn(EXCLUDES).setParseAction(_tag_with(tags.STR)) ref_open = pp.Literal(REF_OPEN).suppress() ref_close = pp.Literal(REF_CLOSE).suppress() reference = (ref_open + pp.Group(string) + ref_close).setParseAction(_tag_with(tags.REF)) line = pp.StringStart() + pp.Optional(string) + reference + pp.Optional(string) + s_end return line.leaveWhitespace() reclass-1.7.0/reclass/values/refitem.py000066400000000000000000000025241373565003400201240ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from reclass.values import item from reclass.utils.dictpath import DictPath from reclass.errors import ResolveError class RefItem(item.ItemWithReferences): type = item.ItemTypes.REFERENCE def assembleRefs(self, context={}): super(RefItem, self).assembleRefs(context) try: self._refs.append(self._flatten_contents(context)) except ResolveError as e: self.allRefs = False def _flatten_contents(self, context, inventory=None): result = [str(i.render(context, inventory)) for i in self.contents] return "".join(result) def _resolve(self, ref, context): path = DictPath(self._settings.delimiter, ref) try: return path.get_value(context) except (KeyError, TypeError) as e: raise ResolveError(ref) def render(self, context, inventory): #strings = [str(i.render(context, inventory)) for i in self.contents] #return self._resolve("".join(strings), context) return self._resolve(self._flatten_contents(context, inventory), context) def __str__(self): strings = [str(i) for i in self.contents] rs = self._settings.reference_sentinels return '{0}{1}{2}'.format(rs[0], ''.join(strings), rs[1]) reclass-1.7.0/reclass/values/scaitem.py000066400000000000000000000014111373565003400201100ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from 
__future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.settings import Settings from reclass.values import item class ScaItem(item.Item): type = item.ItemTypes.SCALAR def __init__(self, value, settings): super(ScaItem, self).__init__(value, settings) def merge_over(self, other): if other.type in [item.ItemTypes.SCALAR, item.ItemTypes.COMPOSITE]: return self raise RuntimeError('Failed to merge %s over %s' % (self, other)) def render(self, context, inventory): return self.contents def __str__(self): return str(self.contents) reclass-1.7.0/reclass/values/tests/000077500000000000000000000000001373565003400172565ustar00rootroot00000000000000reclass-1.7.0/reclass/values/tests/__init__.py000066400000000000000000000000001373565003400213550ustar00rootroot00000000000000reclass-1.7.0/reclass/values/tests/test_compitem.py000066400000000000000000000077131373565003400225140ustar00rootroot00000000000000from reclass.settings import Settings from reclass.values.value import Value from reclass.values.compitem import CompItem from reclass.values.scaitem import ScaItem from reclass.values.valuelist import ValueList from reclass.values.listitem import ListItem from reclass.values.dictitem import DictItem import unittest SETTINGS = Settings() class TestCompItem(unittest.TestCase): def test_assembleRefs_no_items(self): composite = CompItem([], SETTINGS) self.assertFalse(composite.has_references) def test_assembleRefs_one_item_without_refs(self): val1 = Value('foo', SETTINGS, '') composite = CompItem([val1], SETTINGS) self.assertFalse(composite.has_references) def test_assembleRefs_one_item_with_one_ref(self): val1 = Value('${foo}', SETTINGS, '') expected_refs = ['foo'] composite = CompItem([val1], SETTINGS) self.assertTrue(composite.has_references) self.assertEquals(composite.get_references(), expected_refs) def test_assembleRefs_one_item_with_two_refs(self): val1 = Value('${foo}${bar}', 
SETTINGS, '') expected_refs = ['foo', 'bar'] composite = CompItem([val1], SETTINGS) self.assertTrue(composite.has_references) self.assertEquals(composite.get_references(), expected_refs) def test_assembleRefs_two_items_one_with_one_ref_one_without(self): val1 = Value('${foo}bar', SETTINGS, '') val2 = Value('baz', SETTINGS, '') expected_refs = ['foo'] composite = CompItem([val1, val2], SETTINGS) self.assertTrue(composite.has_references) self.assertEquals(composite.get_references(), expected_refs) def test_assembleRefs_two_items_both_with_one_ref(self): val1 = Value('${foo}', SETTINGS, '') val2 = Value('${bar}', SETTINGS, '') expected_refs = ['foo', 'bar'] composite = CompItem([val1, val2], SETTINGS) self.assertTrue(composite.has_references) self.assertEquals(composite.get_references(), expected_refs) def test_assembleRefs_two_items_with_two_refs(self): val1 = Value('${foo}${baz}', SETTINGS, '') val2 = Value('${bar}${meep}', SETTINGS, '') expected_refs = ['foo', 'baz', 'bar', 'meep'] composite = CompItem([val1, val2], SETTINGS) self.assertTrue(composite.has_references) self.assertEquals(composite.get_references(), expected_refs) def test_string_representation(self): composite = CompItem(Value(1, SETTINGS, ''), SETTINGS) expected = '1' result = str(composite) self.assertEquals(result, expected) def test_render_single_item(self): val1 = Value('${foo}', SETTINGS, '') composite = CompItem([val1], SETTINGS) self.assertEquals(1, composite.render({'foo': 1}, None)) def test_render_multiple_items(self): val1 = Value('${foo}', SETTINGS, '') val2 = Value('${bar}', SETTINGS, '') composite = CompItem([val1, val2], SETTINGS) self.assertEquals('12', composite.render({'foo': 1, 'bar': 2}, None)) def test_merge_over_merge_scalar(self): val1 = Value(None, SETTINGS, '') scalar = ScaItem(1, SETTINGS) composite = CompItem([val1], SETTINGS) result = composite.merge_over(scalar) self.assertEquals(result, composite) def test_merge_over_merge_composite(self): val1 = Value(None, SETTINGS, 
'') val2 = Value(None, SETTINGS, '') composite1 = CompItem([val1], SETTINGS) composite2 = CompItem([val2], SETTINGS) result = composite2.merge_over(composite1) self.assertEquals(result, composite2) def test_merge_other_types_not_allowed(self): other = type('Other', (object,), {'type': 34}) val1 = Value(None, SETTINGS, '') composite = CompItem([val1], SETTINGS) self.assertRaises(RuntimeError, composite.merge_over, other) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/tests/test_item.py000066400000000000000000000025661373565003400216360ustar00rootroot00000000000000from reclass.settings import Settings from reclass.values.value import Value from reclass.values.compitem import CompItem from reclass.values.scaitem import ScaItem from reclass.values.valuelist import ValueList from reclass.values.listitem import ListItem from reclass.values.dictitem import DictItem from reclass.values.item import ContainerItem from reclass.values.item import ItemWithReferences import unittest from mock import MagicMock SETTINGS = Settings() class TestItemWithReferences(unittest.TestCase): def test_assembleRef_allrefs(self): phonyitem = MagicMock() phonyitem.has_references = True phonyitem.get_references = lambda *x: [1] iwr = ItemWithReferences([phonyitem], {}) self.assertEquals(iwr.get_references(), [1]) self.assertTrue(iwr.allRefs) def test_assembleRef_partial(self): phonyitem = MagicMock() phonyitem.has_references = True phonyitem.allRefs = False phonyitem.get_references = lambda *x: [1] iwr = ItemWithReferences([phonyitem], {}) self.assertEquals(iwr.get_references(), [1]) self.assertFalse(iwr.allRefs) class TestContainerItem(unittest.TestCase): def test_render(self): container = ContainerItem('foo', SETTINGS) self.assertEquals(container.render(None, None), 'foo') if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/tests/test_listitem.py000066400000000000000000000017231373565003400225240ustar00rootroot00000000000000from reclass.settings 
import Settings from reclass.values.value import Value from reclass.values.compitem import CompItem from reclass.values.scaitem import ScaItem from reclass.values.valuelist import ValueList from reclass.values.listitem import ListItem from reclass.values.dictitem import DictItem import unittest SETTINGS = Settings() class TestListItem(unittest.TestCase): def test_merge_over_merge_list(self): listitem1 = ListItem([1], SETTINGS) listitem2 = ListItem([2], SETTINGS) expected = ListItem([1, 2], SETTINGS) result = listitem2.merge_over(listitem1) self.assertEquals(result.contents, expected.contents) def test_merge_other_types_not_allowed(self): other = type('Other', (object,), {'type': 34}) val1 = Value(None, SETTINGS, '') listitem = ListItem(val1, SETTINGS) self.assertRaises(RuntimeError, listitem.merge_over, other) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/tests/test_parser_functions.py000066400000000000000000000106401373565003400242540ustar00rootroot00000000000000from reclass import settings from reclass.values import parser_funcs as pf import unittest import ddt SETTINGS = settings.Settings() # Test cases for parsers. Each test case is a two-tuple of input string and # expected output. NOTE: default values for sentinels are used here to avoid # cluttering up the code. test_pairs_simple = ( # Basic test cases. ('${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), # Basic combinations. ('bar${foo}', [(pf.tags.STR, 'bar'), (pf.tags.REF, [(pf.tags.STR, 'foo')])]), ('bar${foo}baz', [(pf.tags.STR, 'bar'), (pf.tags.REF, [(pf.tags.STR, 'foo')]), (pf.tags.STR, 'baz')]), ('${foo}baz', [(pf.tags.REF, [(pf.tags.STR, 'foo')]), (pf.tags.STR, 'baz')]), # Whitespace preservation cases. 
('bar ${foo}', [(pf.tags.STR, 'bar '), (pf.tags.REF, [(pf.tags.STR, 'foo')])]), ('bar ${foo baz}', [(pf.tags.STR, 'bar '), (pf.tags.REF, [(pf.tags.STR, 'foo baz')])]), ('bar${foo} baz', [(pf.tags.STR, 'bar'), (pf.tags.REF, [(pf.tags.STR, 'foo')]), (pf.tags.STR, ' baz')]), (' bar${foo} baz ', [(pf.tags.STR, ' bar'), (pf.tags.REF, [(pf.tags.STR, 'foo')]), (pf.tags.STR, ' baz ')]), ) # Simple parser test cases are also included in this test grouop. test_pairs_full = ( # Single elements sanity. ('foo', [(pf.tags.STR, 'foo')]), ('$foo', [(pf.tags.STR, '$foo')]), ('{foo}', [(pf.tags.STR, '{foo}')]), ('[foo]', [(pf.tags.STR, '[foo]')]), ('$(foo)', [(pf.tags.STR, '$(foo)')]), ('$[foo]', [(pf.tags.INV, [(pf.tags.STR, 'foo')])]), # Escape sequences. # NOTE: these sequences apparently are not working as expected. #(r'\\\\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), #(r'\\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), #(r'\${foo}', [(pf.tags.REF, [(pf.tags.STR, 'foo')])]), # Basic combinations. ('bar$[foo]', [(pf.tags.STR, 'bar'), (pf.tags.INV, [(pf.tags.STR, 'foo')])]), ('bar$[foo]baz', [(pf.tags.STR, 'bar'), (pf.tags.INV, [(pf.tags.STR, 'foo')]), (pf.tags.STR, 'baz')]), ('$[foo]baz', [(pf.tags.INV, [(pf.tags.STR, 'foo')]), (pf.tags.STR, 'baz')]), # Whitespace preservation in various positions. (' foo ', [(pf.tags.STR, ' foo ')]), ('foo bar', [(pf.tags.STR, 'foo bar')]), ('bar $[foo baz]', [(pf.tags.STR, 'bar '), (pf.tags.INV, [(pf.tags.STR, 'foo baz')])]), ('bar$[foo] baz ', [(pf.tags.STR, 'bar'), (pf.tags.INV, [(pf.tags.STR, 'foo')]), (pf.tags.STR, ' baz ')]), # Nested references and inventory items. ('${foo}${bar}',[(pf.tags.REF, [(pf.tags.STR, 'foo')]), (pf.tags.REF, [(pf.tags.STR, 'bar')])]), ('${foo${bar}}',[(pf.tags.REF, [(pf.tags.STR, 'foo'), (pf.tags.REF, [(pf.tags.STR, 'bar')])])]), ('$[foo]$[bar]',[(pf.tags.INV, [(pf.tags.STR, 'foo')]), (pf.tags.INV, [(pf.tags.STR, 'bar')])]), # NOTE: the cases below do not work as expected, which is probably a bug. 
# Any nesting in INV creates a string. #('${$[foo]}', [(pf.tags.REF, [(pf.tags.INV, [(pf.tags.STR, 'foo')])])]), #('$[${foo}]', [(pf.tags.INV, [(pf.tags.REF, [(pf.tags.STR, 'foo')])])]), #('$[foo$[bar]]',[(pf.tags.INV, [(pf.tags.STR, 'foo'), # (pf.tags.INV, [(pf.tags.STR, 'bar')])])]), ) + test_pairs_simple @ddt.ddt class TestRefParser(unittest.TestCase): @ddt.data(*test_pairs_full) def test_standard_reference_parser(self, data): instring, expected = data parser = pf.get_ref_parser(SETTINGS) result = pf.listify(parser.parseString(instring).asList()) self.assertEquals(expected, result) @ddt.ddt class TestSimpleRefParser(unittest.TestCase): @ddt.data(*test_pairs_simple) def test_standard_reference_parser(self, data): # NOTE: simple reference parser can parse references only. It fails # on inventory items. instring, expected = data parser = pf.get_simple_ref_parser(SETTINGS) result = pf.listify(parser.parseString(instring).asList()) self.assertEquals(expected, result) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/tests/test_refitem.py000066400000000000000000000032771373565003400223330ustar00rootroot00000000000000from reclass import errors from reclass.settings import Settings from reclass.values.value import Value from reclass.values.compitem import CompItem from reclass.values.scaitem import ScaItem from reclass.values.valuelist import ValueList from reclass.values.listitem import ListItem from reclass.values.dictitem import DictItem from reclass.values.refitem import RefItem import unittest from mock import MagicMock SETTINGS = Settings() class TestRefItem(unittest.TestCase): def test_assembleRefs_ok(self): phonyitem = MagicMock() phonyitem.render = lambda x, k: 'bar' phonyitem.has_references = True phonyitem.get_references = lambda *x: ['foo'] iwr = RefItem([phonyitem], {}) self.assertEquals(iwr.get_references(), ['foo', 'bar']) self.assertTrue(iwr.allRefs) def test_assembleRefs_failedrefs(self): phonyitem = MagicMock() 
phonyitem.render.side_effect = errors.ResolveError('foo') phonyitem.has_references = True phonyitem.get_references = lambda *x: ['foo'] iwr = RefItem([phonyitem], {}) self.assertEquals(iwr.get_references(), ['foo']) self.assertFalse(iwr.allRefs) def test__resolve_ok(self): reference = RefItem('', Settings({'delimiter': ':'})) result = reference._resolve('foo:bar', {'foo':{'bar': 1}}) self.assertEquals(result, 1) def test__resolve_fails(self): refitem = RefItem('', Settings({'delimiter': ':'})) context = {'foo':{'bar': 1}} reference = 'foo:baz' self.assertRaises(errors.ResolveError, refitem._resolve, reference, context) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/tests/test_scaitem.py000066400000000000000000000022301373565003400223110ustar00rootroot00000000000000from reclass.settings import Settings from reclass.values.value import Value from reclass.values.compitem import CompItem from reclass.values.scaitem import ScaItem from reclass.values.valuelist import ValueList from reclass.values.listitem import ListItem from reclass.values.dictitem import DictItem import unittest SETTINGS = Settings() class TestScaItem(unittest.TestCase): def test_merge_over_merge_scalar(self): scalar1 = ScaItem([1], SETTINGS) scalar2 = ScaItem([2], SETTINGS) result = scalar2.merge_over(scalar1) self.assertEquals(result.contents, scalar2.contents) def test_merge_over_merge_composite(self): scalar1 = CompItem(Value(1, SETTINGS, ''), SETTINGS) scalar2 = ScaItem([2], SETTINGS) result = scalar2.merge_over(scalar1) self.assertEquals(result.contents, scalar2.contents) def test_merge_other_types_not_allowed(self): other = type('Other', (object,), {'type': 34}) val1 = Value(None, SETTINGS, '') scalar = ScaItem(val1, SETTINGS) self.assertRaises(RuntimeError, scalar.merge_over, other) if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/tests/test_value.py000066400000000000000000000107531373565003400220110ustar00rootroot00000000000000# # -*- coding: 
utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.settings import Settings from reclass.values.value import Value from reclass.errors import ResolveError, ParseError import unittest SETTINGS = Settings() def _var(s): return '%s%s%s' % (SETTINGS.reference_sentinels[0], s, SETTINGS.reference_sentinels[1]) CONTEXT = {'favcolour':'yellow', 'motd':{'greeting':'Servus!', 'colour':'${favcolour}' }, 'int':1, 'list':[1,2,3], 'dict':{1:2,3:4}, 'bool':True } def _poor_mans_template(s, var, value): return s.replace(_var(var), value) class TestValue(unittest.TestCase): def test_simple_string(self): s = 'my cat likes to hide in boxes' tv = Value(s, SETTINGS, '') self.assertFalse(tv.has_references) self.assertEquals(tv.render(CONTEXT, None), s) def _test_solo_ref(self, key): s = _var(key) tv = Value(s, SETTINGS, '') res = tv.render(CONTEXT, None) self.assertTrue(tv.has_references) self.assertEqual(res, CONTEXT[key]) def test_solo_ref_string(self): self._test_solo_ref('favcolour') def test_solo_ref_int(self): self._test_solo_ref('int') def test_solo_ref_list(self): self._test_solo_ref('list') def test_solo_ref_dict(self): self._test_solo_ref('dict') def test_solo_ref_bool(self): self._test_solo_ref('bool') def test_single_subst_bothends(self): s = 'I like ' + _var('favcolour') + ' and I like it' tv = Value(s, SETTINGS, '') self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) def test_single_subst_start(self): s = _var('favcolour') + ' is my favourite colour' tv = Value(s, SETTINGS, '') self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', 
CONTEXT['favcolour'])) def test_single_subst_end(self): s = 'I like ' + _var('favcolour') tv = Value(s, SETTINGS, '') self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, 'favcolour', CONTEXT['favcolour'])) def test_deep_subst_solo(self): motd = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(motd) tv = Value(s, SETTINGS, '') self.assertTrue(tv.has_references) self.assertEqual(tv.render(CONTEXT, None), _poor_mans_template(s, motd, CONTEXT['motd']['greeting'])) def test_multiple_subst(self): greet = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(greet) + ' I like ' + _var('favcolour') + '!' tv = Value(s, SETTINGS, '') self.assertTrue(tv.has_references) want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting']) want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour']) self.assertEqual(tv.render(CONTEXT, None), want) def test_multiple_subst_flush(self): greet = SETTINGS.delimiter.join(('motd', 'greeting')) s = _var(greet) + ' I like ' + _var('favcolour') tv = Value(s, SETTINGS, '') self.assertTrue(tv.has_references) want = _poor_mans_template(s, greet, CONTEXT['motd']['greeting']) want = _poor_mans_template(want, 'favcolour', CONTEXT['favcolour']) self.assertEqual(tv.render(CONTEXT, None), want) def test_undefined_variable(self): s = _var('no_such_variable') tv = Value(s, SETTINGS, '') with self.assertRaises(ResolveError): tv.render(CONTEXT, None) def test_incomplete_variable(self): s = SETTINGS.reference_sentinels[0] + 'incomplete' with self.assertRaises(ParseError): tv = Value(s, SETTINGS, '') if __name__ == '__main__': unittest.main() reclass-1.7.0/reclass/values/value.py000066400000000000000000000053341373565003400176070ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .parser import Parser from 
.dictitem import DictItem from .listitem import ListItem from .scaitem import ScaItem from reclass.errors import InterpolationError from six import string_types class Value(object): _parser = Parser() def __init__(self, value, settings, uri, parse_string=True): self._settings = settings self.uri = uri self.overwrite = False self.constant = False if isinstance(value, string_types): if parse_string: try: self._item = self._parser.parse(value, self._settings) except InterpolationError as e: e.uri = self.uri raise else: self._item = ScaItem(value, self._settings) elif isinstance(value, list): self._item = ListItem(value, self._settings) elif isinstance(value, dict): self._item = DictItem(value, self._settings) else: self._item = ScaItem(value, self._settings) def item_type(self): return self._item.type def item_type_str(self): return self._item.type_str() def is_container(self): return self._item.is_container() @property def allRefs(self): return self._item.allRefs @property def has_references(self): return self._item.has_references @property def has_inv_query(self): return self._item.has_inv_query @property def needs_all_envs(self): if self._item.has_inv_query: return self._item.needs_all_envs return False def ignore_failed_render(self): return self._item.ignore_failed_render @property def is_complex(self): return self._item.is_complex def get_references(self): return self._item.get_references() def get_inv_references(self): return self._item.get_inv_references() def assembleRefs(self, context): if self._item.has_references: self._item.assembleRefs(context) def render(self, context, inventory): try: return self._item.render(context, inventory) except InterpolationError as e: e.uri = self.uri raise @property def contents(self): return self._item.contents def merge_over(self, value): self._item = self._item.merge_over(value._item) return self def __repr__(self): return 'Value(%r)' % self._item def __str__(self): return str(self._item) 
reclass-1.7.0/reclass/values/valuelist.py000066400000000000000000000152151373565003400205020ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import copy import sys from reclass.errors import ChangedConstantError, ResolveError, TypeMergeError class ValueList(object): def __init__(self, value, settings): self._settings = settings self._refs = [] self.allRefs = True self._values = [value] self._inv_refs = [] self.has_inv_query = False self.ignore_failed_render = False self.is_complex = False self._update() @property def uri(self): return '; '.join([str(x.uri) for x in self._values]) def append(self, value): self._values.append(value) self._update() def extend(self, values): self._values.extend(values._values) self._update() def _update(self): self.assembleRefs() self._check_for_inv_query() self.is_complex = False item_type = self._values[0].item_type() for v in self._values: if v.is_complex or v.constant or v.overwrite or v.item_type() != item_type: self.is_complex = True @property def has_references(self): return len(self._refs) > 0 def get_inv_references(self): return self._inv_refs def get_references(self): return self._refs def _check_for_inv_query(self): self.has_inv_query = False self.ignore_failed_render = True for value in self._values: if value.has_inv_query: self._inv_refs.extend(value.get_inv_references()) self.has_inv_query = True if value.ignore_failed_render() is False: self.ignore_failed_render = False if self.has_inv_query is False: self.ignore_failed_render = False def assembleRefs(self, context={}): self._refs = [] self.allRefs = True for value in self._values: value.assembleRefs(context) if value.has_references: self._refs.extend(value.get_references()) if value.allRefs is False: self.allRefs = False @property def needs_all_envs(self): for value in self._values: 
if value.needs_all_envs: return True return False def merge(self): output = None for n, value in enumerate(self._values): if output is None: output = value else: output = value.merge_over(output) return output def render(self, context, inventory): from reclass.datatypes.parameters import Parameters output = None deepCopied = False last_error = None constant = False for n, value in enumerate(self._values): try: new = value.render(context, inventory) except ResolveError as e: # only ignore failed renders if # ignore_overwritten_missing_references is set and we are # dealing with a scalar value and it's not the last item in the # values list if (self._settings.ignore_overwritten_missing_references and not isinstance(output, (dict, list)) and n != (len(self._values)-1)): new = None last_error = e print("[WARNING] Reference '%s' undefined" % str(value), file=sys.stderr) else: raise e if constant: if self._settings.strict_constant_parameters: raise ChangedConstantError('{0}; {1}'.format(self._values[n-1].uri, self._values[n].uri)) else: continue if output is None or value.overwrite: output = new deepCopied = False else: if isinstance(output, dict): if isinstance(new, dict): p1 = Parameters(output, self._settings, None, parse_strings=False) p2 = Parameters(new, self._settings, None, parse_strings=False) p1.merge(p2) output = p1.as_dict() elif isinstance(new, list): raise TypeMergeError(self._values[n], self._values[n-1], self.uri) elif self._settings.allow_scalar_over_dict or (self._settings.allow_none_override and new is None): output = new deepCopied = False else: raise TypeMergeError(self._values[n], self._values[n-1], self.uri) elif isinstance(output, list): if isinstance(new, list): if not deepCopied: output = copy.deepcopy(output) deepCopied = True output.extend(new) elif isinstance(new, dict): raise TypeMergeError(self._values[n], self._values[n-1], self.uri) elif self._settings.allow_scalar_over_list or (self._settings.allow_none_override and new is None): output 
= new deepCopied = False else: raise TypeMergeError(self._values[n], self._values[n-1], self.uri) else: if isinstance(new, dict): if self._settings.allow_dict_over_scalar: output = new deepCopied = False else: raise TypeMergeError(self._values[n], self._values[n-1], self.uri) elif isinstance(new, list): if self._settings.allow_list_over_scalar: output_list = list() output_list.append(output) output_list.extend(new) output = output_list deepCopied = True else: raise TypeMergeError(self._values[n], self._values[n-1], self.uri) else: output = new deepCopied = False if value.constant: constant = True if isinstance(output, (dict, list)) and last_error is not None: raise last_error return output reclass-1.7.0/reclass/version.py000066400000000000000000000017061373565003400166600ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–14 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals RECLASS_NAME = 'reclass' DESCRIPTION = ('merge data by recursive descent down an ancestry hierarchy ' '(forked extended version)') VERSION = '1.7.0' AUTHOR = 'martin f. krafft / Andrew Pickford / salt-formulas community' AUTHOR_EMAIL = 'salt-formulas@freelists.org' MAINTAINER = 'salt-formulas community' MAINTAINER_EMAIL = 'salt-formulas@freelists.org' COPYRIGHT = ('Copyright © 2007–14 martin f. 
krafft, extensions © 2017 Andrew' ' Pickford, extensions © salt-formulas community') LICENCE = 'Artistic Licence 2.0' URL = 'https://github.com/salt-formulas/reclass' reclass-1.7.0/releasenotes/000077500000000000000000000000001373565003400156525ustar00rootroot00000000000000reclass-1.7.0/releasenotes/config.yaml000066400000000000000000000036341373565003400200110ustar00rootroot00000000000000--- # Usage: # # reno -qd .releasenotes list # reno -qd .releasenotes new slug-title --edit # reno -qd .releasenotes report --no-show-source # Change prelude_section_name to 'summary' from default value prelude prelude_section_name: summary show_source: False sections: - [summary, Summary] - [features, New features] - [fixes, Bug fixes] - [others, Other notes] template: | --- # Author the following sections or remove the section if it is not related. # Use one release note per a feature. # # If you miss a section from the list below, please first submit a review # adding it to .releasenotes/config.yaml. # # Format content with reStructuredText (RST). # **Formatting examples:** # - | # This is a brief description of the feature. It may include a # number of components: # # * List item 1 # * List item 2. # This code block below will appear as part of the list item 2: # # .. code-block:: yaml # # classes: # - system.class.to.load # # The code block below will appear on the same level as the feature # description: # # .. code-block:: text # # provide model/formula pillar snippets summary: > This section is not mandatory. Use it to highlight the change. features: - Use the list to record summary of **NEW** features - Provide detailed description of the feature indicating the use cases when users benefit from using it - Provide steps to deploy the feature (if the procedure is complicated indicate during what stage of the deployment workflow it should be deployed). - Provide troubleshooting information, if any. fixes: - Use the list to record summary of a bug fix for blocker, critical. 
- Provide a brief summary of what has been fixed. others: - Author any additional notes. Use this section if note is not related to any of the common sections above. reclass-1.7.0/releasenotes/notes/000077500000000000000000000000001373565003400170025ustar00rootroot00000000000000reclass-1.7.0/releasenotes/notes/escaping-references-e76699d8ca010013.yaml000066400000000000000000000001551373565003400256530ustar00rootroot00000000000000--- others: - The escaping of references changes how the constructs '\${xxx}' and '\\${xxx}' are rendered. reclass-1.7.0/requirements.txt000066400000000000000000000000401373565003400164370ustar00rootroot00000000000000pyparsing pyyaml six enum34 ddt reclass-1.7.0/run_tests.py000077500000000000000000000007661373565003400155750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import unittest tests = unittest.TestLoader().discover('reclass') unittest.TextTestRunner(verbosity=1).run(tests) reclass-1.7.0/setup.cfg000066400000000000000000000003761373565003400150100ustar00rootroot00000000000000[bdist_wheel] # This flag says that the code is written to work on both Python 2 and Python # 3. If at all possible, it is good practice to do this. If you cannot, you # will need to generate wheels for each Python version that you support. universal=0 reclass-1.7.0/setup.py000066400000000000000000000033641373565003400147010ustar00rootroot00000000000000# # -*- coding: utf-8 -*- # # This file is part of reclass (http://github.com/madduck/reclass) # # Copyright © 2007–13 martin f. 
krafft # Released under the terms of the Artistic Licence 2.0 # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from reclass.version import * from setuptools import setup, find_packages # use consistent encoding of readme for pypi from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() ADAPTERS = ['salt', 'ansible'] console_scripts = ['reclass = reclass.cli:main'] console_scripts.extend('reclass-{0} = reclass.adapters.{0}:cli'.format(i) for i in ADAPTERS) setup( name = RECLASS_NAME, description = DESCRIPTION, long_description=long_description, version = VERSION, author = AUTHOR, author_email = AUTHOR_EMAIL, maintainer = MAINTAINER, maintainer_email = MAINTAINER_EMAIL, license = LICENCE, url = URL, packages = find_packages(exclude=['*tests']), #FIXME validate this entry_points = { 'console_scripts': console_scripts }, install_requires = ['pyparsing', 'pyyaml', 'six', 'enum34', 'ddt'], #FIXME pygit2 (require libffi-dev, libgit2-dev 0.26.x ) classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: System Administrators', 'Topic :: System :: Systems Administration', 'License :: OSI Approved :: Artistic License', 'Programming Language :: Python :: 2.7', ], keywords='enc ansible salt' ) 
reclass-1.7.0/test/000077500000000000000000000000001373565003400141405ustar00rootroot00000000000000reclass-1.7.0/test/model/000077500000000000000000000000001373565003400152405ustar00rootroot00000000000000reclass-1.7.0/test/model/default/000077500000000000000000000000001373565003400166645ustar00rootroot00000000000000reclass-1.7.0/test/model/default/classes/000077500000000000000000000000001373565003400203215ustar00rootroot00000000000000reclass-1.7.0/test/model/default/classes/first.yml000066400000000000000000000006351373565003400221770ustar00rootroot00000000000000parameters: _param: some: param colour: red lab: name: test label: first colour: escaped: \${_param:colour} doubleescaped: \\${_param:colour} unescaped: ${_param:colour} colours: red: name: red blue: name: blue one: a: 1 b: 2 two: c: 3 d: 4 three: e: 5 list_to_override: - one - two dict_to_override: one: 1 two: 2 reclass-1.7.0/test/model/default/classes/lab/000077500000000000000000000000001373565003400210575ustar00rootroot00000000000000reclass-1.7.0/test/model/default/classes/lab/env/000077500000000000000000000000001373565003400216475ustar00rootroot00000000000000reclass-1.7.0/test/model/default/classes/lab/env/dev.yml000066400000000000000000000000421373565003400231440ustar00rootroot00000000000000 parameters: lab: name: dev reclass-1.7.0/test/model/default/classes/second.yml000066400000000000000000000001551373565003400223200ustar00rootroot00000000000000classes: - first parameters: will: warn: at: second: ${_param:notfound} three: ${one} reclass-1.7.0/test/model/default/classes/third.yml000066400000000000000000000005511373565003400221570ustar00rootroot00000000000000classes: - second parameters: _param: notfound: exist myparam: ${_param:some} will: not: fail: at: tree: ${_param:notfound} 1: an_numeric_key: true as_a_dict: 1 2: - as_a_list 3: value three: ${two} empty: list: [] dict: {} ~list_to_override: ${empty:list} ~dict_to_override: ${empty:dict} 
reclass-1.7.0/test/model/default/nodes/000077500000000000000000000000001373565003400177745ustar00rootroot00000000000000reclass-1.7.0/test/model/default/nodes/reclass.yml000066400000000000000000000000221373565003400221450ustar00rootroot00000000000000 classes: - third reclass-1.7.0/test/model/default/reclass-config.yml000066400000000000000000000000261373565003400223040ustar00rootroot00000000000000storage_type: yaml_fs reclass-1.7.0/test/model/extensions/000077500000000000000000000000001373565003400174375ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/000077500000000000000000000000001373565003400210745ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/defaults.yml000066400000000000000000000000521373565003400234230ustar00rootroot00000000000000 parameters: config: defaults: True reclass-1.7.0/test/model/extensions/classes/first.yml000066400000000000000000000001151373565003400227430ustar00rootroot00000000000000parameters: _param: some: param lab: name: test label: first reclass-1.7.0/test/model/extensions/classes/lab/000077500000000000000000000000001373565003400216325ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/lab/env/000077500000000000000000000000001373565003400224225ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/lab/env/dev.yml000066400000000000000000000000421373565003400237170ustar00rootroot00000000000000 parameters: lab: name: dev reclass-1.7.0/test/model/extensions/classes/relative/000077500000000000000000000000001373565003400227075ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/relative/init.yml000066400000000000000000000000261373565003400243730ustar00rootroot00000000000000 classes: - .nested 
reclass-1.7.0/test/model/extensions/classes/relative/nested/000077500000000000000000000000001373565003400241715ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/relative/nested/common.yml000066400000000000000000000000771373565003400262100ustar00rootroot00000000000000 parameters: nested: deep: common: to be overriden reclass-1.7.0/test/model/extensions/classes/relative/nested/deep/000077500000000000000000000000001373565003400251065ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/relative/nested/deep/common.yml000066400000000000000000000000651373565003400271220ustar00rootroot00000000000000 parameters: nested: deep: common: False reclass-1.7.0/test/model/extensions/classes/relative/nested/deep/init.yml000066400000000000000000000001331373565003400265710ustar00rootroot00000000000000 classes: - .common parameters: nested: deep: init: True common: True reclass-1.7.0/test/model/extensions/classes/relative/nested/dive/000077500000000000000000000000001373565003400251205ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/classes/relative/nested/dive/session.yml000066400000000000000000000000651373565003400273270ustar00rootroot00000000000000 parameters: nested: deep: session: True reclass-1.7.0/test/model/extensions/classes/relative/nested/init.yml000066400000000000000000000001441373565003400256560ustar00rootroot00000000000000 classes: - .common - .deep - .dive.session parameters: nested: deep: init: True reclass-1.7.0/test/model/extensions/classes/second.yml000066400000000000000000000001501373565003400230660ustar00rootroot00000000000000classes: - first - relative parameters: will: warn: at: second: ${_param:notfound} reclass-1.7.0/test/model/extensions/classes/third.yml000066400000000000000000000002771373565003400227370ustar00rootroot00000000000000classes: - missing.class - second - .defaults parameters: _param: notfound: exist myparam: ${_param:some} will: not: fail: at: tree: ${_param:notfound} 
reclass-1.7.0/test/model/extensions/nodes/000077500000000000000000000000001373565003400205475ustar00rootroot00000000000000reclass-1.7.0/test/model/extensions/nodes/reclass.yml000066400000000000000000000000231373565003400227210ustar00rootroot00000000000000 classes: - .third reclass-1.7.0/test/model/extensions/reclass-config.yml000066400000000000000000000001161373565003400230570ustar00rootroot00000000000000storage_type: yaml_fs ignore_class_notfound: True ignore_class_regexp: ['.*'] reclass-1.7.0/tox.ini000066400000000000000000000017201373565003400144740ustar00rootroot00000000000000# this file is *not* meant to cover or endorse the use of tox or pytest or # testing in general, # # It's meant to show the use of: # # - check-manifest # confirm items checked into vcs are in your sdist # - python setup.py check (using the readme_renderer extension) # confirms your long_description will render correctly on pypi # # and also to help confirm pull requests to this project. [tox] envlist = py{27} [testenv] basepython = py27: python2.7 whitelist_externals= make deps = check-manifest {py27}: readme_renderer # flake8 out of the picture right now pytest mock pylint nose commands = check-manifest --ignore tox.ini,tests* {py27}: python setup.py check -m -r -s # flake8 . # FIXME: This code smell check goes poorly for us at present make tests # make lint-errors # FIXME: Cause these to operate properly inside tox # make coverage [flake8] exclude = .tox,*.egg,build,data select = E,W,F