r10k-4.0.2/.gitattributes

docker/** text eol=lf

r10k-4.0.2/.github/dependabot.yml

version: 2
updates:
  # raise PRs for gem updates
  - package-ecosystem: bundler
    directory: "/"
    schedule:
      interval: daily
      time: "13:00"
    open-pull-requests-limit: 10
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: daily
      time: "13:00"
    open-pull-requests-limit: 10

r10k-4.0.2/.github/pull_request_template.md

Please add all notable changes to the "Unreleased" section of the CHANGELOG in the format:

```
- (JIRA ticket) Summary of changes. [Issue or PR #](link to issue or PR)
```

r10k-4.0.2/.github/workflows/docker.yml

name: Docker test and publish

on:
  push:
    branches:
      - main

jobs:
  build-and-publish:
    env:
      PUPPERWARE_ANALYTICS_STREAM: production
      IS_LATEST: true
      # Still need these env vars for README publishing
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: azure/docker-login@v1
        with:
          # This doesn't seem to work unless we point directly to the secrets
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}
      - name: Set up Ruby 2.6
        uses: actions/setup-ruby@v1
        with:
          ruby-version: 2.6.x
      - run: gem install bundler
      - uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build container
        working-directory: docker
        run: |
          docker system prune --all --force --volumes
          docker builder prune --force --keep-storage=10GB
          make lint build test
      - name: Publish container
        working-directory: docker
        run: |
          make publish
      - name: Build release container
        env:
          IS_RELEASE: true
        working-directory: docker
        run: |
          if make prep; then
            make lint build test
          else
            echo "Skipping release container building and testing"
          fi
      - name: Publish release container
        env:
          IS_RELEASE: true
        working-directory: docker
        run: |
          if make prep; then
            make publish
          else
            echo "Skipping release container publishing"
          fi

r10k-4.0.2/.github/workflows/release.yml

name: Tag and release

on:
  push:
    branches:
      - main
    paths:
      - 'lib/r10k/version.rb'

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: '0'
      - name: Bump version and push tag
        uses: anothrNick/github-tag-action@1.67.0
        env:
          GITHUB_TOKEN: ${{ secrets.PUPPET_RELEASE_GH_TOKEN }}
          DEFAULT_BUMP: patch
          TAG_CONTEXT: branch
          WITH_V: false
          # Uncomment this if the tag and version file become out-of-sync and
          # you need to tag at a specific version.
          # CUSTOM_TAG:
      - name: Build gem
        uses: scarhand/actions-ruby@master
        with:
          args: build *.gemspec
      - name: Publish gem
        uses: scarhand/actions-ruby@master
        env:
          RUBYGEMS_AUTH_TOKEN: ${{ secrets.RUBYGEMS_AUTH_TOKEN }}
        with:
          args: push *.gem

r10k-4.0.2/.github/workflows/rspec_tests.yml

name: Rspec tests

on:
  pull_request:
    branches:
      - main
      - 3.x

jobs:
  rspec_tests:
    name: ${{ matrix.cfg.os }}(ruby ${{ matrix.cfg.ruby }})
    strategy:
      matrix:
        cfg:
          - {os: ubuntu-latest, ruby: 2.6}
          - {os: ubuntu-latest, ruby: 2.7}
          - {os: ubuntu-latest, ruby: 3.1}
          - {os: ubuntu-latest, ruby: 3.2}
          - {os: ubuntu-latest, ruby: jruby-9.3}
          - {os: ubuntu-latest, ruby: jruby-9.4}
          - {os: windows-latest, ruby: 2.6}
          - {os: windows-latest, ruby: 3.2}
    runs-on: ${{ matrix.cfg.os }}
    steps:
      - name: Checkout current PR
        uses: actions/checkout@v3
      - name: Install ruby version ${{ matrix.cfg.ruby }}
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.cfg.ruby }}
      - name: Install bundler and gems
        run: |
          bundle config set without packaging documentation
          bundle install --jobs 4 --retry 3
      - name: Run tests on Windows
        if: runner.os == 'Windows'
        run: |
          # https://github.com/ruby/ruby/pull/2791/files#diff-ff5ff976e81bebd977f0834e60416abbR97-R100
          # Actions uses UTF8, causes test failures, similar to normal OS setup
          $PSDefaultParameterValues['*:Encoding'] = 'utf8'
          [Console]::OutputEncoding = [System.Text.Encoding]::GetEncoding("IBM437")
          [Console]::InputEncoding = [System.Text.Encoding]::GetEncoding("IBM437")
          $Env:LOG_SPEC_ORDER = 'true'
          # debug information
          chcp
          Get-WinSystemLocale
          Get-ChildItem Env: | % { Write-Output "$($_.Key): $($_.Value)" }
          # list current OpenSSL install
          gem list openssl
          ruby -ropenssl -e 'puts \"OpenSSL Version - #{OpenSSL::OPENSSL_VERSION}\"; puts \"OpenSSL Library Version - #{OpenSSL::OPENSSL_LIBRARY_VERSION}\"'
          Get-Content Gemfile.lock
          ruby -v
          gem --version
          bundle --version
          # Run tests
          bundle exec rspec --color --format documentation spec
      - name: Run tests on Linux
        if: runner.os == 'Linux'
        run: |
          # debug information
          cat Gemfile.lock
          ruby -v
          gem --version
          bundle --version
          if [[ ${{ matrix.cfg.ruby }} =~ "jruby" ]]; then
            export _JAVA_OPTIONS='-Xmx1024m -Xms512m'
            # workaround for PUP-10683
            sudo apt remove rpm
          fi
          # Run tests
          bundle exec rspec --color --format documentation spec

r10k-4.0.2/.github/workflows/stale.yml

name: Mark stale issues

on:
  schedule:
    - cron: "30 1 * * *"

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
          days-before-close: 7
          stale-issue-message: 'This issue has been marked stale because it has had no activity for 60 days. The Puppet Team is actively prioritizing existing bugs and new features, if this issue is still important to you please comment and we will add this to our backlog to complete. Otherwise, it will be closed in 7 days.'
          stale-issue-label: 'stale'
          exempt-issue-labels: 'community interest'
          stale-pr-message: "This PR has been marked stale because it has had no activity for 60 days. If you are still interested in getting this merged, please comment and we'll try to move it forward. Otherwise, it will be closed in 7 days."
          stale-pr-label: 'stale'
          exempt-pr-labels: 'community interest'

r10k-4.0.2/.gitignore

/r10k*.gem
Gemfile.local
Gemfile.lock
.bundle
bundle
coverage
integration/log
integration/junit
integration/configs
r10k.log

r10k-4.0.2/CHANGELOG.mkd

CHANGELOG
=========

Unreleased
----------

4.0.2
-----

- Make metadata loading during incremental deploys more robust [PE-34917](https://perforce.atlassian.net/browse/PE-34917)
- Use the production forge for integration testing
- (maint) Update beaker-pe to 3 [#1376](https://github.com/puppetlabs/r10k/pull/1376)
- (maint) Stop puppet service during tests [#1377](https://github.com/puppetlabs/r10k/pull/1377)

4.0.1
-----

- shellgit: Ensure the passed filepath to diff-index is interpreted as filepath [#1367](https://github.com/puppetlabs/r10k/pull/1367)

4.0.0
-----

- Drop Ruby 2.3/2.4/2.5 support; use puppet_forge 4.1 or newer [#1336](https://github.com/puppetlabs/r10k/pull/1336)
- (maint) Add Ruby 3.0 to rspec CI matrix [#1261](https://github.com/puppetlabs/r10k/pull/1261)
- (RK-368) remove `purge_whitelist` setting [#1277](https://github.com/puppetlabs/r10k/pull/1277)
- (RK-390) Remove default ref for deploying git modules [#1275](http://github.com/puppetlabs/r10k/pull/1275)
- (RK-391) Change `exclude_spec` default to true for module spec dir deletion [#1264](https://github.com/puppetlabs/r10k/pull/1261)
- (RK-383) Remove deprecated `basedir` method from Puppetfile DSL. Users should use `environment_name` instead. [#1254](https://github.com/puppetlabs/r10k/pull/1254)
- (RK-386) Remove deprecated `bare` environment type. [#1235](https://github.com/puppetlabs/r10k/issues/1235)

3.16.1
------

- Make metadata loading during incremental deploys more robust [PE-34917](https://perforce.atlassian.net/browse/PE-34917)
- Use the production forge for integration testing

3.16.0
------

- Emit more debug output when modules fail to sync [#1347](https://github.com/puppetlabs/r10k/pull/1347)
- Update GitHub Actions & introduce dependabot [#1337](https://github.com/puppetlabs/r10k/pull/1337)
- Update R10K proxy usage to follow newer rugged best practices [PE-35980](https://tickets.puppet.com/browse/PE-35980)
- Update Acceptance tests to be compatible with Puppet 8 [#1349](https://github.com/puppetlabs/r10k/pull/1349)

3.15.4
------

- Pin dependencies to maintain support for old Ruby versions [#1329](https://github.com/puppetlabs/r10k/pull/1329)

3.15.3
------

- Fix dirty working copy debug logging [#1321](https://github.com/puppetlabs/r10k/pull/1321)
- Allow gettext-setup < 2 for compatibility with Ruby 3.2 and Puppet 8 [#1325](https://github.com/puppetlabs/r10k/pull/1325)

3.15.2
------

- Implement exclude regex for puppetfile install [#1248](https://github.com/puppetlabs/r10k/issues/1248)

3.15.1
------

- Add TOC to configuration docs [#1298](https://github.com/puppetlabs/r10k/issues/1298)
- Remove the spec folder from gemspec [#1316](https://github.com/puppetlabs/r10k/issues/1316)

3.15.0
------

- Support and test Ruby 3
- Allow puppet_forge 3.x & newer versions of fast_gettext/gettext [#1302](https://github.com/puppetlabs/r10k/pull/1302)
- Allow newer cri versions [#1302](https://github.com/puppetlabs/r10k/pull/1302)
- Fix error when using install_path from environment module [#1288](https://github.com/puppetlabs/r10k/issues/1288)
- (RK-399) Do not warn about local modifications in the spec directory when `exclude_spec` is
set [#1291](https://github.com/puppetlabs/r10k/pull/1291) 3.14.2 ------ - (RK-397) Ensure `--incremental` does not skip undeployed modules [#1278](https://github.com/puppetlabs/r10k/pull/1278) 3.14.1 ------ - (RK-395) Make `exclude_spec` from a Puppetfile the priority override [#1271](https://github.com/puppetlabs/r10k/issues/1271) - (RK-394) Fix `force` always resolving to true for `puppetfile install` [#1269](https://github.com/puppetlabs/r10k/issues/1265) - (RK-393) Bug fix: not all spec directories are deleted when :exclude_spec is true [#1267](https://github.com/puppetlabs/r10k/pull/1267) - Refactor internal module creation to always expect a hash, even for Forge modules, which can be specified in the Puppetfile with just a version string. [#1170](https://github.com/puppetlabs/r10k/pull/1170) 3.14.0 ------ - Record unprocessed environment name, so that `strip_component` does not cause truncated environment names to be used as git branches, resulting in errors or incorrect deploys. [#1240](https://github.com/puppetlabs/r10k/pull/1240) - (CODEMGMT-1294) Resync repos with unresolvable refs [#1239](https://github.com/puppetlabs/r10k/pull/1239) - (RK-378) Restore access to the environment name from the Puppetfile [#1241](https://github.com/puppetlabs/r10k/pull/1241) - (CODEMGMT-1300) Ensure the remote url in rugged cache directories is current [#1245](https://github.com/puppetlabs/r10k/pull/1245) - Add support for tarball module type, allowing module content to be packaged and sourced from generic fileservers [#1244](https://github.com/puppetlabs/r10k/pull/1244) - Add experimental support for tarball environment type, allowing whole environments to be packaged and sourced from generic fileservers [#1244](https://github.com/puppetlabs/r10k/pull/1244) 3.13.0 ------ - Restore Ruby 3 compatibility [#1234](https://github.com/puppetlabs/r10k/pull/1234) - (RK-381) Do not recurse into symlinked dirs when finding files to purge. [#1233](https://github.com/puppetlabs/r10k/pull/1233) - Purge should remove unmanaged directories, in addition to unmanaged files. [#1222](https://github.com/puppetlabs/r10k/pull/1222) - Rename experimental environment type "bare" to "plain". [#1228](https://github.com/puppetlabs/r10k/pull/1228) - Add support for specifying additional logging ouputs. [#1230](https://github.com/puppetlabs/r10k/issues/1230) 3.12.1 ------ - Fix requiring individual R10K::Actions without having already required 'r10k'. [#1223](https://github.com/puppetlabs/r10k/issues/1223) - Fix evaluation of Puppetfiles that include local modules. [#1224](https://github.com/puppetlabs/r10k/pull/1224) 3.12.0 ------ - (RK-308) Provide a `forge.allow_puppetfile_override` setting that, when true, causes a `forge` declaration in the Puppetfile to override `forge.baseurl`. [#1214](https://github.com/puppetlabs/r10k/pull/1214) - (CODEMGMT-1415) Provide an `--incremental` flag to only sync those modules in a Puppetfile whose definitions have changed since last sync, or those whose versions could change. [#1200](https://github.com/puppetlabs/r10k/pull/1200) - (CODEMGMT-1454) Ensure missing repo caches are re-synced [#1210](https://github.com/puppetlabs/r10k/pull/1210) - (PF-2437) Allow token authentication to be used with the Forge. [#1192](https://github.com/puppetlabs/r10k/pull/1192) - Only run the module postrun command for environments in which the module was modified. 
[#1215](https://github.com/puppetlabs/r10k/issues/1215) 3.11.0 ------ - Always sync git cache on `ref: 'HEAD'` [#1182](https://github.com/puppetlabs/r10k/pull/1182) - (CODEMGMT-1421, CODEMGMT-1422, CODEMGMT-1457) Add setting `exclude_spec` to remove the spec dir from module deployment[#1189](https://github.com/puppetlabs/r10k/pull/1189)[#1198](https://github.com/puppetlabs/r10k/pull/1198)[#1204](https://github.com/puppetlabs/r10k/pull/1204) - (RK-369) Make module deploys run the postrun command if any environments were updated. [#982](https://github.com/puppetlabs/r10k/issues/982) - Add support for Github App auth token. This allows r10k to authenticate under strict SSO/2FA guidelines that cannot utilize machine users for code deployment. [#1180](https://github.com/puppetlabs/r10k/pull/1180) - Restore the ability to load a Puppetfile from a relative `basedir`. [#1202](https://github.com/puppetlabs/r10k/pull/1202), [#1203](https://github.com/puppetlabs/r10k/pull/1203) 3.10.0 ------ - Add `authorization_token` setting to allow authentication to a custom Forge server. [#1181](https://github.com/puppetlabs/r10k/pull/1181) - (RK-135) Attempting to download the latest version for a module that has no Forge releases will now issue a meaningful error. [#1177](https://github.com/puppetlabs/r10k/pull/1177) - Added an interface to R10K::Source::Base named `reload!` for updating the environments list for a given deployment; `reload!` is called before deployment purges to make r10k deploy pools more threadsafe. [#1172](https://github.com/puppetlabs/r10k/pull/1172) - Remove username and password from remote url in cache directory name [#1186](https://github.com/puppetlabs/r10k/pull/1186) - Purging efficiency is greatly improved. R10K will no longer recurse into directories that match recursive purge exclusions. This should significantly improve the deploy times for those users who enable the "environment" purge level. [#1178](https://github.com/puppetlabs/r10k/pull/1178) 3.9.3 ----- - Fixes a regression when using `--default_branch_override` with Puppetfiles containing Forge modules. [#1173](https://github.com/puppetlabs/r10k/issues/1173) 3.9.2 ----- - Makes the third parameter to R10K::Actions optional, restoring backwards compatability broken in 3.9.1. 3.9.1 ----- - Invalid module specifications in a Puppetfile will cause the R10K run to abort earlier than before. Prior to this release, the R10K run would complete, sync all other modules, and return an exit code of 1. R10K will now stop syncing modules and abort immediately. [#1161](https://github.com/puppetlabs/r10k/pull/1161) 3.9.0 ----- - Add '--modules' flag to `deploy` subcommand as a replacement to '--puppetfile', deprecate '--puppetfile'. [#1147](https://github.com/puppetlabs/r10k/pull/1147) - Deprecate 'purge_whitelist' and favor usage of 'purge_allowlist'. [#1144](https://github.com/puppetlabs/r10k/pull/1144) - Add 'strip\_component' environment source configuration setting, to allow deploying Git branches named like "env/production" as Puppet environments named like "production". [#1128](https://github.com/puppetlabs/r10k/pull/1128) - A warning will be emitted when the user supplies conflicting arguments to module definitions in a Puppetfile, such as when specifying both :commit and :branch [#1130](https://github.com/puppetlabs/r10k/pull/1130) - Add optional standard module and environment specification interface: name, type, source, version. 
These options can be used when specifying environments and/or modules in a yaml/exec source, as well as when specifying modules in a Puppetfile. Providing the standard interface simplifies integrations with external services [#1131](https://github.com/puppetlabs/r10k/pull/1131) - Pin cri to 2.15.10 to maintain support for Ruby 2.3 and 2.4 [#1121](https://github.com/puppetlabs/r10k/issues/1121) 3.8.0 ----- - When a forge module fails name validation the offending name will now be printed in the error message. [#1126](https://github.com/puppetlabs/r10k/pull/1126) - Module ref resolution will now fall back to the normal default branch if the default branch override cannot be resolved. [#1122](https://github.com/puppetlabs/r10k/pull/1122) - Experimental feature change: conflicts between environment-defined modules and Puppetfile-defined modules now default to logging a warning and deploying the environment module version, overriding the Puppetfile. Previously, conflicts would result in an error. The behavior is now configurable via the `module_conflicts` environment setting [#1107](https://github.com/puppetlabs/r10k/pull/1107) 3.7.0 ----- - Use %LOCALAPPDATA% for Windows cachedir [#1049](https://github.com/puppetlabs/r10k/issues/1049) - Add support for defining custom puppet.conf when generating types [#993](https://github.com/puppetlabs/r10k/pull/993) - Update test cases to account for error message changes in Puppet 7 3.6.0 ----- - Add filter_command configuration option for git repositories. (Thanks to [mhumpula](https://github.com/mhumpula) for the feature.) [#823](https://github.com/puppetlabs/r10k/pull/823) - Increase default pool_size to 4, allowing modules to be downloaded on 4 threads concurrently. [#1038](https://github.com/puppetlabs/r10k/issues/1038) - Ensure that modules that share a cachedir download serially, to avoid cache corruption. [#1058](https://github.com/puppetlabs/r10k/issues/1058) - Don't purge root when using `puppetfile install`. [#1084](https://github.com/puppetlabs/r10k/issues/1084) 3.5.2 ----- - (RK-319) Clean up tmp directories used for downloading modules 3.5.1 ----- - Upgrade Rugged Gemfile dependency for local development to ~> 1.0 3.5.0 ----- - Add exec environment source type. The exec source type allows for the implementation of external environment sources [#1042](https://github.com/puppetlabs/r10k/pull/1042). - Improve atomicity of .r10k-deploy.json writes. Fixes [#813](https://github.com/puppetlabs/r10k/issues/813) 3.4.1 ----- - Add support for Ruby 2.7 - (RK-357) Restrict gettext and fast_gettext versions for compatibility with Ruby 2.4 - Bump cri to 2.15.10 - (RK-360) Allow overriding the cachedir in `deploy module` - Bump puppet_forge to 2.3.0 3.4.0 ----- - Switch to supported `colored2` gem - Add YAML environment source types [#983](https://github.com/puppetlabs/r10k/pull/983), [#1002](https://github.com/puppetlabs/r10k/pull/1002). YAML source types use configuration files to define environments, rather than VCS branches, and support deploying modules to an environment without modifying a control-repo. YAML environment source types are experimental in this release 3.3.3 ----- ### Changes - Update minitar version to 0.9, to match the version installed with puppet-agent 3.3.2 ----- ### Bug Fixes - Switched to thread-safe OpenSSL::Digest creation. 
[#979](https://github.com/puppetlabs/r10k/issues/979) 3.3.1 ----- ### Changes - Updated thread pool error handling and logging 3.3.0 ----- ### New Feature - Adds support for installing modules concurrently ### Bug Fixes - (RK-343) Pins CRI dependency to 2.15.6 to resolve regression in options parsing. 3.2.1 ---- ### Changes - Flag for overriding default branch configuration in Puppetfile - Plumbing for internationalization - Numerous test fixes and legacy docker work 3.2.0 ----- ### Changes - Add support for running `puppet generate types` 3.1.1 ----- ### Changes - (RK-335) Postrun `modifiedenvs` doesn't include environment prefixes 3.1.0 ----- ### Changes - Substitute environments acted on in postrun command. Now post run commands that contain the string "$modifiedenvs" (eg. ["/usr/local/bin/my-postrun-cmd", "--verbose", "$modifiedenvs"]) will have the string substituted with a space separated list of environments acted upon (either a single environment if specified on the command line or all environments). Specifically this should allow users to easily wrap `puppet generate types` and matches the terminology used in g10k. Many thanks to @raphink for the contribution. 3.0.4 ---- ### Changes - Flag for overriding default branch configuration in Puppetfile - Plumbing for internationalization - Numerous test fixes and legacy docker work 3.0.3 ---- ### Changes - (RK-324) Fix Ruby pipe bug affecting Ubuntu 3.0.2 ---- ### Changes - Minor test fixes. 3.0.1 ---- ### Changes Because of dependency issues R10K 3.0.0 required Ruby >= 2.3 rather than the reported 2.0. This release makes the requirement of Ruby >= 2.3 official and documented. - (#853) ([RK-327](https://tickets.puppetlabs.com/browse/RK-327) Uninitialized Constant Cri::Error When resolving the Cri dependency >= 2.13 R10K would fail with an uninitialized constant error. Thanks to @ostavnaas for the bug report, @ddfreyne for the fix, and @baurmatt for the review. 3.0.0 ---- ### Changes #### Known issues - Child processes may die unexpectedly when deploying many environments on Ubuntu Bionic. See [RK-324](https://tickets.puppetlabs.com/browse/RK-324). #### Backwards breaking changes - Drop support for Ruby < 2.0 - Remove support for PUPPETFILE and PUPPETFILE_DIR environment variables when running the `puppetfile` action, please use flags instead. - Fail when duplicate module definitions in Puppetfile #### Bug fixes - More reliable pruning of refs on fetch - Improved error messaging when: - Unable to connect to a proxy - r10k.yaml file is empty - Unable to parse Puppetfile - Various perfomance improvements 2.6.9 ---- - Don't purge root when using `puppetfile install`. [#1084](https://github.com/puppetlabs/r10k/issues/1084) 2.6.8 ---- ### Changes (RK-357) Restrict gettext and fast_gettext versions for compatibility with Ruby 2.4. (RK-358) Update puppet_forge to ~> 2.3.0. 2.6.7 ---- ### Changes (CDPE-1813) Add module deploy info to .r10k-deploy.json. (RK-351) Update minitar to ~> 0.9.0. 2.6.6 ---- ### Changes - Flag for overriding default branch configuration in Puppetfile - Plumbing for internationalization - Numerous test fixes and legacy docker work 2.6.5 ---- ### Bug Fix (RK-324) Fix Ruby pipe bug affecting Ubuntu 2.6.4 ---- ### Changes Numerous test fixes. 2.6.3 ---- ### Changes Update specs with new error string. **NOTE** - CHANGELOG is only assured to be up to date for a particular branch when a release is made on that branch. 2.6.2 ----- ### Changes (RK-311) Yard dependency updated for security fix. 
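For illustration of the `$modifiedenvs` substitution described in the 3.1.0 notes above, a minimal `r10k.yaml` fragment is sketched below; the wrapper script path is hypothetical (it is the same placeholder used in those notes):

```yaml
# r10k.yaml fragment (sketch only; the wrapper script is a placeholder)
postrun: ['/usr/local/bin/my-postrun-cmd', '--verbose', '$modifiedenvs']
```

At run time r10k replaces `$modifiedenvs` with a space-separated list of the environments it acted on, so a wrapper like this can, for example, run `puppet generate types` only for the environments that actually changed.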
2.6.1 ----- ### Bug Fixes (RK-310) Fix ChecksumMismatch error on Windows for forge caching. 2.6.0 ----- ### New Features (RK-307) Branches can now be ignored by prefixes during deployment. (RK-305) Add --no-force to deploy action to avoid overwriting local module changes. (RK-264) Add --force action to puppetfile install to force overwriting local module changes. (RK-291) (RK-304) Add caching of forge modules. ### Changes (RK-306) Remove the dependency on semantic_puppet. (RK-161) Deprecate the usage of PUPPETFILE and PUPPETFILE_DIR environment variables. 2.5.5 ----- 2017/06/02 ### Bug Fixes (#696) Move deprecated module check to install/reinstall/upgrade rather than synchronize. This fixes a major slowdown on redeployment. ### Changes (RK-290) Add deprecation warning for duplicate module names in Puppetfile. This will cause an error in r10k v3.0.0. (RK-285) Update minitar dependency to 0.6.1 2.5.4 ----- 2017/04/05 ### Bug Fixes (PF-1317) Only use deprecated attribute when it exists. 2.5.3 ----- 2017/03/31 ### Bug Fixes (#645) Fix undefined method error when r10.yaml empty (#659)(RK-269) Puppetfile actions acknowledge :branch and :default_branch (Thanks to [Chris Cowley](https://github.com/chriscowley) for the report.) ### Changes (PF-1317) Emit a warning when syncing a deprecated Forge module. 2.5.2 ----- 2017/02/07 (#699) Pin minitar dependency to 0.5.4 to avoid a bug with the 0.6.0 release. (Thanks to [Logan Garrett](https://github.com/lngarrett) for the report and fix.) 2.5.1 ----- 2016/12/05 (RK-78) Use :prune option for #fetch in Rugged::BareRepository Versions of the "rugged" gem prior to 0.24.0 lacked the ability to automatically "prune" branches from a local repo that no longer existed in the matching remote repo after a fetch. To work around this issue, r10k included code that would manually remove/recreate branches during a fetch. Since "rugged" 0.24.0 is now widely available, r10k has been updated to use the built-in "prune" option during a fetch and the workaround code has been removed. NOTE: If you use the "rugged" gem with r10k, you will need to manually upgrade it to a version >= 0.24.0 to take advantage of the new functionality. If you are using a "rugged" version less than 0.24.0, r10k will now issue a warning every time it fetches from a remote git repository. 2.5.0 ----- 2016/11/15 ### Bug Fixes (#669) Updated the behavior of the rugged based git provider to handle unexpected behavior around checkout and resets regarding file permissions, specifically, when resetting to an already checked out SHA the executable bit on files would not update. ### Changes (#664) Added to the proxy error message for changes made to libcurl which gave new, surprising errors about unsupported proxy schemes. 2.4.3 ----- 2016/08/23 ### Bug Fixes (RK-266) Fixed an issue where the "puppetfile install" action was encountering an error when operating on a Puppetfile with "local" content declarations. 2.4.2 (Yanked) -------------- 2016/08/22 ### Bug Fixes (RK-265) The "puppetfile install" action will no longer overwrite local modifications to managed Git content. Instead, a message will be logged at the "WARN" level indicating that the content was skipped. Note: The "deploy" actions will still overwrite local modications. For more background on this change, see below: In 2.4.0 a change was made to r10k's behavior when it encounters local modifications during "deploy" operations. Previously, r10k would log an error and skip updating the modified content. 
As of 2.4.0, local modifications will be overwritten and a warning will be logged. This change was considered a bug fix but was originally omitted from the changelog for that release. This change also inadvertently modified the behavior of the "puppetfile install" action. A command line flag to control this behavior more explicitly will likely be added in a future version. 2.4.1 ----- 2016/08/11 ### Bug Fixes (#634) Fix "undefined variable" error in "deploy module" action. (Special thanks to Andreas Ntaflos (antaflos) for the fix.) (#635) Reword some documentation around environment level purging. 2.4.0 ----- 2016/08/10 ### New Features (RK-222) New "install\_path" option for Git/SVN content. This feature allows you to specify where inside an environment each item from the Puppetfile should be installed to. See the [Puppetfile documentation](https://github.com/puppetlabs/r10k/blob/master/doc/puppetfile.mkd#per-item-install-path) for details. (RK-246) New "environment" level purging and configurable purge levels. You can now configure how r10k purges unmanaged content after a deployment. The default behavior should be unchanged but there is a new "purge\_levels" configuration option that can be used to enable new behavior or de-activate certain existing behaviors. See the relevant [configuration documentation](https://github.com/puppetlabs/r10k/blob/master/doc/dynamic-environments/configuration.mkd#purge_levels) for more details. (RK-223) Ability to track control repo branch from content declarations. Puppetfile content sourced from Git can now be configured to attempt to track the branch name of the control repo branch being deployed. For example, if r10k is deploying the 'production' branch of your control repo, it will try to also deploy the 'production' branch of a given Puppetfile content repo. See the [documentation](https://github.com/puppetlabs/r10k/blob/master/doc/puppetfile.mkd#control-repo-branch-tracking) for more details. ### Internationalization All user-facing strings generated by r10k have been externalized to enable future iternationalization (i18n) and localization work. ### Changed (RK-258) Symlinks inside of Forge modules will no longer cause r10k to exit non-zero. This situation used to raise an error but will now generate a WARN level log message instead. (#483) Local modifications to managed content will now be overwritten during "deploy" actions. (Note: This change inadvertently also affected the "puppetfile install" action in 2.4.0 and 2.4.1. This was fixed in 2.4.2. A command line flag to control this behavior more explicitly will likely be added in a future version.) ### Bug Fixes (#616) Ensure that Forge module version strings are valid semantic versions. (Special thanks to Patrick Robinson (patrobinson) for the fix.) (#622) Fix typos in workflow docs. (Special thanks to Yury Frolov (mrdracon) for the fix.) 2.3.0 ----- 2016/05/17 ### New Features (RK-236/RK-237) Added HTTP proxy support for Git operations. Previously, r10k only supported the use of HTTP proxies for connecting to the Puppet Forge. With these changes, r10k can now be configured to use an HTTP proxy for both Forge and Git operations. Configuration can be specified globally, for Forge or Git only, or on a per-Git repository basis. See [configuration documentation](https://github.com/puppetlabs/r10k/blob/master/doc/dynamic-environments/configuration.mkd) for more details. 
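As a sketch of the scopes described above (global, Forge-only, Git-only, and per-repository), the relevant `r10k.yaml` settings might be laid out as follows; every proxy hostname and the repository remote are hypothetical placeholders:

```yaml
# Sketch only: all proxy hosts and the remote URL are placeholders
proxy: 'http://proxy.example.com:3128'            # global default for HTTP(S) operations

forge:
  proxy: 'http://forge-proxy.example.com:3128'    # applies to Forge downloads only

git:
  proxy: 'http://git-proxy.example.com:3128'      # applies to Git operations only
  repositories:
    - remote: 'https://git.example.com/puppet/control-repo.git'
      proxy: 'http://dmz-proxy.example.com:3128'  # applies to this repository only
```

The configuration documentation linked above remains the authoritative reference for these settings.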
### Bug Fixes (RK-238) When r10k encounters and ignores invalid file types in a module archive, it will now log the message at the DEBUG1 level instead of at WARN. (RK-243) In certain cases, when using the "rugged" Git provider, specifying invalid HTTP credentials for a repository could result in an infinite loop. Authentication retry for HTTP repositories is now capped at 50 attempts which matches the existing behavior for SSH. 2.2.2 ----- 2016/04/18 (RK-241) "deploy display" action does not properly format wrapped exceptions The "deploy display" action was not capturing and logging exceptions in the same way as other related actions. This meant that in many cases, when an error occurred, the underlying cause was not being shown. Specifically, the "deploy display" action was not benefitting from the improved error messaging for unreadable SSH keys which was added in r10k 2.2.0 as part of RK-220. 2.2.1 ----- 2016/04/14 (RK-229) Setting Forge proxy options breaks PE Authentication A bug in the interaction between r10k and the puppet\_forge gem was preventing the correct Authorization headers for Puppet Enterprise-only modules from being included with requests to the Puppet Forge when r10k was also configured to use a proxy. This bug has been resolved by adding new functionality to the puppet\_forge gem and updating r10k to use the new version. 2.2.0 ----- 2016/03/08 ### Notes (RK-154) Per-repo config for Git sources Git repository options, such as the SSH private key, can now be set indepdently for each repository when using the Rugged provider. See [documentation](https://github.com/puppetlabs/r10k/blob/master/doc/git/providers.mkd#ssh-configuration-1) for details. (RK-220) Improved error message for unreadable SSH keys r10k will now check to ensure that the configured SSH private key for a given repository is readable before attempting to connect. This will result in a clearer error message in situations where the key file is not readable. (CODEMGMT-453) Support for running under JRuby 1.7 with shellgit provider r10k should now run successfully under JRuby 1.7.x when using the "shellgit" provider. (MAINT) Documentation fixes Various errors and inconsistencies in the documentation have been fixed thanks to contributions from [Paul Tobias](https://github.com/tobiaspal), [Rob Nelson](https://github.com/rnelson0), and [David Danzilio](https://github.com/danzilio). Thanks! 2.1.1 ----- 2015/11/12 ### Notes (CODEMGMT-440) Defer git alternates setup The fix for RK-175 that updated the Git alternates file for repositories was happening too early, and could cause issues when multiple r10k processes were running concurrently. This has been fixed so that the alternates file update is deferred till the first time the git repository is actually accessed. (RK-187) Consider thin repos with a .git file (not directory) to be mismatched If a given Git thin repository had a .git file where r10k expected there to be a directory it would behave badly; this has been fixed so that if r10k encounters this case it treats the repository as mismatched. (RK-181) Correctly set baseurl/proxy with shared PuppetForge URL A combination of some odd connection handling behavior in the puppet_forge gem combined with some bad assumptions in r10k prevented users from being able to actually set a custom forge baseurl; this has been fixed. 
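Combining the per-repository Git settings from the 2.2.0 notes (RK-154) with the custom Forge baseurl discussed under RK-181, an `r10k.yaml` sketch might look like the following; the Forge URL, remotes, and key paths are hypothetical:

```yaml
# Sketch only: URLs and key paths are placeholders
forge:
  baseurl: 'https://forge.internal.example.com'   # private/testing Forge instance

git:
  repositories:
    # repository-specific settings for the rugged provider (RK-154)
    - remote: 'git@git.example.com:puppet/control-repo.git'
      private_key: '/etc/puppetlabs/r10k/ssh/id_control'
    - remote: 'git@git.example.com:puppet/hieradata.git'
      private_key: '/etc/puppetlabs/r10k/ssh/id_hieradata'
```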
2.1.0 ----- 2015/10/28 ### Thanks Thanks to the following contributors for their work on this release: * [Abel Paz](https://github.com/apazga) fixing some broken links in the Git Environments documentation * [Alex Rowley](https://github.com/rowleyaj) for surfacing the --puppetfile and --module options for `r10k puppetfile install` * [Austin Blatt](https://github.com/austb) (our summer intern!) for extracting the vendored Puppetforge code and pushing it into the upstream puppet_forge gem. * [Branan Riley](https://github.com/branan) for updating the quickstart guide for Puppet 4. * [Darrell](https://github.com/darrell) for adding documentation for the :local module type * [David Pashley](https://github.com/dpash) for documenting potential issues with Rugged/SSH on Ubuntu * [Dennis](https://github.com/pskrz) for clarifying path information in the quickstart guide * [E. Dunham](https://github.com/edunham) for adding an entry to the FAQ to explain the name of r10k * [Kirill Kondratenko](https://github.com/cybem) for contributing the :local module type (RK-149) * [Louis Mayorga](https://github.com/lmayorga1980) for finding and reporting RK-143, and testing fixes for that issue. * [Mark McKinstry](https://github.com/mmckinst) for fixing some broken links in the FAQ * [Patrick Robinson](https://github.com/nemski) for submitting GH-516 to catch and handle ArgumentErrors raised when loading Puppetfiles * [Thomas Lapinski](https://github.com/thlapin) for fixing a bug where switching from a Git module to a Forge module would incorrectly leave the Git module version installed. * [Thomas Mueller](https://github.com/vinzent) for updating the r10k homepage link in the gemspec. ### User notes (RK-70) Expose Forge baseurl setting in r10k.yaml The Puppet Forge URL can now be changed from the default forgeapi.puppetlabs.com to a user specified value. This allows users to use private/testing Puppet Forge instances. See https://github.com/puppetlabs/r10k/blob/master/doc/dynamic-environments/configuration.mkd#baseurl for documentation on using the baseurl setting. (RK-96) Correctly switch from Forge modules to Git modules If a Git version of a module had been installed via a Puppetfile and the Puppetfile entry was updated to use the Forge version of the module, r10k would only check the metadata - and if the metadata of the Git version matched then r10k would assume that the Forge module was installed. This has been fixed so that when checking the status of a Forge module r10k specifically looks for a `.git` directory and considers the module mismatched when the directory is present, which means that it will correctly replace a Git version of a module with the Forge version. (RK-142) Add machine readable output for `r10k deploy display` The output format for `r10k deploy display` was a YAML-like, janky hand rolled version; this has been replaced with the ability to specify an actual data format for the output. The default output type is now YAML. (GH-477) Add `--fetch` option for `r10k deploy display` When `r10k deploy display` is run it might be useful to see which environments don't yet exist; the added `--fetch` option allows r10k to update the environment sources to check for missing environments when displaying environments. (RK-143) Use argument vector when executing commands on Windows R10k used the `Open3.capture3` call, and used to concatenate the string to execute and join it with spaces. 
However Windows loves including spaces in file names, which creates all sort of weird behavior especially when running shell commands over SSH to Windows. This has been fixed to use the argument vector form of `capture3` to avoid these issues. (RK-149) Add :local module type The new :local module type allows users to add modules to the Puppetfile that are included in a control repository, so that modules can be kept in the Puppetfile moduledir without them being destroyed. (CODEMGMT-345) Write deploy signatures to a file If the code deployed by r10k is copied to another location without the cached repos, it becomes impossible to use Git to interact with the repository and see which version of code r10k deployed. R10k now creates a `.r10k-deploy.json` file that records the time and SHA of the last code deployment. (RK-80) Support non SSH key based authentication for rugged provider An error in how the Rugged Git provider provided SSH credentials prevented HTTPS urls requiring authentication to function. This has been fixed so that only SSH urls are provided ssh private key credentials, and HTTPS urls can provided a username and password in the URL for authentication. (RK-90) Don't raise an error when forge modules are duplicated A bug in how r10k Forge modules created caused r10k to crash when two forge modules tried to create the same directory. This has been fixed and restores the old behavior of letting the last module specified win. In the long term trying to specify two modules with the same name will produce an error, but in the mean time this fix restores existing behavior so that r10k won't roll over and die on error. (RK-120) Enable --config as top level command option R10k was originally built with environment deployment in mind, but the `r10k puppetfile` subcommand made this assumption faulty. However the way the config file was loaded only worked with `r10k deploy` subcommands, which prevented users from using a custom cachedir or use other settings when running `r10k puppetfile`. This has been corrected so that r10k can always read a config file when running any subcommand. (RK-175) Don't crash on unresolvable Rugged ref When r10k was using the rugged provider, checking out an unresolvable ref would throw a TypeError because r10k didn't always ensure that the ref was resolvable. This has been fixed so that r10k explicitly ensures that refs can be resolved before it checks them out. (RK-174) Always ensure alternates file is up to date When r10k created a Git working repository it permanently linked the repository to a cached Git repository via the Git alternates file. However if the cachedir setting changed, existing repositories would still reference the old path which could very badly break Git. This has been fixed so that r10k always ensures that the alternates file is up to date before any Git operations can happen, so that changing the cachedir will not break Git repositories. (RK-169) Print validation failures for invalid configs R10k used to have very lax handling of config file input, which would cause crashes during r10k runtime. This has been fixed so that r10k validates all configuration before it runs and prints out all configuration validation errors when validation fails. (RK-21) Indicate error when deploying non-existent environment When r10k was deploying a specific list of environments, it would skip any existing environment that didn't match the desired list of environments. 
However this meant that deploying a non-existent environment would not deploy any environments and r10k would silently exit with an exit code of 0. This has been fixed so that when r10k deploys a list of environments it makes sure that each environment can be updated, and when requested environments are missing it logs an error and exits with a non-zero exit code. (RK-163) Add deploy/write_lock setting Users can now prevent `r10k deploy` commands that change environments and modules from running via a config option; this allows users to lock out code deployments at certain times (code freezes and times outside of maintenance windows.) (GH-516) Handle ArgumentError when loading Puppetfiles The `r10k puppetfile check` command didn't output a very useful error when a module was given the wrong number of arguments; this has been fixed so that r10k catches ArgumentErrors raised while loading a Puppetfile and wraps it so that more information is added to the error message. (GH-469) Add `--puppetfile` and `--moduledir` options to `r10k puppetfile install` subcommand The `r10k puppetfile install` subcommand was able to set a custom puppetfile path and moduledir location via environment variables to match librarian-puppet, but they accidentally didn't match the librarian-puppet semantics - and environment variables aren't very nice to expose for configuring and application. The `r10k puppetfile install` subcommand now supports command line flags to set these options. (CODEMGMT-339) Add command line option to set cachedir There are some scenarios where r10k needs to be run with a common config file, but on a specific basis may need to set a custom cachedir. To make this work `r10k deploy` now supports a `--cachedir` setting for these temporary overrides. 2.0.3 ----- 2015/8/13 This is a bugfix release that resolves a critical issue issue in installing PE only modules. ### Notes (RK-156) PE-only modules cannot be installed The mechanism used to load PE license information in r10k was preventing r10k from not being able to locate the pe-license Ruby library in PE 2015.2.0; this has been resolved by actually trying to load the relevant files instead of probing for a gem and then conditionally loading it. 2.0.2 ----- 2015/06/18 This is a maintenance release that improves error messages around installing modules from the Puppet Forge. ### User notes (RK-109) Add context to connection failure errors If a connection to the Puppet Forge failed for any reason, the resulting exception would indicate the error type but not the host or proxy host. This made it hard to understand why connections were failing. This has been fixed so that r10k will include the host and optional proxy host in error messages when connections fail. (RK-121) Improve error handling for nonexistent Forge modules The r10k Puppet Forge connection error handling reports when HTTP requests fail, but would simply print the HTTP status code on failure. For cases where a nonexistent module or module release was queried, r10k now specially handles HTTP 404 status codes and indicates that the module/module release is missing instead of just throwing a generic HTTP error. 2.0.1 ----- 2015/06/09 This release fixes a couple of issues and defects found in 2.0.0. ### Thanks Thanks to Tim Meusel (https://github.com/bastelfreak) and ktreese (https://github.com/ktreese) for reporting GH-443 and GH-447. 
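The RK-163 entry above introduces the deploy/write_lock setting; a sketch of how a code freeze might be expressed in `r10k.yaml` (the message text is only an example):

```yaml
# Sketch only: the lock message is a placeholder
deploy:
  write_lock: 'Deploys are frozen for the current change window; contact the release team before removing this lock.'
```

While the lock is set, `r10k deploy` commands that would change environments or modules are prevented from running, per the RK-163 note.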
### User notes (RK-117), (GH-443), (GH-447) Add minitar as runtime dependency Minitar is a hard runtime dependency of r10k as part of the new Forge module implementation, but was only added as a development dependency which means that r10k could be installed without all of the required runtime dependencies. This oversight has been corrected; r10k will now pull in minitar at installation time. (RK-118) Readd '/etc/r10k.yaml' to config search path The '/etc/r10k.yaml' config path was deprecated in r10k 1.5.0, but this deprecation was only a soft deprecation and was easy to miss. While 2.0.0 is a backwards incompatible release it was too aggressive to remove this entirely, so '/etc/r10k.yaml' will continue to be respected/read in r10k 2.x. If this file is present and used then a deprecation notice will be logged. Apologies for the churn on this! 2.0.0 ----- 2015/06/08 This is a backwards incompatible feature release, but as major releases go this is a pretty small one. Some changes introduced into master included some breaking changes and SemVer dictates that we do a major release in this case. Actual changes that will affect end users should be limited; the only one that should have big impact is the removal of Ruby 1.8.7 support. Any other issues encountered should be treated as bugs and will be fixed. ### User notes (GH-1) Native support for installing modules from the Puppet Forge R10k can now directly install modules from the Puppet Forge, rather than shelling out to the Puppet module tool. This will allow for later optimizations like caching module downloads to speed up installing module across multiple environments. (RK-83) Allow '-' as a module name separator (RK-53) Remove '/etc/r10k.yaml' from config file search path. R10k 1.5.0 added '/etc/puppetlabs/r10k/r10k.yaml' to the paths checked while looking for a config file, in order to keep in convention with the rest of the Puppet Labs config files. In 2.0.0 the old location, '/etc/r10k.yaml', has been removed. (RK-57) Notify users of purgedirs key deprecation The purgedirs key was used in r10k 0.0.9 but has not been used in the 1.x release series; if this setting is given then r10k will generate a warning indicating that it is not used. ### Deprecations/Removals (RK-47) Remove support for Ruby 1.8.7 Given that Ruby 1.8.7 has been EOL for nearly two years, it's time for r10k to drop support for 1.8.7 as well. The Puppet 4 all in one package ships with Ruby 2.1.6, so even if you're on a platform that doesn't have Ruby 1.9 or greater you can install r10k into the Puppet collection environment and used the bundled Ruby. (RK-54) Remove deprecated subcommands R10k 1.0.0 reorganized a number of subcommands but retained the old subcommand names for compatibility with 0.0.x; since it's been over 2 years since 1.0.0 has been released these commands have finally been removed. (RK-113) Remove deprecated Task classes and namespaces The R10K::Task namespace turned out to be unwieldy in practice and has been replaced with the R10K::Action namespace. Use that for running r10k as an application. (RK-114) Remove deprecated git classes The reorganization of the Git code in 1.5.0 rendered a number of classes obsolete; they've been removed. 1.5.1 ----- 2015/04/09 ### Thanks Thanks to all the users that helped track down RK-86, and Zack Smith in specific for tracing the source of the bug. 
### User notes (RK-62) Warn when Rugged is compiled without SSH or HTTPS transports If Rugged/libgit2 was compiled without libssh2, trying to access a Git repository via SSH throws a fairly cryptic error. It's not terribly easy to specially handle the error message that's being logged, but in lieu of that r10k now checks the Rugged gem to make sure it's compiled with support for SSH and HTTPS when the Rugged Git provider is used. (RK-79) Bump minimum required version of faraday_middlware-multi_json Faraday 0.9.0 changed the API for middleware plugins, which made it incompatible with the faraday_middleware-multi_json plugin. That plugin supported the new API in version 0.0.6 but the minimum required version in the r10k gemspec was not updated, allowing r10k to be installed with incompatible versions of faraday and faraday_middleware-multi_json. This has been fixed by requiring the minimum compatible version of faraday_middleware-multi_json. (RK-86) Git modules don't properly track branches R10k 1.5.0 added smarter syncing for Git caches to reduce network traffic, but it accidentally caused branches to stop tracking changes to the remote branch. This has been fixed and the pre-1.5.0 behavior has been restored. 1.5.0 ----- 2015/04/02 ### Announcements Ruby 1.8.7 has had a good run, but it's time for r10k to think about moving on. As of r10k 1.5.0, support for Ruby 1.8.7 is officially deprecated. Issues affecting Ruby 1.8.7 will still be fixed, but will be of lower priority. Support for Ruby 1.8.7 will be dropped entirely in r10k 2.0.0. ### Thanks Thanks to the following contributors for their work on this release: * [Brett Swift](https://github.com/brettswift) for adding custom prefix values for sources (RK-35) * [Eli Young](https://github.com/elyscape) for adding additional debug information to `r10k version` (RK-37) * [Pete Fritchman](https://github.com/fetep) for removing a warning generated under Ruby 2.2 (RK-55) * [Theo Chatzimichos](https://github.com/tampakrap) for updating the dependency versions for r10k * [ETL](https://github.com/etlweather), [Ben S](https://github.com/juniorsysadmin), and [Robert Nelson](https://github.com/rnelson0) for their documentation contributions ### User notes (GH-57) Git based modules fail to detect presence of git binary As part of RK-17, r10k now checks to make sure that the `git` binary is present and executable before trying to run any commands. If Git is missing, r10k will fail hard instead of trying to run and failing on the first failed command. (GH-195) (RK-35) Allow sources to specify a custom prefix string For r10k deployments that have separate sources for Puppet code and Hiera data, the created environment paths must line up between the code and data repos. With basic prefixing where the source name is used as the prefix string, it's not possible to use prefixing and commonly named environments. This has been fixed so that the source `prefix` option can be given true, false, or a string which is a custom value to use for prefixing. (RK-17) Add rugged/libgit2 based Git implementation The libgit2 library and Ruby rugged gem provide a native interface to Git for Ruby that is faster and more consistent than shelling out to Git. An additional rugged based Git implementation has been added along the original 'shellgit' implementation, and the implementations can be swapped out as needed. 
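As a sketch of the provider selection added in RK-17, the Git implementation can be chosen in `r10k.yaml` roughly as shown below (see the RK-32/RK-33/RK-38 entries later in this section for the full set of provider settings); the private key path is hypothetical:

```yaml
# Sketch only: the private key path is a placeholder
git:
  provider: 'rugged'                      # or 'shellgit' to shell out to the git binary
  private_key: '/root/.ssh/r10k_id_rsa'   # used by the rugged provider for SSH remotes
```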
(RK-24) Add '/etc/puppetlabs/r10k/r10k.yaml' to config search path In order to be more consistent with the rest of the Puppet ecosystem, r10k will now check for configuration in a standard location inside of '/etc/puppetlabs'. The old location, '/etc/r10k.yaml' is still respected so no configuration change is needed at this point, although the old location will be deprecated in the future. (RK-25) Warn if both '/etc/r10k.yaml' and '/etc/puppetlabs/r10k/r10k.yaml' exist (RK-31) Remove undocumented search for 'r10k.yaml' Early versions of r10k would try to search for 'r10k.yaml' in parent directories, in the same manner that programs like Git and Bundler recursively search for configuration. However this functionality isn't terribly useful, is a bit surprising, and adds a lot of complexity. This behavior has been removed in 1.5.0. (RK-32, RK-33, RK-38) Provide configuration options for Git providers The r10k Git providers can now be selected and configured via configuration in r10k.yaml. See the [Git specific documentation](doc/git) for more information on these settings. (RK-37) Support higher verbosity levels for `r10k version` If `r10k version` is run with the `--verbose` flag, diagnostic information about r10k will be included along with version information. (RK-39, RK-66) Improved logging/messaging R10k now logs more information about what's going on and is more consistent about which levels it displays messages. (RK-74) Improved log formatting The log formatting that r10k uses was added in some distant point in the past where there were few users, and wasn't really designed with users in mind. The formatting has been improved so that at lower log levels the formatting will include the log level and nothing else, and at higher levels it will include timing information. (RK-75) Optional log coloring At higher log levels r10k can produce a lot of logging information at a rapid rate, and in general errors and warnings are not very visually distinct and easy to miss. To help solve these problems r10k 1.5.0 now has optional colored logging to help provide more information/context at a glance. Coloring can be enabled with the `--color` option. (GH-265) (RK-11) Unix commands with > 64KiB of output no longer deadlock Unix pipes have a maximum buffer size of 64KiB, and if the pipe buffer fills then subsequent writes will block. Since r10k used to wait for a subprocess to finish before reading from the attached pipes, if the process filled the buffers then the processes would deadlock. This has been fixed by continually reading from the subprocess pipes to prevent the buffers from filling up. Unfortunately implementing this correctly requires reading from the pipes until EOF, which breaks the ssh ControlPersist functionality. Supporting both the ControlPersist functionality while preventing deadlock introduces too many potential race conditions, so unfortunately workarounds for this issue can't be accepted. See https://bugzilla.mindrot.org/show_bug.cgi?id=1988 for more information about the ssh ControlPersist bug. ### Developer notes (GH-142) Use Forge v3 API R10k was using the old and deprecated v1 Forge API to determine the latest version of Forge modules when using the `:latest` module version. This has been fixed so that the v3 API is used. (RK-16) Decouple Git platform dependent and independent code The original code that interfaced r10k and Git was very tightly coupled; there was no defined interface and it was expected that r10k would always shell out to Git. 
This has been fixed by extracting the shellout specific code to a library and defining a common interface for Git classes so that different Git implementations can be used. The existing shellout implementation has been renamed to 'shellgit' and should be functionally equivalent to the Git implementation in previous versions of r10k. (RK-55, GH-355) Prevent warnings on Ruby 2.2 Ruby 2.2 generates warnings when comparing values and `#<=>` raises an exception; the code triggering this behavior has been cleaned up to prevent a warning from being logged. (RK-65) Switch to using the semantic_puppet gem R10k is switching from the old vendored copy of 'SemVer' that was stolen from Puppet to the 'semantic_puppet' gem, which is the library that Puppet now uses for version comparisons and parsing. (RK-48) Ignore deleted versions when fetching latest module version When looking up the latest version of a module on the Forge, if the latest version had been deleted r10k would try to install that deleted version anyways. This has been fixed so that all deleted module releases will be ignored. 1.4.2 ----- 2015/03/13 ### Announcements The r10k ticket tracker is moving to the Puppet Labs issue tracker; new issues should be filed on the [R10K project](https://tickets.puppetlabs.com/browse/RK). The GitHub issue tracker will remain online for the near future but is deprecated in favor of JIRA. Issues from the GitHub tracker will be prefixed with "GH-"; issues from JIRA will be prefixed with "RK-". ### User notes (RK-4) Raise meaningful errors on missing sources If the 'sources' key in r10k.yaml was left unset, was misspelled, or was empty, r10k try to blindly iterate through it as a hash and would subsequently raise an error. This has been remedied so that if the value is missing or empty an error will be raised. Note that this doesn't handle the case where r10k.yaml is empty or malformed; that issue is being tracked as RK-34. (GH-310, RK-36) `r10k deploy display -p --detail` fails on Ruby 1.8.7 Ruby 1.8.7 does not implement the comparison operator on Symbols, which was being used by the display command to ensure that hashes were printed in a consistent order. This has been fixed by backporting the Ruby 1.9 Symbol sorting to 1.8.7. ### Thanks Thanks to [Eli Young](https://github.com/elyscape) for reviewing PR 337 and providing helpful feedback. 1.4.1 ----- 2015/01/09 ### User notes (GH-254) Puppetfile subcommands use non-zero exit codes on errors. The Puppetfile `install` and `purge` commands would always exit with an exit code of 0, regardless of if any errors occurred or if there was no Puppetfile available. This has been now corrected so that runtime errors cause r10k to exit with a non-zero exit code. (GH-260) Normalize deployed environment names on the command line. Version 1.4.0 removed the environment name normalization needed to deploy environments that had their directory names normalized; this has been remedied so that environment names on the command line are also normalized to match the corrections that r10k will make to the environment names. (GH-269) Improved error messages when trying to use non-existent Git refs. If a Git module tried to use a Git ref that did not exist, it would output a particularly unhelpful error message that didn't indicate what actually failed. This has been fixed so that if an invalid ref is used, r10k will actually report that the ref could not be used. What a brave new world we inhabit! (GH-271)/(GH-275) Report the name of invalid module names. 
The fix for GH-92 released in 1.4.0 added better handling and parsing of module names, but also added stricter parsing of module names and disallowed invalid characters that Puppet itself could not use. However, when r10k encountered such an invalid module name, it would not report the module with the invalid name, making debugging harder than needed. In 1.4.1, when r10k encounters an invalid module name it reports the invalid module name in the error message. ### Thanks Thanks to the following contributors for their work on this release: * [Lex Rivera](https://github.com/rlex) for discovering and reporting the r10k puppetfile exit code bug (GH-254). * [Eli Young](https://github.com/elyscape) for discovering and fixing the environment normalization bug (GH-260). * [Clayton O'Neill](https://github.com/dvorak) for adding better error reporting of invalid module names (GH-275). Additional thanks to all those in #puppet and #r10k for endlessly helping new users of r10k, your assistance is invaluable! 1.4.0 ----- 2014/12/2 ### User notes (GH-40) Display expected and actual module versions When displaying the state of a deployment, modules would report a version but would not indicate if that was the expected version or the actual version. This has been changed so that both the expected and actual module version information is displayed. Because determining the actual version for modules can be slow, the `--detail` flag must be passed to display this information. (GH-43) Using a relative `moduledir` in the Puppetfile is relative to the Puppetfile The `moduledir` setting in the Puppetfile could be used to set a custom directory for Puppetfile modules, but if a relative path was used it was relative to the current working directory. As of 1.4.0 a relative `moduledir` setting will be expanded relative to the Puppetfile, which should make it much easier to use a directory other than 'modules' for the Puppetfile installed modules. (GH-68) Add alias for `r10k deploy display` The `r10k deploy display` subcommand had unfriendly syntax, and now has an alias of `r10k deploy list`. (GH-92) Support `mod 'owner/module'` for Git and SVN modules The original implementation of Git and SVN based modules assumed that the module name was only the name component, and did not include the owner component of the module. Because of this the full module name was added to the path, which meant that a Git module or SVN module called `foo/bar` would be created as `$moduledir/foo/bar`, after which r10k would check for stale modules, see a module called `foo`, and delete it. This has now been corrected so that all modules may have an owner and name, and only the name will be used when constructing the module path. Issues also closed as part of GH-92: * GH-78 (GH-96) Provide output from Git command failures When r10k encounters an error while running a command, it will always log the failed command and the output of the command. This should make it dramatically easier to diagnose issues with the underlying commands used by r10k without having to re-run the failing r10k command with a myriad of command line flags. Issues also closed as part of GH-96: * GH-46 * GH-94 (GH-121) Support for calling an arbitrary script after deployment Users can now specify a `postrun` setting in `r10k.yaml` to run an arbitrary command after an environment deployment finishes.
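As a rough sketch, the `postrun` value is given as an argument vector in `r10k.yaml`; the curl-based notification below is purely illustrative and should be replaced with whatever command suits your infrastructure:

```yaml
---
postrun: ['/usr/bin/curl', '-F', 'deploy=done', 'http://my-app.site/endpoint']
```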
The current implementation is fairly simple and isn't passed additional information about the deployment and is mainly meant to notify other services that the deployment has completed. Future versions of r10k will provide more information to this command so that more complex actions can be taken on success and failure. (GH-155) Allow SVN credentials to be added for modules/environments, disallow interactive SVN When interacting with SVN modules, r10k could run SVN commands that could try to prompt the user for input but were not provided access to stdin. R10k is meant to be non-interactive so credentials need to be provided in some manner, but cannot be provided directly by the user. As of 1.4.0 SVN environments and modules can supply a username and password to be used when interacting with the SVN remote, and SVN commands are run with `--non-interactive` to prevent commands from trying to grab stdin. **Note**: SVN credentials are passed as command line options, so the SVN credentials may be visible in the process table when r10k is running. If you choose to supply SVN credentials make sure that the system running r10k is appropriately secured. (GH-169) Perform non-blocking reads on stderr in Subprocess When using SSH with a Control Master, the first time an SSH connection is launched it will persist and will hang onto stderr from the parent process. R10k was using blocking IO to read from child processes and assumed that the file descriptors would be closed when the launched process exited, and the persistent SSH process would cause r10k to hang. The subprocess code has been updated to perform nonblocking reads of stderr; when the child process exits it assumes that even if stderr is held open nothing else should be written to it, drains the buffered pipe content and returns with that. This is working around buggy behavior in SSH, but this problem doesn't look like it'll go away so the best course of action is to incorporate this fix into downstream. (GH-180) Don't leak FDs when executing subcommands R10k was not explicitly closing all file descriptors, and on Ruby 1.8.7 these would not be closed by the garbage collector, causing file descriptors to leak. This has been fixed and all file descriptors should be closed after each subprocess is invoked. Major thanks to Jeremy Asher for discovering and fixing this. (GH-193) Don't purge environments if environment sources can't be fetched The original behavior for deploying environments with r10k was to fetch sources, deploy environments from those sources, and then clean up any orphaned environments. If a source had been fetched before but could not be reached then r10k would use the previously fetched branch information to update environments. In normal cases this would provide a reasonably robust failure mode. However, this meant that if no sources had been be fetched or the source remote information was typoed, r10k would have no source information, could enumerate no environments, and then enter HULK SMASH mode where it would delete all unmanaged environments - AKA everything. This is an uncommon failure mode but could bite environments that were setting up r10k for the first time. To resolve this issue, r10k will try to fetch all sources and if any source fails to be fetched then r10k will abort the entire deployment. This means that r10k will fail very early before any changes have been made which is a safe time to fail. 
If the errors were transitory then r10k can be run again, and if the failures are permanent then it's hard to safely update anything and extremely dangerous to try to delete any environments. (GH-202) Different environments cannot manage the same path R10k allowed for multiple sources to create environments but did not have semantics for environments from different sources managing the same path. If this happened, the resulting behavior would be undefined and could do any number of strange things. The approach to this was by convention and prefixing was recommended to avoid this, but it's still not a great approach. This was resolved by looking for environment collisions before deploying; if collisions exist then r10k will abort the deployment. Note: The commit that fixed this referenced GH-123 instead of GH-202. (GH-214) Rescue SyntaxError and LoadError when evaluating Puppetfiles Since Puppetfiles are just a Ruby DSL it's possible to have multiple Puppetfiles that are aggregated by using `require` or `load`. However these methods raise exceptions that don't descend from `StandardError`, so the normal error handling would not catch them, so a malformed Puppetfile could catastrophically crash r10k. Because the Puppetfile is not an actual source file in the conventional sense we can recover from these errors and continue, so r10k will now recover from these errors. If a SyntaxError or LoadError is raised while evaluating a Puppetfile, r10k will rescue them and wrap them in an R10K::Error and then raise that, which can be caught and handled as a normal, non-critical exception. (GH-221) Only deploy modules once for each environment deploy If an environment was deployed for the first time and `--puppetfile` was specified, the initial creation would create the environment and then deploy modules, and then the modules would be deployed again because `--puppetfile` was separate code. The deploy logic has been refactored to consider `--puppetfile` being passed or the environment being created to be equivalent and will only deploy modules once. ### Developer notes `R10K::Environment::Base#sync` is no longer recursive; callers are expected to handle recursive updates if needed. The `R10K::Action` namespace has been added to provide an API to r10k functionality. These should be used by any code that wants to interact with r10k and should provide an alternative to shelling out to r10k or using `R10K::CLI` directly. The individual action objects should be called through `R10K::Action::Runner` so that application setup and teardown is handled. ### Thanks Thanks to the following contributors for their work on this release: * [Robert Nelson](https://github.com/rnelson0) for his work on documenting workflows and best practices with r10k and adding convenience aliases for `r10k deploy display` * [Wolf Noble](https://github.com/rmnwolf) for homogenizing markdown file extensions * [Thomas Bartelmess](https://github.com/tbartelmess) for updating the required version of cri * [Theo Chatzimichos](https://github.com/tampakrap) for correcting the license format to conform to SPDX * [Richard Raseley](https://github.com/richardraseley) for writing a quickstart guide for r10k * [Matthew Haughton](https://github.com/3flex) for fixing documentation typos * [Markus Frosch](https://github.com/lazyfrosch) for fixing a regression in how duplicate environments are checked for, before the regression was ever released. Good catch! 
* [Jeremy Asher](https://github.com/jeremyasher) for fixing file descriptor leaks and hanging on open file descriptors in the Subprocess module. Great sleuthing on this! * [Guzman Braso](https://github.com/guzmanbraso) for adding Debian support and fixing some bugs in the quickstart documentation. * [David Schmitt](https://github.com/DavidS) for fixing markdown syntax errors so that link URLs would render properly in GitHub. * [Christophe Bliard](https://github.com/cbliard) for fixing the Puppetfile moduledir setting to treat relative paths as relative to the Puppetfile. * [Christoph Föhrdes](https://github.com/cfoehrdes) for fixing a copy/paste error in the Puppetfile documentation. In addition to those who contributed to the code base, thanks to all those who reported and commented on issues; user input makes it much easier to make r10k a better project! 1.3.5 ----- 2014/11/13 ### User notes (GH-212) Force use of json_pure on Ruby 1.8.7 Ruby 1.8.7 does not ship with a JSON library, so r10k has depended on json_pure to ensure that there's always a JSON library available. However, there is a quirk in multi_json in how it probes for JSON implementations and may load the wrong gem, which percolates up and breaks the JSON parsing code used when querying for forge module versions. To resolve this, json_pure is always used on Ruby 1.8.7. 1.3.4 ----- 2014/09/14 This bugfix release incorporates all fixes added in 1.2.4. 1.3.3 ----- 2014/09/11 ### User notes (GH-178) Failing to fetch a source git repo can wipe out environments When updating Git sources at the beginning of a deployment, if the fetch was interrupted r10k could cache an empty list of environments. This could cause r10k to remove all environments for that source. This was due to a method directly using a value that was supposed to be lazily evaluated and memoized. It has been fixed so that even if r10k cannot fetch a source it will still be able to deploy modules for environments, clean up removed environments, and correctly remove unmanaged environments. (GH-186) Rescue SyntaxError when checking Puppetfile syntax When a Puppetfile with invalid syntax is parsed it raises a SyntaxError, and the `r10k puppetfile check` code was not specifically handling that. Thus when checking an invalid file r10k was actually crashing and not gracefully handling the error. This was fixed to cleanly rescue the SyntaxError, optionally print stacktraces, and print an all-ok message on success. 1.3.2 ----- 2014/07/27 This bugfix release incorporates all fixes added in 1.2.3. 1.3.1 ----- 2014/07/16 ### User notes (GH-161) Deployments fail where a branch has \W in the git branch name In 1.3.0 environment naming was partially reworked to allow better handling of per-environment deployment, but unfortunately this caused a regression where environments would be named differently in 1.3.0. This fix changes the environment deployment on a per-name basis to use the normalized name instead of the raw Git branch name. This bugfix release also incorporates all fixes added in 1.2.2. ### Thanks Thanks to Chris Spence for his work on this release. 1.3.0 ----- 2014/06/07 ### User notes #### (GH-104) SVN support for environments. R10k can now dynamically generate environments based on SVN repositories. SVN repositories must conform to the conventional SVN repository structure with the directories trunk/, branches/, and optionally tags/ in the root of the repository.
The trunk/ directory is specifically mapped to the production environment, branches are created as environments with the name of the given branch. Please note that since SVN support for environments should be considered preliminary and may still have some kinks to be worked out, so use it with caution in production. #### (GH-112) Modules can be deployed in a single environment. When deploying modules with `r10k deploy module `, users can specify the `-e ` flag to update modules in a single environment. #### (GH-117) Controllable behavior for invalid Git branches. Git sources can now tune how r10k behaves when it encounters a git branch with a non-word character. Valid values are 'correct_and_warn' which emits a warning and sanitizes the environment name, 'correct' which silently corrects the environment, and 'error' which emits an error and ignores the environment. ### Developer notes #### IMPORTANT: as of 1.3.0, r10k is relaxing how it implements semantic versioning. There are a lot of internal APIs in r10k that need to be improved or overhauled, and making changes in a backwards compatible manner has been impeding development on a number of important features. There's no indication that there are any consumers of the r10k internal APIs, and if that's the case then r10k doesn't help anyone by maintaining SemVer for its internal APIs. As of 1.3.0, r10k is dropping guarantees about API compatibility for most of the core functionality. The `R10K::Task*` classes were designed to be the primary interface for external use and those will remain backwards compatible for 1.x. However any code around configuration parsing, deployments, sources, environments, and modules may have API changes in minor versions. That being said, if you are using any of these APIs and you experience breakage, you're not out of luck. If an API change actually does affect you please report it as a bug and those specific APIs can probably be fixed up. Hopefully this will ease development of r10k while not making the lives of external developers too painful. #### (GH-116) Allow alternate implementations of sources and environments This allows the groundwork for allowing users to implement plugins for sources and environments. A real API specification for sources and environments has been started, and sources and environments can be defined at runtime. In the long run r10k will add a plugin system for loading additional code from Rubygems and other sources, so r10k will be extensible without requiring modifications to the source. 1.2.4 ----- 2014/09/14 ### User Notes (GH-188) Call puppet module tool install with --force for downgrades 1.2.3 ----- 2014/07/27 ### User Notes (GH-173) Fixed a bug with Ruby 1.8.7 with Pathname objects. 1.2.2 ----- 2014/07/16 ### User Notes (GH-165) `r10k puppetfile` only consumes handled command line options. Previously, passing `-v` or other commands when running `r10k puppetfile *` could result in this error: r10k puppetfile install --help --trace Error while running: # This was due to overly greedy code passing in all options from the command line to the TaskRunner. This has been fixed so only known options are passed along, and options that aren't relevant (such as :verbose) will be ignored. (GH-158) Log levels are now documented in the command line --help pages. (GH-137) Git remotes are now correctly updated. 
A regression in the Git remote handling meant that git remotes would never be properly updated when switching Git environments and modules from one remote to another, and the git alternates file was never updated properly. This has been fixed so that when the Git remote is updated, all references to the remotes and alternates will be updated. (GH-163) All Git tags are deleted when switching Git remotes Git tags cannot necessarily be transferred from one Git repository to another, so when a Git repo has its remotes changed all tags are deleted to prevent stale tags from overwriting tags from the new repo. 1.2.1 ----- 2014/04/21 ### User Notes (GH-93) r10k deploy subcommands now respect the --help flag. (GH-100) The addition of a faster command execution library was POSIX centric, but there were a number of users who were running r10k on Windows, which broke their environments. Support for Windows has been re-added and Windows is now a supported platform for using the r10k puppetfile commands. Please note that r10k now requires Ruby 1.9.3 on Windows to function. ### Thanks Thanks to Sam Kottler and Daniel Dreier for their work on this release. 1.2.0 ----- 2014/03/07 ### User Notes Preliminary support for Puppetfile modules from SVN sources. SVN repositories can track the latest available revision or may be pinned to a specific revision. Forge modules can now track the latest available version. This can be enabled by setting the module version to `:latest`. Git based Puppetfile modules can now be specified as branches, tags, and commits. When tags and commits are specified, r10k can perform optimizations when updating the given repositories to reduce network accesses. Command execution has been greatly improved. The old library for executing commands (systemu) had very high overhead and was 50 - 100 times slower than %x[] or fork/exec. It's been replaced with a custom process execution implementation. Modules can swap out sources. When an existing module is changed from Forge to Git, for instance, the existing module will be removed before the new module is installed. (GH-30) 1.2.0rc2 -------- 2014/02/27 ### Notes Git repositories were not tracking their upstream remotes; repos should now properly update upstream changes. Git reference clones now fetch their cache remotes immediately after the initial clone. 1.2.0rc1 -------- 2014/02/08 Release Candidate 1 for 1.2.0 1.1.4 ----- 2014-02-24 This is a backwards compatible bugfix release. ### Notes * (GH-90) Multiple environments with the same name but with different sources were previously colliding and some environments were being ignored. This has been fixed and all environments should be deployed when updates are run. ### Thanks Thanks to the following contributors for their extraordinary patience and help in chasing down GH-90: * Andreas Ntaflos (antaflos) * Igor Galić (igalic) 1.1.3 ----- 2014-01-26 This is a backwards compatible maintenance release. ### Notes * (GH-82) Added all git managed files, including README.markdown, CHANGELOG, and LICENSE to the gemspec for better compatibility with non-gem packages. 1.1.2 ----- 2014-01-06 This is a backwards compatible maintenance release. ### Developer notes * If Puppet and r10k are required in the same namespace, it's possible for the vendored copy of SemVer to conflict with the Puppet version. This was fixed by renaming the copy vendored in r10k and putting it under a namespace. 1.1.1 ----- 2013-12-11 This is a backwards compatible bugfix release.
### User notes * (GH-48) Environment prefixing always defaults to off. Users were already using r10k with multiple sources but in different directories, and prefixing breaks this behavior. Since this was a backwards incompatible change it had to be rolled back. * (GH-64) Multiple sources in a single directory no longer purge each other. ### Thanks Thanks to the following contributors for their help in 1.1.1: * Stig Sandbeck Mathisen * Gabriel M Schuyler 1.1.0 ----- 2013-09-30 This is a backwards compatible bugfix and feature release. ### User notes (GH-35) Puppetfiles can now specify a path to the moduledir, instead of assuming '/modules'. It can be set with the Puppetfile `moduledir` directive. Note that this is not compatible with librarian-puppet. (GH-53) Multiple environment sources can now be specified in a single directory. When multiple sources are specified, each environment will be prefixed with the source name. This can be enabled and disabled with the source `prefix` option. (GH-45) Documentation has been greatly expanded. (GH-56) New subcommand: `r10k puppetfile check` allows you to validate the syntax of a Puppetfile. (GH-66) Initial clones use `git checkout` when switching to a new branch instead of just `git reset`; without this change it would look like the wrong branch was checked out. (GH-59) r10k can now pull from Pulp repositories for Forge based modules. (GH-70) Handle unset HOME - in case that HOME is unset, assume that the current user is root. This mainly occurs when r10k is being run as the `prerun` command under Puppet. ### Developer notes The method mocking framework has been switched from mocha to rspec-mocks. Mocha is notoriously bad about breaking changes between versions and rspec-mocks is very robust, so Mocha has been ripped out and dropped as a dependency. Rspec expectations now use the `expect(thing).to` syntax instead of `thing.should`. A quasi settings framework has been extracted to make application settings less bad. In the long term a general application framework will be extracted from r10k to handle generic 'application' problems like this, but for now the settings framework is the way to handle singleton data. R10K::Git::Cache object memoization has been extracted into a standalone class instead of being grafted onto the class. All hail the single responsibility principle! R10K::Module code has been refactored. There's now a real base class instead of a hacky mixin with some metadata magic on top. ### Thanks Thanks to the following contributors for their help in 1.1.0: * Alex Linden Levy * Abhay Chrungoo * Adam Vessey * Chuck Schweizer * Elias Probst * Greg Baker * Jochen Schalanda * Theo Chatzimichos 1.0.0 ----- 2013-05-30 This is a backwards incompatible bugfix and feature release. ### Configuration The configuration file format of 0.0.9 should be compatible with 1.0.0, and any issues with that should be considered a bug. A longstanding issue was confusion between symbols and strings in r10k.yaml (GH-18). To resolve this, symbols and strings will be treated equally and should produce the same behavior. In the long run, symbols will probably be deprecated for the sake of conformity. ### Command line invocation A number of commands have been renamed. They still work but will emit a deprecation warning and will redirect to the new command implementation. The only exceptions are the `r10k environment cache` and `r10k environment stale` commands, but they were pretty much useless anyways.
Log level verbosity can now be specified by level name instead of level number. If --verbose is passed without a level, it will set the log level to `info`. ### Puppetfile support r10k can be used to deploy modules from a standalone Puppetfile. See `r10k puppetfile` for more information. Modules without a version in the format of 'foo/bar' will be assumed. (GH-21) ### API r10k handles versioning according to SemVer; since this is a major release this is a backwards incompatible API change. It's unlikely that this has had any extensions written on top of it, but if you have, then heads up. However, all versions of 1.x should be backwards compatible. ### Bugfixes A number of bugs were due to underlying architecture flaws. Part of 1.0.0 has been a significant architectural overhaul, so on top of all of the above changes there should be a lot of other bugs that have been fixed. r10k-4.0.2/CODEOWNERS000066400000000000000000000000541460033767200136740ustar00rootroot00000000000000* @puppetlabs/dumpling @puppetlabs/skeletor r10k-4.0.2/CONTRIBUTING.mkd000066400000000000000000000106221460033767200147070ustar00rootroot00000000000000Contributing ============ ## Bug reports and feature requests Bug reports and feature requests may be filed on the [GitHub issue tracker][github-issue-tracker]. When filing a bug report or feature request, please spend a moment to look for existing issue reports and update those with additional information if possible. [github-issue-tracker]: https://github.com/puppetlabs/r10k/issues ### Bug reports When filing a bug report, include as much relevant detailed information as possible. Try to include the following: * A descriptive summary of the bug * When the bug first occurred * What sort of events may have triggered the bug * The expected behavior * The actual behavior * Steps to reproduce If if you're seeing errors or crashes you can run R10K with the `-v debug2` to greatly increase the log level and `--trace` to print any stack traces that may be caught. Including a reproduction environment with something like Vagrant is tremendously helpful and can greatly speed up the time it takes to fix your issue! ### Feature requests When filing a feature request, include information about how important the feature is, how it would generally be used in your environment, and how often this feature would be used by other users as well. Please keep in mind that each added feature incurs additional maintenance costs, so feature requests should strive to solve a frequent pain point for users or generally make r10k a more effective tool for a large number of r10k users. The policy towards feature requests in r10k has been greatly shaped by Brian Granger's post on [features and scope in open source software][features-and-scope]. Try to include the following: * A descriptive summary of the feature * How you expect to use this feature * How important this feature is for your environment * How useful this feature would be for other users * Any potential caveats this may have or any issues this feature may have [features-and-scope]: http://brianegranger.com/?p=249 "Features and Scope in Open Source Software" ## Documentation Contributing fixes and improvements to the core documentation is a great way to help improve R10K. Improvements to the FAQ, example code, and so forth can go a long way towards making R10K more approachable for new users and existing users alike. For more information about the submission process please see the section on [code contributions](#code-contributions). 
## Code contributions This documentation is based on the Project Hydra [CONTRIBUTING](https://github.com/projecthydra/hydra-head/blob/master/CONTRIBUTING.md) guide. ### Making changes * Fork the repository on GitHub * Create a topic branch from where you want to base your work. * For new features, base your code off of 'master'. * For critical bugfixes, base your code off of the appropriate maintenance branch series, eg '1.2.x' or '1.3.x' * To create a new topic branch; `git checkout -b fix/master/my_contribution master` * Please avoid working directly on the 'master' branch. * Make commits of logical units. * Your commit message should include a high level description of your work. * Check for unnecessary whitespace with `git diff --check` before committing. ``` [code/subsystem] (GH-#) Present tense short summary (80 characters or less) More detailed description, if necessary. It should be wrapped to 80 characters. Try to be as descriptive as you can, even if you think that the commit content is obvious, it may not be obvious to others. You should add such description also if it's already present in bug tracker, it should not be necessary to visit a webpage to check the history. ``` * Make sure you have added the necessary tests for your changes. * Run _all_ the tests to assure nothing else was accidentally broken. ### Submitting changes * Push your changes to a topic branch in your fork of the repository. * Submit a pull request from your fork to the project. * Expect to have some further discussion of your submission prior to it being merged. Please note - code review is an essential part of the code review and submission process. In order to keep the code base clean and maintainable questions may be asked on implementation decisions and you may be asked to make additional changes to the contribution. Remember - feedback is never personal and should be given to benefit both the submitter as well as the project receiving the contribution. r10k-4.0.2/Gemfile000066400000000000000000000004671460033767200136040ustar00rootroot00000000000000source ENV['GEM_SOURCE'] || 'https://rubygems.org' gemspec group :extra do gem 'rugged', '~> 1.0', :platforms => :ruby end group :development do gem 'simplecov', '~> 0.22.0' gem 'ruby-prof', :platforms => :ruby end if File.exist? "#{__FILE__}.local" eval(File.read("#{__FILE__}.local"), binding) end r10k-4.0.2/LICENSE000066400000000000000000000010601460033767200133040ustar00rootroot00000000000000Copyright 2013, 2014 Adrien Thebo Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. r10k-4.0.2/README.mkd000066400000000000000000000107161460033767200137410ustar00rootroot00000000000000r10k ==== Puppet environment and module deployment [![Build Status](https://travis-ci.org/puppetlabs/r10k.png?branch=master)](https://travis-ci.org/puppetlabs/r10k) > R10k is supported and maintained by Puppet, but we consider it to be feature > complete and currently have no plans for any new development. 
We will keep it > working within the context of Puppet Enterprise, but we cannot make any other > maintenance promises at this time. Description ----------- [workflow]: https://puppet.com/docs/puppet/latest/environments_about.html R10k provides a general purpose toolset for deploying Puppet environments and modules. It implements the [Puppetfile](doc/puppetfile.mkd) format and provides a native implementation of Puppet [environments][workflow]. You might also consider [g10k](https://github.com/xorpaul/g10k) as a non-ruby based alternative. Requirements ------------ R10k supports the Ruby versions `>= 2.6.0`. It's tested on Ruby 2.6.0 up to Ruby 3.1.0 + Jruby. R10k requires additional components, depending on how you plan on managing environments and modules. - Installing modules from the Puppet Forge requires Puppet 7.0.0+ or later. Puppet 5 and 6 may work, but is generally not recommended. - Git is required for creating environments and modules from Git - SVN is required for creating environments and modules from SVN Installation ------------ ### Rubygems For general use, you should install r10k from Ruby gems: gem install r10k r10k help ### Puppet Enterprise 3.x Puppet Enterprise bundles a copy of Ruby, so if you do not want to use the system version of Ruby with r10k, you need to use the bundled PE gem command for installation: /opt/puppet/bin/gem install r10k r10k help ### Puppet 4 Puppet 4 bundles a copy of Ruby, so if you do not want to use the system version of Ruby with r10k, you need to use the bundled puppet gem command for installation. /opt/puppetlabs/puppet/bin/gem install r10k /opt/puppetlabs/puppet/bin/r10k help ### Bundler If you have more specific needs or plan on modifying r10k you can run it out of a git repository using Bundler for dependencies: git clone https://github.com/puppetlabs/r10k cd r10k bundle install bundle exec r10k help ### Arch Linux Arch Linux provides a [system package](https://archlinux.org/packages/extra/any/r10k/) for r10k. This is built against the [system Ruby](https://archlinux.org/packages/extra/x86_64/ruby/) (which is Ruby 3.0.2 as of 2021-08-03). This package is maintained by [Tim Meusel](https://github.com/bastelfreak). Usage ----- R10k has two primary roles: installing Puppet modules using a standalone Puppetfile, and managing Git and SVN based dynamic environments. For more information see the topic specific documentation: * [Puppetfile Documentation](doc/puppetfile.mkd) * [Updating your Puppetfile](doc/updating-your-puppetfile.mkd) * [Environment Deployment Documentation](doc/dynamic-environments.mkd) * [Quickstart](doc/dynamic-environments/quickstart.mkd) * [Common Patterns](doc/common-patterns.mkd) * [Workflow Guide](doc/dynamic-environments/workflow-guide.mkd) For more general questions, see the [FAQ](doc/faq.mkd). Development ----------- ### i18n R10k has now had all user-facing strings in error messages and log messages externalized. When adding new error or log messages please follow the instructions for [writing translatable code](https://github.com/puppetlabs/gettext-setup-gem#writing-translatable-code). ### l10n When localizing the strings found in R10k, follow the prescribed [translation workflow](https://github.com/puppetlabs/gettext-setup-gem#translation-workflow). The workflow describes the rake tasks provided to generate the .po files for each locale. 
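To illustrate the i18n convention described above, user-facing strings are wrapped in the gettext `_()` helper and interpolated after the translation lookup. The message text, logger call, and variable names below are only a sketch following the gettext-setup conventions linked above, not code taken from r10k:

```ruby
# A user-facing warning wrapped for translation. The string is looked up in the
# message catalog first, then %{name} is interpolated into the translated text.
logger.warn _("Unable to deploy environment %{name}") % {name: environment.name}
```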
Releasing --------- To release a new version of the r10k gem, ensure the [changelog](CHANGELOG.mkd) is up to date and open a pull request updating the [version file](lib/r10k/version.rb). When the PR is merged, a new release of the gem will be triggered. By default, a patch (Z) release will be triggered. To release a new major (X) or minor (Y) version, include `#major` or `#minor` (respectively) in your commit message to trigger the appropriate release. Getting help ------------ * [Puppet Community Slack](https://puppetcommunity.slack.com/) * Mailing lists: [puppet-users](https://groups.google.com/forum/#!forum/puppet-users) * Q&A: [Puppet Ask](https://ask.puppetlabs.com/questions/) ## Maintenance See [CODEOWNERS](CODEOWNERS) for current project owners. r10k-4.0.2/Rakefile000066400000000000000000000002641460033767200137510ustar00rootroot00000000000000spec = Gem::Specification.find_by_name 'gettext-setup' load "#{spec.gem_dir}/lib/tasks/gettext.rake" GettextSetup.initialize(File.absolute_path('locales', File.dirname(__FILE__))) r10k-4.0.2/bin/000077500000000000000000000000001460033767200130525ustar00rootroot00000000000000r10k-4.0.2/bin/r10k000077500000000000000000000005511460033767200135560ustar00rootroot00000000000000#!/usr/bin/env ruby require 'r10k/cli' require 'colored2' begin R10K::CLI.command.run(ARGV) rescue Interrupt $stderr.puts "r10k: Aborted!".red exit(1) rescue SystemExit => e exit(e.status) rescue Exception => e $stderr.puts "\nr10k: Runtime error: #{e.inspect}".red $stderr.puts e.backtrace.join("\n").red if ARGV.include? '--trace' exit(1) end r10k-4.0.2/doc/000077500000000000000000000000001460033767200130475ustar00rootroot00000000000000r10k-4.0.2/doc/common-patterns.mkd000066400000000000000000000032471460033767200167000ustar00rootroot00000000000000Common Patterns =============== This guide provides common patterns seen in the r10k community. These patterns are, of course, simply a guide. Understand why you are or are not using a specific pattern before implementing it. Repository Setup ---------------- Use a [Control Repo](https://docs.puppet.com/pe/latest/cmgmt_control_repo.html) to store your `Puppetfile`. Hiera data should be in the Control repo OR as a separate source in `r10k.yaml`. Each puppet module should be contained in its own independent forge module or repository. Editing modules --------------- All environment content is checked out into `$environmentpath/modules` on the node r10k is run on, either your puppet master or each masterless node. Edits made directly to these files will be lost on the next deploy. It is best practice not to edit code on the production system in the production paths. You may clone upstream repositories in a regular user's directory, on the master or on another machine. Create a new feature branch locally, make all required edits, and push the new branch upstream when ready for testing. R10k will deploy changes from the upstream repositories, eliminating the need for manual updates of the `$environmentpath` contents. Automated deploys ----------------- To reduce manual intervention, use a post-receive hook on your control and module repos to initiate an r10k deploy. You can develop your own or use a publicly available hook. 
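If you develop your own, a minimal post-receive hook might look like the sketch below; the r10k binary path and the assumption that every pushed branch maps directly to an environment are placeholders to adapt to your setup:

```sh
#!/bin/sh
# Minimal post-receive hook: deploy each pushed branch as a Puppet environment.
while read oldrev newrev refname; do
  branch="${refname#refs/heads/}"
  /opt/puppetlabs/puppet/bin/r10k deploy environment "$branch" --modules
done
```

Several maintained hooks already provide this behavior along with error handling and reporting.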
These include: * [Reaktor](https://github.com/pzim/reaktor) * [zack/r10k's Webhooks](https://forge.puppetlabs.com/zack/r10k#webhook-support) (Puppet Enterprise only) * [Simple Puppet Provisioner](https://github.com/mbaynton/SimplePuppetProvisioner) r10k-4.0.2/doc/dynamic-environments.mkd000066400000000000000000000026551460033767200177250ustar00rootroot00000000000000Dynamic Environments ==================== Dynamic environment deployment is the core functionality of r10k. Table of contents ----------------- * [Introduction](dynamic-environments/introduction.mkd): A brief description of what dynamic environments are and how they work. * [Quickstart](dynamic-environments/quickstart.mkd): Getting started with the essentials of dynamic environments and r10k. * [Configuration](dynamic-environments/configuration.mkd) A reference of dynamic environment configuration options and how they're used * [Git environments](dynamic-environments/git-environments.mkd) How r10k implements dynamic environments using git * [Master Configuration](dynamic-environments/master-configuration.mkd) How to configure your Puppet masters to use dynamic environments * [Workflow Guide](dynamic-environments/workflow-guide.mkd) A general-purpose workflow guide for using r10k. * [Usage](dynamic-environments/usage.mkd) A reference of r10k commands and how they're used. - - - Community guides ---------------- * [Building a Functional Puppet Workflow Part 3: Dynamic Environments with R10k](http://garylarizza.com/blog/2014/02/18/puppet-workflow-part-3/) * [Puppet Infrastructure with r10k](http://terrarum.net/blog/puppet-infrastructure-with-r10k.html) * [Puppet for vSphere Admins](http://rnelson0.com/puppet-for-vsphere-admins/) Installing and using r10k in a vSphere environment r10k-4.0.2/doc/dynamic-environments/000077500000000000000000000000001460033767200172205ustar00rootroot00000000000000r10k-4.0.2/doc/dynamic-environments/configuration.mkd000066400000000000000000000764611460033767200226020ustar00rootroot00000000000000Dynamic Environment Configuration ================================= * [Config file location](#config-file-location) * [Manual configuration](#manual-configuration) * [Automatic configuration](#automatic-configuration) * [General options](#general-options) * [cachedir](#cachedir) * [proxy](#proxy) * [pool_size](#pool_size) * [git](#git) * [provider](#provider) * [proxy](#proxy-1) * [username](#username) * [private_key](#private_key) * [oauth_token](#oauth_token) * [repositories](#repositories) * [private_key](#private_key-1) * [oauth_token](#oauth_token-1) * [proxy](#proxy-2) * [forge](#forge) * [proxy](#proxy-3) * [baseurl](#baseurl) * [authorization_token](#authorization_token) * [allow_puppetfile_override](#allow_puppetfile_override) * [Deployment options](#deployment-options) * [postrun](#postrun) * [sources](#sources) * [deploy](#deploy) * [purge\_levels](#purge_levels) * [deployment](#deployment) * [environment](#environment) * [puppetfile](#puppetfile) * [purge\_allowlist](#purge_allowlist) * [write\_lock](#write_lock) * [generate\_types](#generate_types) * [puppet\_path](#puppet_path) * [puppet\_conf](#puppet_conf) * [exclude_spec](#exclude_spec) * [Source options](#source-options) * [remote](#remote) * [basedir](#basedir) * [prefix](#prefix) * [prefix behaviour](#prefix-behaviour) * [strip\_component](#strip_component) * [strip\_component behaviour](#strip_component-behaviour) * [ignore_branch_prefixes](#ignore_branch_prefixes) * [ignore_branch_prefixes behaviour](#ignore_branch_prefixes-behaviour) * 
[filter_command](#filter_command) * [Examples](#examples) * [Minimal example](#minimal-example) * [Separate hiera data](#separate-hiera-data) * [Multiple tenancy](#multiple-tenancy) * [Multiple tenancy with external hieradata](#multiple-tenancy-with-external-hieradata) * [Experimental Features](#experimental-features) * [YAML Environment Source](#yaml-environment-source) * [YAMLdir Environment Source](#yamldir-environment-source) * [Exec environment Source](#exec-environment-source) * [Environment Modules](#environment-modules) * [Puppetfile module conflicts](#puppetfile-module-conflicts) * [Plain Environment Type](#plain-environment-type) * [Tarball Environment Type](#tarball-environment-type) R10k uses a configuration file to determine how dynamic environments should be deployed. Config file location -------------------- ### Manual configuration An explicit configuration file location be specified by providing the `--config` option to `r10k deploy`, like so: r10k deploy --config /srv/puppet/r10k.yaml [...] ### Automatic configuration If an explicit configuration file is not given, r10k will search the following locations for a configuration file. * `{current working directory}/r10k.yaml` * `/etc/puppetlabs/r10k/r10k.yaml` (1.5.0 and later) * `/etc/r10k.yaml` (deprecated in 1.5.0) In 1.5.0 r10k added `/etc/puppetlabs/r10k/r10k.yaml` to the configuration search path. The old location, `/etc/r10k.yaml` has been deprecated in favor of the new location. If both `/etc/puppetlabs/r10k/r10k.yaml` and `/etc/r10k.yaml` exist and explicit configuration file has not been given, r10k will log a warning and use `/etc/puppetlabs/r10k/r10.yaml`. General options --------------- ### cachedir The 'cachedir' setting specifies where r10k should keep cached information. Right now this is predominantly used for caching git repositories but will be expanded as other subsystems can take advantage of caching. For example: ```yaml --- # Store all cache information in /var/cache cachedir: '/var/cache/r10k' ``` The cachedir setting defaults to `~/.r10k`. If the HOME environment variable is unset r10k will assume that r10k is being run with the Puppet [`prerun_command`](https://puppet.com/docs/puppet/latest/configuration.html#preruncommand) setting and will set the cachedir default to `/root/.r10k`. ### proxy The 'proxy' setting configures a proxy server to use for all operations which occur over an HTTP(S) transport. You can override this setting for Git or Forge operations only by setting the 'proxy' setting under the 'git' or 'forge' settings. You can also override for a specific Git repository by setting a proxy in the 'repositories' list of the 'git' setting. By default, r10k will look for and use the first environment variable it finds in this list: HTTPS\_PROXY, https\_proxy, HTTP\_PROXY, http\_proxy. If no proxy setting is found in the environment, this setting will default to use no proxy. ```yaml proxy: 'http://proxy.example.com:3128' ``` r10k also supports using authenticated proxies with either Basic or Digest authentication: ```yaml proxy: 'http://user:password@proxy.example.com:3128' ``` The proxy server being used will be logged at the "debug" level when r10k runs. ### pool_size The pool_size setting is a number to determine how many threads should be spawn while updating modules. The default value is 4, which means modules will be updated in parallel. If this causes issues, change this setting to 1 to cause modules to be updated serially. 
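For example, the following r10k.yaml snippet forces serial module updates (any other positive number simply sets the size of the thread pool):

```yaml
---
# Update modules one at a time instead of with the default pool of 4 threads.
pool_size: 1
```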
### git The 'git' setting is a hash that contains Git specific settings. #### provider The provider option determines which Git provider should be used. ```yaml git: provider: rugged # one of shellgit, rugged ``` See the [git provider documentation](../git/providers.mkd) for more information regarding Git providers. #### default_ref r10k is unable to deploy a git module if no `ref` is specified. A `default_ref` can be set in the r10k config that will become the ref a module uses if not otherwise specified. This is the lowest priority setting for a module's `ref`. Read the [Puppetfile documentation](../puppetfile.mkd#git) for higher priority settings to determine a module's ref. ```yaml git: default_ref: main ``` #### proxy The 'proxy' setting allows you to set or override the global proxy setting specifically for Git operations that use an HTTP(S) transport. See the global proxy setting documentation for more information and examples. #### username The username setting is only used by the Rugged git provider. The username option sets the username for SSH remotes when the SSH URL does not provide a username. When used with a Git hosting service this is most sensibly set to 'git'. The username defaults to the username of the currently logged in user. ```yaml git: username: "git" ``` #### private_key The private_key setting is only used by the Rugged git provider. The private_key option specifies the path to the default Git SSH private key for Git SSH remotes. The private_key setting must be set if SSH remotes are used. ```yaml git: private_key: "/etc/puppetlabs/r10k/ssh/id_rsa" ``` #### oauth_token The oauth_token setting is only used by the Rugged git provider. The oauth_token option specifies the path to the default access token for Git HTTPS remotes. Public git repositories can be accessed via HTTPS without authentication, but the oauth_token setting may be set if any non-public HTTPS remotes are used. ```yaml git: oauth_token: "/etc/puppetlabs/r10k/token" ``` #### repositories The repositories option allows configuration to be set on a per-remote basis. Each entry is a map of the repository URL and per-repository configuration for that repo. ##### private_key A repository specific private key to use for SSH connections for the given repository URL. This overrides the global private_key setting. ```yaml git: repositories: - remote: "ssh://tessier-ashpool.freeside/protected-repo.git" private_key: "/etc/puppetlabs/r10k/ssh/id_rsa-protected-repo-deploy-key" ``` ##### oauth_token A repository specific access token to use for HTTPS connections for the given repository URL. This overrides the global oauth_token setting. ```yaml git: repositories: - remote: "https://tessier-ashpool.freeside/protected-repo.git" oauth_token: "/etc/puppetlabs/r10k/protected-repo-deploy-token" ``` ##### proxy The 'proxy' setting allows you to set or override the global proxy setting for a single, specific repository. See the global proxy setting documentation for more information and examples. ### forge The 'forge' setting is a hash that contains settings for downloading modules from the Puppet Forge. #### proxy The 'proxy' setting allows you to set or override the global proxy setting for all Forge interactions. See the global proxy setting documentation for more information and examples. #### baseurl The 'baseurl' setting indicates where Forge modules should be installed from. 
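For example, to proxy only Forge downloads while leaving other traffic on the global setting (the host and port below are placeholders):

```yaml
forge:
  proxy: 'http://proxy.example.com:3128'
```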
This defaults to 'https://forgeapi.puppetlabs.com' #### authorization_token The 'authorization_token' setting allows you to provide a token for authenticating to a Forge server. You will need to prepend your token with 'Bearer ' to authenticate to the Forge or when using your own Artifactory server. ```yaml forge: baseurl: 'https://private-forge.mysite' authorization_token: 'Bearer mysupersecretauthtoken' ``` #### allow_puppetfile_override The `allow_puppetfile_override` setting causes r10k to respect [`forge` declarations](https://github.com/puppetlabs/r10k/blob/main/doc/puppetfile.mkd#forge) in Puppetfiles, overriding the `baseurl` setting and allowing per-environment configuration of the Forge URL. Deployment options ------------------ The following options configure how r10k deploys dynamic environments. ### postrun The `postrun` setting specifies an arbitrary command to run after deploying all environments. The command must be an array of strings that will be used as an argument vector. The exit code of the command is not currently used, but the command should exit with a return code of 0 as the exit code may have semantics in the future. ```yaml --- postrun: ['/usr/bin/curl', '-F', 'deploy=done', 'http://my-app.site/endpoint'] ``` The postrun setting can only be set once. Occurrences of the string `$modifiedenvs` in the postrun command will be replaced with the current environment(s) being deployed, space separated. ### sources The `sources` setting specifies what repositories should be used for creating dynamic environments. It is a hash where each key is the short name of a specific repository (for instance, "qa" or "web" or "ops") and the value is a hash of properties for that source. ```yaml --- sources: main: # Source settings follow ``` ### deploy The `deploy` setting is a top level setting for controlling how r10k deploys behave. At this point only new settings are included under this setting, but in the long term the current top level deploy settings will be moved under `deploy`. #### purge\_levels The `purge_levels` setting controls how aggressively r10k will purge unmanaged content during a deployment. Given value must be a list of strings indicating at what levels unmanaged content should be purged. The valid string options for the list are 'deployment', 'environment', and 'puppetfile'. ```yaml --- deploy: purge_levels: [ 'deployment', 'environment', 'puppetfile' ] ``` This setting currently only impacts the "deploy environment" action. The default value is `['deployment', 'puppetfile']` to maintain parity with existing behavior before this setting was added. The effect of enabling the various purge levels is as follows: ##### deployment After each deploy, in the configured basedir, r10k will recursively remove any content found which is not managed by one of the sources declared in the r10k.yaml configuration. Note that disabling this level of purging may cause the number of deployed environments to grow without bound; deleting branches from a control repo would no longer cause the matching environment to be purged. ##### environment After a given environment is deployed, r10k will recursively remove any content found which is neither committed to the control repo branch that maps to that environment, nor declared in a Puppetfile committed to that branch. Enabling this purge level will cause r10k to load and parse the Puppetfile for the environment even without the `--modules` flag being set. 
However, Puppetfile content will still only be deployed if the environment is new or the `--modules` flag is set. Additionally, no environment-level content will be purged if any errors are encountered while evaluating the Puppetfile or deploying its contents. Note that the .r10k-deploy.json file is exempt from this purging. ##### puppetfile After Puppetfile content for a given environment is deployed, r10k will recursively remove any content found in a directory managed by the Puppetfile which is not also declared in that Puppetfile. Directories considered to be managed by a Puppetfile include the configured `moduledir` (which defaults to "modules") as well as alternate directories specified as an `install_path` option to any Puppetfile content declarations. #### purge\_allowlist The `purge_allowlist` setting exempts the specified filename patterns from being purged. This setting is currently only considered during `environment` level purging. (See above.) Given value must be a list of shell style filename patterns in string format. See the Ruby [documentation for the `fnmatch` method](http://ruby-doc.org/core-2.2.0/File.html#method-c-fnmatch) for more details on valid patterns. Note that the `FNM_PATHNAME` and `FNM_DOTMATCH` flags are in effect when r10k considers the allowlist. Patterns are relative to the root of the environment being purged and *do not match recursively* by default. For example, a allowlist value of `*myfile*` would only preserve a matching file at the root of the environment. To preserve the file throughout the deployed environment, a recursive pattern such as `**/*myfile*` would be required. Files matching a allowlist pattern may still be removed if they exist in a folder that is otherwise subject to purging. In this case, an additional allowlist rule to preserve the containing folder is required. ```yaml --- deploy: purge_allowlist: [ 'custom.json', '**/*.xpp' ] ``` #### write\_lock The `write_lock` setting allows administrators to temporarily disallow r10k code deploys without having to remove the r10k configuration entirely. This can be useful to prevent r10k deploys at certain times or prevent r10k from interfering with a common set of code that may be touched by multiple r10k configurations. ```yaml --- deploy: write_lock: "Deploying code is disallowed until the next maintenance window (2038-01-19)" ``` #### generate\_types The `generate_types` setting controls whether r10k should update generated types after a successful environment update. See [Environment isolation](https://puppet.com/docs/puppet/latest/environment\_isolation.html) for more information on generated types. Defaults to false. ```yaml deploy: generate_types: true ``` #### puppet\_path The path to the puppet executable used for generating types. Defaults to `/opt/puppetlabs/bin/puppet`. ```yaml deploy: puppet_path: '/usr/local/bin/puppet' ``` #### puppet\_conf The path to the puppet.conf file used for generating types. Defaults to `/etc/puppetlabs/puppet/puppet.conf`. ```yaml deploy: puppet_conf: '/opt/puppet/conf/puppet.conf' ``` #### exclude_spec During module deployment, r10k's default behavior is to delete the spec directory. Setting `exclude_spec` to true will deploy modules without their spec directory. This behavior can be configured for all modules using the `exclude_spec` setting in the r10k config. It can also be passed as a CLI argument for `deploy environment/module`, overriding the r10k config. 
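For example, a deploy that skips spec directories for every module might look like the following; treat the exact flag spelling as an assumption and confirm it against `r10k deploy environment --help` for your release:

    r10k deploy environment production --modules --exclude-spec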
Setting this per module in a `Puppetfile` will override the default, r10k config, and cli flag for that module. The following example sets all modules to not deploy the spec dir via the r10k config. ```yaml deploy: exclude_spec: true ``` Source options -------------- The following options are respected by all source implementations. Sources may implement other options in addition to the ones listed below; see the source specific documentation for more information. ### remote The 'remote' setting specifies where the source repository should be fetched from. It may be any valid URL that the source may check out or clone. The remote must be able to be fetched without any interactive input, eg usernames or passwords cannot be prompted for in order to fetch the remote. We support the `git`, `ssh`, and `https` transport protocols. An SSH private key or access token must be provided for authentication. Only `https` may be used without authentication. See [GitHub's blog on protocol security](https://github.blog/2021-09-01-improving-git-protocol-security-github/) for more info. ```yaml --- sources: mysource: remote: 'https://git-server.site/my-org/main-modules' ``` ### basedir The 'basedir' setting specifies where environments will be created for this source. This directory will be entirely managed by r10k and any contents that r10k did not put there will be _removed_. ```yaml --- sources: mysource: basedir: '/etc/puppet/environments' ``` If two different sources have the same basedir, it's possible for them to create two separate environments with the same name and file path. If this occurs r10k will treat this as a fatal error and will abort. To avoid this, use prefixing on one or both of the sources to make sure that all environment names are unique. See also the [prefix](#prefix) setting. ### prefix The prefix setting allows environment names to be prefixed with the short name of the given source. This prevents collisions when multiple sources are deployed into the same directory. ```yaml --- sources: mysource: basedir: '/etc/puppet/environments' prefix: true # All environments will be prefixed with "mysource_" ``` #### prefix behaviour * if `true` environment folder will be prefixed with the name of the source. * if `false` (default) environment folder will not be prefixed * if `String` environment folder will be prefixed with the `prefix` value. ### strip\_component The 'strip\_component' setting allows parts of environment names from a source to have a transformation applied, removing a part of the name before turning them into Puppet environments. This is primarily useful for VCS sources (e.g. Git), because it allows branch names to use prefixes or organizing name components such as "env/production", "env/development", but deploy Puppet environments from these branches named without the leading "env/" component. E.g. "production", "development". ```yaml --- sources: mysource: basedir: '/etc/puppet/environments' strip_component: 'env/' ``` #### strip\_component behaviour * if `string` environment names will have this prefix removed, if the prefix is present. Note that when string values are used, names can only have prefix components removed. * if `/regex/` the regex will be matched against environment names and if a match is found, the matching name component will be removed. ### ignore_branch_prefixes The 'ignore_branch_prefixes' setting causes environments to be ignored which match in part or whole to any of the prefixes listed in the setting. The setting is a list of strings. 
Each branch in the 'git' repo will have its name tested against all prefixes and, if a prefix is found, then an environment will not be deployed for this branch. If no 'ignore_branch_prefixes' is specified, then all branches in the 'git' repo will be deployed (default behavior). #### ignore_branch_prefixes behaviour * if empty, deploy environments for all branches * for each branch in git repo ** if `branch.name` has a prefix found in `ignore_branch_prefixes`, then do not deploy an environment for branch Example: do not deploy branches with names starting with (or completely named) 'test' or 'dev'. ```yaml --- sources: mysource: basedir: '/etc/puppet/environments' ignore_branch_prefixes: - 'test' - 'dev' ``` ### filter_command You can filter out any branch based on the result of the command specified as 'filter_command'. Currently it only works with Git repositories. A non-zero return status of the command results in the branch being removed. The command is passed additional environment variables: * GIT_DIR – path to the cached git repository * R10K_BRANCH – branch which is being filtered * R10K_NAME – source name from r10k configuration This can be used, for example, to filter out branches whose latest commit has an invalid GPG signature: ```yaml --- sources: mysource: basedir: '/etc/puppet/environments' filter_command: 'git verify-commit $R10K_BRANCH 2> /dev/null' ``` Beware that if the production branch of manifests is filtered out, you will end up with an empty environment. Examples -------- ### Minimal example The majority of users will only have a single repository where all modules and hiera data files are kept. In this case you will specify a single source: ```yaml --- sources: operations: remote: 'https://git-server.site/my-org/org-modules' basedir: '/etc/puppet/environments' ``` ### Separate hiera data For more complex cases where you want to store hiera data in one repository and your modules in another repository, you can specify two sources: ```yaml --- sources: operations: remote: 'https://git-server.site/my-org/org-modules' basedir: '/etc/puppet/environments' hiera: remote: 'https://git-server.site/my-org/org-hiera-data' basedir: '/etc/puppet/hiera-data' ``` ### Multiple tenancy Alternatively, you may want to create separate environments from multiple repositories. This is useful when you want two groups to be able to deploy Puppet modules but they should only have write access to their own modules and not the modules of other groups.
```yaml --- sources: main: remote: 'https://git-server.site/my-org/main-modules' basedir: '/etc/puppet/environments' prefix: false # Prefix defaults to false so this is only here for clarity qa: remote: 'https://git-server.site/my-org/qa-puppet-modules' basedir: '/etc/puppet/environments' prefix: true dev: remote: 'https://git-server.site/my-org/dev-puppet-modules' basedir: '/etc/puppet/environments' prefix: true ``` This will create the following directory structure: ``` /etc/puppet/environments |-- production # main-modules repository, production branch |-- upgrade_apache # main-modules repository, upgrade_apache branch |-- qa_production # qa repository, production branch |-- qa_jenkins_test # qa repository, jenkins_test branch |-- dev_production # dev repository, production branch `-- dev_loadtest # dev repository, loadtest branch ``` #### Multiple tenancy with external hieradata If hiera data is in a separate repository from your control repository, you must override the `prefix` so environment folders line up in both directories: ```yaml --- sources: app1_data: remote: 'https://git-server.site/my-org/app1-hieradata' basedir: '/etc/puppet/hieradata' prefix: "app1" app1_modules: remote: 'https://git-server.site/my-org/app1-puppet-modules' basedir: '/etc/puppet/environments' prefix: "app1" ``` This will create the following directory structure: ``` /etc/puppet/environments |-- app1_production # app1 modules repository, production branch |-- app1_develop # app1 modules repository, develop branch /etc/puppet/hieradata |-- app1_production # app1 data repository, production branch |-- app1_develop # app1 data repository, develop branch ``` Experimental Features --------------------- ### YAML Environment Source Dynamically deploying Puppet content based on the state of version control repositories can be powerful and efficient for development workflows. The linkage however is not advantageous when trying to build precision controls over deployment of previously-developed and tested content. The YAML environment source type allows for a clear separation of tooling between development workflow, and deployment workflow. Development workflow creates new commits in the version control system. Deployment workflow consumes them. To use the YAML environment source, configure r10k's sources with at least one entry using the yaml type. ```yaml # r10k.yaml --- sources: puppet: type: yaml basedir: /etc/puppetlabs/code/environments config: /etc/puppetlabs/r10k/environments.yaml # default ``` When using the YAML source type, every environment is enumerated in a single yaml file. Each environment specifies a type, source, and version (typically a Git ref) to deploy. In the following example, two environments are defined, which are identical to each other. ```yaml --- production: type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 development: type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 ``` ### YAMLdir Environment Source Like the YAML environment source, but implemented as a conf.d pattern. ```yaml # r10k.yaml --- sources: puppet: type: yamldir basedir: /etc/puppetlabs/code/environments config: /etc/puppetlabs/r10k/environments.d # default ``` Each environment is defined in a yaml file placed in the configuration directory. The filename, without the .yaml extension, will be the name of the environment. 
``` /etc/puppetlabs/r10k/environments.d ├── production.yaml └── development.yaml ``` The contents of the file should be a hash specifying the environment type, and all other applicable environment options. ```yaml # production.yaml --- type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 ``` ### Exec Environment Source The exec environment source runs an external command which is expected to return on stdout content compatible with the YAML environment source data format. The command may return the data in JSON or YAML form. The exec environment source is similar in purpose to Puppet's exec node terminus, used to implement external node classifiers (ENCs). R10k's exec source type allows the implementation of external environment sources. ```yaml # r10k.yaml --- sources: puppet: type: exec basedir: /etc/puppetlabs/code/environments command: /usr/local/bin/r10k-environments.sh ``` ### Environment Modules The environment modules feature allows module content to be attached to an environment at environment definition time. This happens before modules specified in a Puppetfile are attached to an environment, which does not happen until deploy time. Environment module implementation depends on the environment source type. For the YAML environment source type, attach modules to an environment by specifying a modules key for the environment, and providing a hash of modules to attach. Each module accepts the same arguments accepted by the `mod` method in a Puppetfile. For ease of reading and consistency, however, it is preferred to use the generic type, source, and version options over implementation-specific formats and options such as "ref" and "git". The example below includes two Forge modules and one module sourced from a Git repository. The two environments are almost identical. However, a new version of the stdlib module (6.2.0) has been deployed in development that has not yet been deployed to production. ```yaml --- production: type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 modules: puppetlabs-stdlib: type: forge version: 6.0.0 puppetlabs-concat: type: forge version: 6.1.0 reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 development: type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 modules: puppetlabs-stdlib: type: forge version: 6.2.0 puppetlabs-concat: type: forge version: 6.1.0 reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 ``` An example of a single environment definition for the YAMLdir environment source type: ```yaml # production.yaml --- type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 modules: puppetlabs-stdlib: type: forge version: 6.0.0 puppetlabs-concat: type: forge version: 6.1.0 reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 ``` #### Puppetfile module conflicts When a module is defined in an environment and also in a Puppetfile, the default behavior is for the environment definition of the module to take precedence, a warning to be logged, and the Puppetfile definition to be ignored. The behavior is configurable to optionally skip the warning, or allow a hard failure instead. Use the `module_conflicts` option in an environment definition to control this.
Available `module_conflicts` options: * `override_and_warn` (default): the version of the module defined by the environment will be used, and the version defined in the Puppetfile will be ignored. A warning will be printed. * `override`: the version of the module defined by the environment will be used, and the version defined in the Puppetfile will be ignored. * `error`: an error will be raised alerting the user to the conflict. The environment will not be deployed. ```yaml # production.yaml --- type: git source: git@github.com:puppetlabs/control-repo.git version: 8820892 module_conflicts: override_and_warn modules: puppetlabs-stdlib: type: forge version: 6.0.0 puppetlabs-concat: type: forge version: 6.1.0 reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 ``` ### Plain Environment Type A "control repository" typically contains a hiera.yaml, an environment.conf, a manifests/site.pp file, and a few other things. However, none of these are strictly necessary for an environment to be functional if modules can be deployed to it. The plain environment type allows sources that support environment modules to operate without a control repo being required. Modules can be deployed directly. ```yaml --- production: type: plain modules: puppetlabs-stdlib: type: forge version: 6.0.0 puppetlabs-concat: type: forge version: 6.1.0 reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 development: type: plain modules: puppetlabs-stdlib: type: forge version: 6.0.0 puppetlabs-concat: type: forge version: 6.1.0 reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 ``` ### Tarball Environment Type The tarball environment type allows an environment to be deployed from a tarball archive, rather than a Git repository. When using a tarball environment type, a source location for the tarball is required. Optionally, the tarball's sha256 checksum may be specified as the version. It is highly recommended to include a version specifier. If a version specifier is not included, r10k will never invalidate a cached copy of the tarball's source. Tarball environment sources will be unpacked directly into the environment root. ```yaml --- production: type: tarball source: https://repo.example.com/projects/puppet/env-2.36.1.tar.gz version: 99a906c99c2f144de43f2ae500509a7474ed11c583fb623efa8e5b377a3157f0 # sha256digest development: type: tarball source: https://repo.example.com/projects/puppet/env-6128ada.tar.gz version: 6128ada158622cd90f8e1360fb7c2c3830a812d1ec26ddf0db7eb16d61b7293f # sha256digest modules: reidmv-xampl: type: git source: https://github.com/reidmv/reidmv-xampl.git version: 62d07f2 ``` r10k-4.0.2/doc/dynamic-environments/git-environments.mkd000066400000000000000000000070661460033767200232360ustar00rootroot00000000000000Git Based Dynamic Environments ============================== R10k can use Git repositories to implement dynamic environments. You can create, update, and delete Puppet environments automatically as part of your normal Git workflow. Dynamic Environments in a nutshell ---------------------------------- The core idea of dynamic environments is that you should be able to manage your Puppet modules in the same manner that you would manage any other code base. It builds on top of Git topic branch model. 
[git-topic-branching]: http://git-scm.com/book/en/Git-Branching-Branching-Workflows#Topic-Branches One of the most prevalent ways of using Git relies on [topic branches][git-topic-branching]. Whenever changes need to be made that need to be reviewed or tested before going live, they should be done in a separate, short-lived branch called a topic branch. Work can be freely done on a topic branch in isolation, and when the work is completed it is merged into a "master" or "production" branch. This is very powerful because it allows any number of people to rapidly develop features in isolation and merge features in a single operation. The dynamic environment model extends this Git branching strategy to your live Puppet masters. It creates a mapping between Git branches and Puppet environments so that you can use the Git branching model and have that be seamlessly reflected in Puppet environments. This means that creating a new Git branch creates a new Puppet environment, updating a Git branch will update that environment, and deleting a Git branch will remove that environment. R10k supports both [directory and config file environments](https://puppet.com/docs/puppet/latest/env_environments.html). Ensure that the basedir for your sources and your puppet config align. How it works ------------ R10k works by tracking the state of your Git repository or repositories. Each repository's branches will be cloned into a directory with a matching name, creating a Puppet environment for the branch. If a repository includes a Puppetfile, such as the control repo, the Forge modules and Git/SVN repositories described within will be cloned as well, into the same directories. Subsequent changes to the branches will be kept in sync on the filesystem by future r10k runs. Finally, if there are directories that do not match existing branches, r10k will assume that the branches for those environments were deleted and will remove those environments. r10k will need to be able to authenticate with each repository. Most Git systems support authentication with SSH keys. GitHub calls them [deploy keys][github-deploy-keys]. Bitbucket calls them [deployment keys][bitbucket-deployment-keys]. Stash calls them [SSH access keys][stash-access-keys]. [github-deploy-keys]: https://developer.github.com/guides/managing-deploy-keys/#deploy-keys [bitbucket-deployment-keys]: https://confluence.atlassian.com/display/BITBUCKET/Use+deployment+keys [stash-access-keys]: https://confluence.atlassian.com/display/STASH/SSH+access+keys+for+system+use Configuration ------------- The following configuration options can be specified for Git based environment sources. ### invalid_branches: This setting specifies how Git branch names that cannot be cleanly mapped to Puppet environments will be handled. Valid values: * 'correct_and_warn': Non-word characters will be replaced with underscores and a warning will be emitted. (Default) * 'correct': Non-word characters will silently be replaced with underscores. * 'error': Branches with non-word characters will be ignored and an error will be emitted. r10k-4.0.2/doc/dynamic-environments/introduction.mkd000066400000000000000000000056471460033767200224460ustar00rootroot00000000000000Dynamic Environments ==================== One of the most important functions of r10k is its ability to dynamically manage your Puppet environments. When environments were originally built into Puppet they were meant to be static in nature.
Each environment had to be defined beforehand in the master's puppet.conf file in its own section, like so: ```ini [master] # Environment independent settings vardir = '/var/lib/puppet' [production] modulepath = '/etc/puppet/environments/production/modules' [testing] modulepath = '/etc/puppet/environments/testing/modules' [development] modulepath = '/etc/puppet/environments/development/modules' ``` Static Puppet environments were frequently used to implement a pipeline for developing Puppet code. New Puppet code would be developed and deployed to the development environment, pushed to testing for validation, and then finally pushed to production for general deployment. This static nature of Puppet environments turned out to be inflexible in practice. With a predefined list of environments it could be very cumbersome to develop on different parts of a Puppet codebase in isolation; you could either develop multiple features in the same environment and risk cross-pollution, or manually create new environments every time you needed isolation. Dynamic environments work by dynamically determining the settings for a Puppet environment when the environment is used, rather than by defining an explicit section in puppet.conf. This works by making the current environment (in the '$environment' variable) part of the path to environment specific settings, like modulepath, manifest, and so forth. ```ini [master] environmentpath = $confdir/environments ``` Running `puppet agent -t --environment myenv` will cause $environment to be expanded to 'myenv', so the modulepath for that environment will be set to '/etc/puppet/environments/myenv/modules'. This approach of allowing environments to be defined on the fly is a complete reversal of the original architecture of environments. It means that it can be very easy to create new environments, update existing environments, and remove environments that aren't needed anymore. It's common practice to create a temporary environment to test an idea and destroy it shortly after. R10k is designed to enable this sort of fluid workflow. R10k predominantly uses version control systems to implement dynamic environments. This works by inspecting the VCS repositories containing your Puppet code and checking out that code on your masters so that there's a 1:1 connection between a branch in your VCS repository and a Puppet environment on your masters. This approach allows you to define the way you want to work and use that with your chosen VCS, and r10k will make Puppet implement that workflow. Different version control systems will implement dynamic environments in slightly different ways; check out the VCS specific documentation for more information. r10k-4.0.2/doc/dynamic-environments/master-configuration.mkd000066400000000000000000000034271460033767200240630ustar00rootroot00000000000000Puppet master configuration =========================== In order to use environments, your Puppet masters will need to be configured to load manifests and modules relative to the requested environment. This is the default behavior in Puppet 4+. This behavior is controlled by the following settings (listed with their default values): ``` codedir = /etc/puppetlabs/code environmentpath = $codedir/environments basemodulepath = $codedir/modules:/opt/puppetlabs/puppet/share/modules ``` The environment requested by the agent or assigned to it by an ENC is looked for at `$environmentpath/$environment`.
That environment may have an environment.conf file in its base directory that specifies its own modulepath. If not, the default computed modulepath for that environment is `$environmentpath/$environment/modules:$basemodulepath`. These configuration variables are documented at [Puppet Configuration Reference](https://puppet.com/docs/puppet/latest/configuration.html) and their interaction in forming the modulepath is documented at [Directories and the Modulepath](https://puppet.com/docs/puppet/latest/dirs_modulepath.html). More information about environments in general can be found at [Creating Environments](https://puppet.com/docs/puppet/latest/environments_creating.html). The evolution to using environments in this way was a gradual one from community conventions to a supported feature, and there were several intermediate stages that had different configuration requirements within the puppet.conf. Some Puppet documentation from the 3.x and 4.x series may no longer be applicable to the above, which solidified in Puppet 4.x. If you need to upgrade away from any intermediate setup, see [Environments in Puppet 3.8](https://puppet.com/docs/puppet/3.8/environments.html) for examples that may help. r10k-4.0.2/doc/dynamic-environments/quickstart.mkd000066400000000000000000000134451460033767200221160ustar00rootroot00000000000000# Overview The intent of this document is to serve as a basic guide for getting started with r10k and a fresh Puppet installation, including the following: * Installing Puppet and its dependencies * Installing r10k and its dependencies * Configuring all components to support r10k * Configuring your git repository and initial files This document is based around Puppet 4+, specifically using the Puppet Collection repositories provided by Puppet Labs. # Pre-Requisites * Clean install of CentOS 7 or Debian 7.0 with root access / sudo rights. * Clean GitHub repository with a deploy key generated by the server above. # Installing Puppetmaster on CentOS 7 Install and enable the official Puppet Labs package repositories. ``` yum localinstall http://yum.puppetlabs.com/puppetlabs-release-pc1-el-7.noarch.rpm ``` Clean all yum data and rebuild the metadata cache. ``` yum clean all && yum makecache ``` Install the Puppet master. ``` yum install puppetserver ``` # Installing Puppetmaster on Debian 7 Install and enable the official Puppet Labs package repositories. ``` wget http://apt.puppetlabs.com/puppetlabs-release-pc1-wheezy.deb dpkg -i puppetlabs-release-pc1-wheezy.deb apt-get update ``` Install the Puppet master. ``` apt-get install puppetserver ``` # Configuring Puppet Configure the Puppet master by editing `/etc/puppetlabs/puppet/puppet.conf` and ensuring it has the following contents: ``` [main] dns_alt_names = $_Insert FQDN of Puppet Master Here_$ [agent] server = $_Insert FQDN of Puppet Master Here_$ ``` Restart the Puppet master service. ``` service puppetserver restart ``` Ensure the certificate for the Puppet master was created. ``` # /opt/puppetlabs/bin/puppet cert list --all + "puppet-master.domain.local" (SHA256) 3F:F3:63:BB:EE:57:46:A4:7B:03:AB:9D:FD:97:0F:8F:73:87:40:3B:6D:E5:DC:FC:C3:49:F5:C9:B6:F4:DE:B8 (alt names: "DNS:puppet-master.domain.local") ``` Notice for Debian users: apt post-configure will build the certificate for the server BEFORE you configure it. Therefore you should rebuild your certs after you are done with the /etc/puppetlabs/puppet/puppet.conf configuration.
To do so you need to remove old certs and restart puppetmaster: ``` service puppetserver stop find $(/opt/puppetlabs/bin/puppet master --configprint ssldir) -name "$(/opt/puppetlabs/bin/puppet master --configprint certname).pem" -delete service puppetserver start ``` # Install and Configure R10k Install r10k via Ruby Gems. ``` /opt/puppetlabs/puppet/bin/gem install r10k ``` Configure r10k by creating the following directory structure and file `/etc/puppetlabs/r10k/r10k.yaml` and ensuring it has the following contents: ``` # The location to use for storing cached Git repos :cachedir: '/var/cache/r10k' # A list of git repositories to create :sources: # This will clone the git repository and instantiate an environment per # branch in /etc/puppetlabs/code/environments :my-org: remote: 'git@github.com:$_Insert GitHub Organization Here_$/$_Insert GitHub Repository That Will Be Used For Your Puppet Code Here_$' basedir: '/etc/puppetlabs/code/environments' ``` # Configure Puppet Code Repository Populate the repository by cloning it locally and performing each of the following actions within it: Note that puppet defaults to the `production` environment. You may wish to change your default git branch from `master` to `production` in order to match this. Alternatively, you can set your agents' environment to `master`. ``` mkdir -p {modules,site/profile/manifests,hieradata} touch hieradata/common.yaml touch site/profile/manifests/base.pp touch environment.conf touch Puppetfile touch site.pp ``` Edit the `environment.conf` file and ensure it has the following contents: ``` manifest = site.pp modulepath = modules:site ``` Edit the `site.pp` file and ensure it has the following contents: ``` hiera_include('classes') ``` Edit the `hieradata/common.yaml file and ensure it has the following contents: ``` --- classes: - 'profile::base' ntp::servers: - 0.us.pool.ntp.org - 1.us.pool.ntp.org ``` Edit the `Puppetfile` file and ensure it has the following contents: ``` forge 'forge.puppetlabs.com' # Forge Modules mod 'puppetlabs/ntp', '4.1.0' mod 'puppetlabs/stdlib' ``` Edit the `site/profile/manifests/base.pp` file and ensure it has the following contents: ``` class profile::base { class { '::ntp': } } ``` Ensure that the user r10k runs as (typically root) can access the git repository. See the [git environment guide](git-environments.mkd) for more detail. You can test the access by using su/sudo to perform `git clone yourrepoURL` as the correct user. # Summary We now have the following functional pieces: 1. Puppet master 2. Hiera 3. r10k 4. Puppet code repository 5. Initial 'profile' named 'base' that will configure NTP on our servers. This base will allow us to do all sorts of useful things. Most interesting (to me and for the purposes of this tutorial) is the ability to now utilize Git branches to help manage infrastructure as part of your software development lifecycle. Now, when you want to test a new profile, you can do the following: 1. Create a new branch of the Puppet code repository 2. Create your Puppet code in this new branch 3. Push the new branch up to the repository 4. Deploy it as a new environment using the `/opt/puppetlabs/puppet/bin/r10k deploy environment -p` command. From any agent node (including the master), you may run the agent against the new environment by specifying it on the command line. 
For example, if you create the branch `test`, run puppet as: ``` puppet agent -t --environment test ``` You can also modify the `/etc/puppetlabs/puppet/puppet.conf` file on a node and add the environment setting to the agent section to make the change permanent: ``` ... [agent] environment = test ``` Voila - you're testing code without impacting your production environment! r10k-4.0.2/doc/dynamic-environments/svn-environments.mkd000066400000000000000000000027301460033767200232520ustar00rootroot00000000000000SVN Based Dynamic Environments ============================== R10k can use SVN repositories to implement dynamic environments. You can create, update, and delete Puppet environments automatically as part of your normal SVN workflow. How it works ------------ R10k implements a branching workflow similar to Git by using the SVN concept of branches. SVN repositories must conform to the conventional SVN repository structure with the directories trunk/, branches/, and optionally tags/ in the root of the repository. R10k maps the trunk/ directory to the production environment, and branches (directories in branches/) are created as environments with the name of the given branch. Configuration ------------- In addition to the settings that all sources support, SVN sources can specify the following additional options: ### username/password If the SVN repository requires credentials, you can supply the `username` and `password` options. Both `username` and `password` must be specified in order to use SVN authentication. **Note**: SVN credentials are passed as command line options, so the SVN credentials may be visible in the process table when r10k is running. If you choose to supply SVN credentials, make sure that the system running r10k is appropriately secured. ```yaml --- sources: myenvs: type: svn remote: 'svn://my-svn.server/my-svn-repo' basedir: '/etc/puppet/environments' username: 'azurediamond' password: 'hunter2' ``` r10k-4.0.2/doc/dynamic-environments/usage.mkd000066400000000000000000000146111460033767200210240ustar00rootroot00000000000000Usage ===== R10k provides fairly fine-grained controls over your environments to fit your needs. If you want to do a full update of all of your environments and modules and don't need it to be done in real time, you can trigger a full update and let it run in the background. If you are actively developing code and need to run very fast updates of one specific environment, you can do a targeted update of that code as well. All commands that deal with deploying environments are grouped under the `r10k deploy` subcommand. Command line invocation ----------------------- ### Deploying environments Recursively update all environments: r10k deploy environment --modules The simplest way to use r10k is to update all environments and modules, taking the brute force approach of "update everything, ever." When this command is run r10k will update all sources, create new environments and delete old environments, and recursively update all environment modules specified in environment Puppetfiles, yamldirs, etc. While this is the simplest method for running r10k, it is also the slowest by a very large degree because it does the maximum possible work. This should not be something you run interactively, or use on a regular basis. - - - Update environments while avoiding unnecessary recursion: r10k deploy environment This will update existing environments and recursively create new environments.
Note that when an environment is deployed for the first time, it will automatically update all modules as well. For subsequent updates only the environment itself will be updated. - - - Update a single environment: r10k deploy environment my_working_environment When you're actively developing on a given environment, this is the best way to deploy your changes. Note that when an environment is deployed for the first time, it will automatically update all modules as well. For subsequent updates only the environment itself will be updated. - - - Update a single environment and force an update of modules: r10k deploy environment my_working_environment --modules This will update the given environment and update all contained modules. This is useful if you want to make sure that a given environment is fully up to date. - - - There is also a middle ground between updating all modules and updating no modules. It is often desirable to update the environment and then update only those modules whose definitions have changed in the Puppetfile, or whose content _could_ have changed since the last deployment (eg, Forge modules with their version set to `:latest` or Git modules that point to a `branch` ref). This can be achieved by assuming content is unchanged locally on disk. This is the opposite of what one would assume during a module development cycle, when a user might be making local edits to test code changes. However, in production, access to puppet code is usually locked down, and updates are deployed through automated invocations of R10K. In these cases, where most modules are unchanged and reference exact versions (ie, not `:latest` or a branch as mentioned above), this invocation may shorten deployment times by dozens of seconds if not minutes, depending on how many modules meet the above criteria (approximately 1 minute for every 400 modules). To take advantage of this, set as many modules as possible in the Puppetfile to explicit, static versions. These are released Forge versions, or Git modules using the `:tag` or `:commit` keys. Git `:ref`s containing only the full 40 character commit SHA will also be treated as static versions. Then invoke a deploy with: r10k deploy environment production --modules --incremental There may be issues with deployments apparently successful after an initial errored deployment. If this is happening, try running without the `--incremental` flag to run a full deployment. - - - Update a single environment and specify a default branch override: r10k deploy environment my_working_environment --modules --default-branch-override default_branch_override This will update the given environment and update all contained modules, overriding the :default_branch entry in the Puppetfile of each module. If the specified override branch is not found, it will fall back to the normal default branch and attempt to use that. This is used primarily to allow automated r10k solutions using the control_branch pattern with a temporary branch deployment to ensure the deployment is pushed to the correct module repository branch. Note that the :default_branch and its override are only ever used if the specific desired ref cannot be located. ### Deploying modules Update a single module across all environments: r10k deploy module apache This is useful when you're working on a module specified in a Puppetfile and want to update it across all environments. See [Puppetfile documentation](doc/puppetfile.mkd) for details on how this affects Forge vs. Git/SVN modules.
- - - Update multiple modules across all environments: r10k deploy module apache jenkins java - - - Update one or more modules in a single environment: r10k deploy module -e production apache jenkins java ### Viewing environments Display all environments being managed by r10k: r10k deploy display Display all environments being managed by r10k, and modules specified in the Puppetfile: r10k deploy display -p Display all environments being managed by r10k, and modules specified in the Puppetfile along with their expected and actual versions: r10k deploy display -p --detail Display an explicit list of environments being managed by r10k and modules specified in the Puppetfile along with their expected and actual versions: r10k deploy display -p --detail production vmwr webrefactor User accounts ------------- When running commands to deploy code on a master, r10k needs to have write access to your Puppet environment path and should create files that are readable by the user account running the master. If you're using Puppet Enterprise this account is `pe-puppet`, and if you're using Puppet open source this account is `puppet`. This can be done in a few ways. First off, you can run r10k as the puppet user itself. You can also create a new user that has write access to the Puppet environment path, has the same GID as the puppet user, and has a umask of 0027. You can also run r10k as root, which is the simplest solution but does require access control to the root user. r10k-4.0.2/doc/dynamic-environments/workflow-guide.mkd000066400000000000000000000165321460033767200226710ustar00rootroot00000000000000R10k's dynamic deployments work best with a workflow that understands and respects how r10k works, to prevent automation and manual processes from conflicting. Your workflow will need to be customized to meet your team's skills, tools, and needs. This guide describes a generic workflow that can be customized easily. This guide assumes that each of your modules is in a separate repository and that the `Puppetfile` is in its own repo called the [Control Repo](http://technoblogic.io/blog/2014/05/16/r10k-control-repos/). All module repos have a primary branch of *master* and the Control's primary branch is *production*. All changes are made through r10k and no user makes manual changes to the environments under **/etc/puppet**. Adding New Modules ------------------ This workflow is useful when adding a forge or internally-developed module to your puppet environment. ### Create new feature branch Create a new feature branch in your module repositories. Do this for each repository, including the control repository, that will reference the new module. You do not need to do so for modules that are not being edited. ```git checkout -b feature``` If you are simply adding the module at this time and not referencing it in other modules or manifests, only the Control repo requires a new branch. ### Add new module and branches to control repo The new module is added to the control repository's `Puppetfile` like so: ``` # Forge modules: mod "puppetlabs/ntp" # Your modules: mod "custom_facts", :git => "https://github.com/user/custom_facts" ``` For any existing modules that you branched, add a reference to the new branch name. Don't forget the comma at the end of the *:git* value. 
``` mod "other_module", :git => "https://github.com/user/other_module", :ref => "feature" ``` ### Reference new module in manifests, modules, and hiera If you are simply adding the module at this time and not referencing it in other modules or manifests, you may skip this step. Edit your existing manifests, modules, and hiera as needed to make use of the new module. ### Deploy environments Save all your changes in each module repo. Commit and push the changes upstream: ``` git commit -a -m 'Add feature reference to module' git push origin feature ``` Commit and push the change in your control repo upstream: ``` git commit -a -m 'Add module puppetlabs/ntp to branch feature' git push origin feature ``` Finally, deploy the environments via r10k. This step must occur on the master: ```r10k deploy environment -p``` Add the **-v** option for verbosity if you need to troubleshoot any errors. The new branch should be located at `$environmentpath/feature`. ### Test the new module branches If you are simply adding the module at this time and not referencing it in other modules or manifests, you may skip this step. Run the puppet agent against the new environment from at least two nodes, one that should not be impacted by the change and one that should be impacted. ```puppet agent -t --environment feature``` Verify that catalog compilation succeeds and that you are satisfied that the effective changes match your expected changes. Repeat the steps above until you are satisfied with the results. ### Merge changes In each of the changed modules and the control repo, check out the main branch, merge, and push changes to the master/production branch. ``` # Module repos git checkout master git merge feature git push origin master # Control repo git checkout production git merge feature vi Puppetfile # Remove all :ref's pointing to 'feature'. Don't forget the trailing commas # on the :git statements git commit -a -m 'Remove refs to feature branch for module puppetlabs/ntp' git push origin production ``` If you are simply adding the module at this time and not referencing it in other modules or manifests, you are now finished. ### Cleanup feature branches You may skip this step for long-lived branches, however most feature branches should be short-lived and can be pruned once testing and merging is complete. Remove the old feature branch in each repo: ``` git branch -D feature git push origin :feature ``` Deploy via r10k on the master and ensure there are no errors. The *feature* dynamic environment will no longer exist at `$environmentpath/feature` if you deleted the branch in your Control repo. ```r10k deploy environment -p``` Editing existing Modules ------------------------ When editing your own existing modules, this workflow should be followed. ### Create new feature branches Create a new feature branch in your module repositories. Do this in the edited module, the control repository, and in each module that will reference the updated module. You do not need to do so for modules that are not being edited. ```git checkout -b feature``` ### Update control repo to reference new branch For all modules that you branched, add a reference to the new branch name to the `Puppetfile` in your Control repo. Don't forget the comma at the end of the *:git* value. ``` mod "other_module", :git => "https://github.com/user/other_module", :ref => "feature" ``` ### Modify existing module, references to module Make the required changes to your existing module.
Edit your existing manifests, modules, and hiera as needed to make use of the updated module. ### Deploy environments Save all your changes in each modified repo. Commit and push the changes upstream: ``` git commit -a -m 'Add feature reference to module' git push origin feature ``` Commit and push the change in your control repo upstream: ``` git commit -a -m 'Add module puppetlabs/ntp to branch feature' git push origin feature ``` Finally, deploy the environments via r10k. This step must occur on the master: ```r10k deploy environment -p``` Add the *-v* option for verbosity if you need to troubleshoot any errors. The new branch should be located at `$environmentpath/feature`. ### Test the new module branches Run the puppet agent against the new environment from at least two nodes, one that should not be impacted by the change and one that should be impacted. ```puppet agent -t --environment feature``` Verify that catalog compilation succeeds and that you are satisfied that the effective changes match your expected changes. Repeat the steps above until you are satisfied with the results. ### Merge changes In each of the changed module repos, check out the main branch and merge. ``` # Module repos git checkout master git merge feature git push origin master ``` In the Control repo, check out production. Do NOT merge the feature branch as it now references the incorrect branch for each git repo, and no other changes were made (unlike a new module, where a new repo is referenced). ``` # Control repo git checkout production ``` ### Cleanup feature branches You may skip this step for long-lived branches, however most feature branches should be short-lived and can be pruned once testing and merging is complete. Remove the old feature branch in each repo: ``` git branch -D feature git push origin :feature ``` Redeploy with r10k on your Puppet Master and ensure there are no errors. The *feature* dynamic environment should no longer exist at `$environmentpath/feature`. ```r10k deploy environment -p``` Customize Your Workflow ----------------------- This guide is very generic in nature. Use it as a template and expand and modify it to fit your team, your tools, and your company culture. Above all, be consistent in your methodology. r10k-4.0.2/doc/faq.mkd000066400000000000000000000144641460033767200143220ustar00rootroot00000000000000Frequently Asked Questions ========================== ### How can I run `puppet generate types` for each changed environment during deployment? The command `puppet generate types` creates Puppet signatures for custom types, circumventing the need to load their Ruby code during compilation. This prevents the type definitions from "leaking" across environments. Best practice currently is to run this command as part of your Puppet code deployment pipeline, and r10k's postrun command ability is a natural place to do so. The postrun command will have any occurrence of "$modifiedenvs" replaced with a space-separated list of environments the deploy was configured to modify. For example, creating the script: ``` $ cat /usr/local/bin/generate-puppet-types.sh #!/bin/bash for environment in $1; do /opt/puppetlabs/bin/puppet generate types --environment $environment done ``` And configuring r10k like so: ``` $ tail -2 /etc/puppetlabs/r10k/r10k.yaml postrun: ["/usr/local/bin/generate-puppet-types.sh", "$modifiedenvs"] ``` Should provide you with the necessary generated type definitions for improved environment isolation.
### The default Git branch is 'master', while the default Puppet environment is 'production'. How do I reconcile this? The default Git branch name is 'master', but this is a somewhat arbitrary name and doesn't necessarily map to every use case. In the case of R10K it's generally easiest to rename 'master' to 'production'. You can rename the master branch with the following: ``` git branch -m master production git push --set-upstream origin production ``` Note that this will only create a new branch called production with a copy of master - to change the default branch for all subsequent clones, read on. #### Changing the default branch for bare Git repositories When you clone a repository, Git checks out the [currently active branch][git-clone] on the remote repository. Changing this for a non-bare repository is simple - just check out a different branch and subsequent clones from that repository will use that branch. For bare repositories things are a bit more complex. Bare repositories do not have a working directory that can be checked out, but they do have a [symbolic ref][git-symbolic-ref] that serves the same role. To change this, run the following command: ``` git --git-dir /path/to/bare/repo symbolic-ref HEAD refs/heads/production ``` #### Changing the default branch for different Git services For Git hosting services where you cannot directly invoke commands, there are usually administrative tools to allow you to change the default branch on your remote repositories: * [GitHub][github-default-branch] * [Bitbucket][bitbucket-default-branch] * [Gitolite v2][gitolite-v2-default-branch] * [Gitolite v3][gitolite-v3-default-branch] [git-clone]: https://www.kernel.org/pub/software/scm/git/docs/git-clone.html "Man page for git-clone" [git-symbolic-ref]: https://www.kernel.org/pub/software/scm/git/docs/git-symbolic-ref.html "Man page for git-symbolic-ref" [github-default-branch]: https://help.github.com/articles/setting-the-default-branch "Changing the default branch on GitHub" [bitbucket-default-branch]: https://answers.atlassian.com/questions/280944/how-to-change-main-branch-in-bitbucket "Changing the default branch on Bitbucket" [gitolite-v2-default-branch]: http://stackoverflow.com/questions/7091599/git-default-remote-branch-with-gitolite "Changing the default branch on Gitolite v2" [gitolite-v3-default-branch]: http://stackoverflow.com/questions/13949093/git-change-default-branch-gitolite "Changing the default branch on Gitolite v3" ### How do I prevent r10k from removing modules in the `/modules` directory of my Git repository? By default, r10k will install modules specified in the Puppetfile into the `/modules` directory of each environment, but if you already use that directory and keep modules in it, r10k may think those modules are not meant to exist and may remove them. There are three ways of fixing this: including your local modules in the Puppetfile, moving the directory where r10k installs Puppetfile-sourced modules, or moving your modules. #### Including your local modules in the Puppetfile The Puppetfile has a concept of a "local" module, otherwise known as a module that r10k did not directly place there but which should not be removed. If you want to continue to keep your modules in the `/modules` directory and still install external modules from the Puppetfile into that directory, you can add a `mod` directive to the Puppetfile for each of your local modules.
``` mod 'my_ntp', :local => true mod 'roles', :local => true mod 'profiles', :local => true # Include your external modules as usual mod 'puppetlabs/stdlib' mod 'puppetlabs/apache' ``` #### Move where the Puppetfile installs external modules Instead of having to add a module entry for each of your local modules, you can simply move where the Puppetfile installs modules with the `moduledir` setting. ``` # The moduledir setting must be set before any modules are created moduledir "external-modules" mod 'puppetlabs/stdlib' mod 'puppetlabs/apache' ``` In Puppet 3.6 and later you can create an `environment.conf` in the root of your environment to indicate which directories contain modules: ``` # environment.conf modulepath = modules:external-modules ``` #### Move your local modules Lastly, you can simply move your locally versioned modules to a separate directory to avoid conflicting over the `/modules` directory entirely. With this example as well you can use the `environment.conf` file to tell Puppet which directories contain modules. ``` # environment.conf modulepath = internal-modules:modules ``` #### Does R10K support Local/Private Forge? Yes. Set the Forge to use _globally_ in `r10k.yaml`. see [Configuration](/doc/dynamic-environments/configuration.mkd#baseurl) for details. #### What does the name mean? It’s called R10K because I’m terrible at names. When I started working on R10K I couldn’t come up with a decent name. While trying to come up with something clever, I recalled that Randall Munroe wrote a bot for [controlling IRC chatter](http://blog.xkcd.com/2008/01/14/robot9000-and-xkcd-signal-attacking-noise-in-chat/), and gave it the most generic name he could think of - Robot 9000. Since I just needed a name, any name, I decided to go with an equally generic name by incrementing the robot index. r10k-4.0.2/doc/git/000077500000000000000000000000001460033767200136325ustar00rootroot00000000000000r10k-4.0.2/doc/git/cloning-and-mirroring.mkd000066400000000000000000000060561460033767200205350ustar00rootroot00000000000000Git Cloning and Mirroring ========================= Most Git operations on a repository require having a full clone of that repository locally available. Because this clone is a full copy, the initial clone is heavyweight and generally time consuming. For a situation like r10k where the same repositories are reused often, constantly cloning and deleting repositories is very inefficient. In order to speed up Git operations r10k takes a number of steps to avoid cloning and fetching repositories, as well as deduplicating content across multiple clones of the same repositories. Because r10k tends to reuse the same Git repositories in multiple places, r10k avoids repeated, full repository clones by mirroring repositories. When r10k starts using a new repository, it first clones that repository into a central location for later use. This initial clone is the only time that r10k will perform a full clone of that repository. When r10k creates an actual checkout of a Git repository, it uses the corresponding mirrored repository as a [reference](#git-alternates). This allows the working checkout to borrow objects from the mirrored repository instead of cloning all of the Git objects again. This saves a great deal of time and space as the number of copies of a repository increases. Mirrored git repositories are cloned into the directory specified by the r10k.yaml 'cachedir' setting. 
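For example, a minimal sketch of the corresponding r10k.yaml entry (the path shown is illustrative): ```yaml cachedir: '/var/cache/r10k' ```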
The name 'cachedir' is a bit of a misnomer; Git repositories are mirrored to speed up access to the remote resource like a cache, but unlike a traditional cache the mirrored repositories are persistent and should not be deleted. Because the mirrored repository contains all of the objects for all of the referencing repositories, deleting the mirrored repository is akin to deleting the `.git/objects` directory. Doing so effectively cripples the repository, so removing mirrored repositories should be done with care to avoid deleting repositories that are still in use. Git alternates -------------- A standard git repository stores all content in the `.git/objects` directory, either as a zlib compressed file in `.git/objects/[0-9a-f]{2}/[0-9a-f]{38}` or as part of a packfile in `.git/objects/pack`. Since content can be both stored in and retrieved from this location it's treated like a simple database, and is generally referred to as the Git object database. Git allows a single repository to look in more than just `.git/objects` for objects; additional object databases are referred to as **alternates**. If a repository has alternate object databases set up, it will check `.git/objects` and then each alternate object database when looking for an object. Git stores a list of alternate object databases in `.git/objects/info/alternates`. Invoking `git clone` with the `--reference ` flag will use that repository as an alternate object database. Links ----- * [man git-clone(1) --reference](https://www.kernel.org/pub/software/scm/git/docs/git-clone.html) * [man gitrepository-layout(5) objects/info/alternates](https://www.kernel.org/pub/software/scm/git/docs/gitrepository-layout.html) r10k-4.0.2/doc/git/providers.mkd000066400000000000000000000105361460033767200163510ustar00rootroot00000000000000Git Providers ============= As of 1.5.0, r10k can interact with Git repositories using multiple Git providers. Shellgit -------- The shellgit provider is the original Git provider that is based on shelling out to the `git` binary. It relies on the standard set of Git userland executables in order to work. The shellgit provider is the default Git provider in order to maintain compatibility with existing r10k installations. ### Requirements The shellgit provider requires that `git` can be found on the `PATH` environment variable. This can be done by installing the git package via the system package manager. ### SSH Configuration Because the shellgit provider relies on the `git` command which in turn uses the `ssh` binary as the SSH transport layer, configuring access to Git repositories over SSH is done by configuring the underlying `ssh` command. Rugged ------ The rugged provider is based on the [libgit2](https://github.com/libgit2/libgit2) library and the Ruby [rugged gem](https://github.com/libgit2/rugged). ### SSH Configuration Since the rugged provider does not read ~/.ssh if using SSH based Git repositories, the 'private_key' option must be provided. An optional 'username' field can be provided when the Git remote URL does not provide a username. ```yaml git: private_key: '/root/.ssh/id_rsa' username: 'git' ``` If you have per repository private keys you can add them with the repositories list. ```yaml git: # default private key private_key: '/root/.ssh/id_rsa' repositories: - remote: "git@github.com:my_org/private_repo" # private key for this repo only private_key: '/root/.ssh/private_repo_id' ``` ### HTTPS Configuration Public HTTPS based Git repositories can be accessed with no additional settings. 
For repos that do require authentication, the 'oauth_token' option may be provided. ```yaml git: oauth_token: '/etc/puppetlabs/r10k/token' ``` If you have per repository access tokens you can add them with the repositories list. ```yaml git: # default access token oauth_token: '/etc/puppetlabs/r10k/token' repositories: - remote: "https://github.com/my_org/private_repo.git" # access token for this repo only oauth_token: '/etc/puppetlabs/r10k/private_repo_token' ``` #### Supported transports with Rugged Rugged compiles libgit2 and the Ruby bindings when the gem is installed. You may need libraries installed before you install the gem to use certain protocols to access git remote repositories. For ssh support, you need to have libssh2 installed (along with the relevant dev package/headers) before you install the Rugged gem. For https support on Linux, you need to have OpenSSL installed (along with the relevant dev package/headers) before you install the Rugged gem. OS X and Windows support should automatically include https support. You can check whether https or ssh support is included in your Rugged installation by using the following in irb and making sure the required feature is listed: ```ruby irb(main):001:0> require('rugged') => true irb(main):002:0> Rugged.features => [:threads, :https, :ssh] irb(main):003:0> ``` You will require the ':https' or ':ssh' features to use the respective protocols in your Puppetfile module references or in r10k.yaml. R10K 2.0.0 and later will automatically issue a warning if either feature is missing. libssh2 on Debian and Ubuntu is compiled against libgcrypt instead of OpenSSL [due to licensing reasons](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=668271), and unfortunately libgcrypt does not support a number of required operations, including reading from a private key file. You will need to either use shellgit or recompile your own libssh2-1 package to use OpenSSL on these distributions. If you see the following error message, this is the likely cause: Failed to authenticate SSH session: Unable to extract public key from private key file: Method unimplemented in libgcrypt backend at /var/cache/r10k/ssh---git@git.example.com-sys-puppet.git Configuration ------------- R10K will attempt to use the shellgit provider, then fall back to the rugged provider, and then hard fail if no Git provider is available. The Git provider in use can be manually specified by specifying the desired provider in r10k.yaml. ```yaml git: provider: 'rugged' ``` Valid values are 'rugged' and 'shellgit'. If an invalid value is used r10k will raise an error. r10k-4.0.2/doc/puppetfile.mkd000066400000000000000000000302321460033767200157210ustar00rootroot00000000000000Puppetfile ========== Puppetfiles are a simple Ruby based DSL that specifies a list of modules to install, what version to install, and where to fetch them from. r10k can use a Puppetfile to install a set of Puppet modules for local development, or they can be used with r10k environment deployments to install additional modules into a given environment. Unlike librarian-puppet, the r10k implementation of Puppetfiles does not include dependency resolution, but it is on the roadmap. When directly working with Puppetfiles, you can use the `r10k puppetfile` subcommand to interact with a Puppetfile. When using r10k's deploy functionality, interacting with Puppetfiles is handled on a case by case basis. Because the Puppetfile format is actually implemented using a Ruby DSL, any valid Ruby expression can be used.
That being said, being a bit too creative in the DSL can lead to surprising (read: bad) things happening, so consider keeping it simple. Commands -------- Puppetfile subcommands assume that the Puppetfile to operate on is in the current working directory and modules should be installed in the 'modules' directory relative to the current working directory. Install or update all modules in a given Puppetfile into ./modules) r10k puppetfile install Verify the Puppetfile syntax r10k puppetfile check Remove any modules in the 'modules' directory that are not specified in the Puppetfile: r10k puppetfile purge Global settings --------------- The following settings can be used to control how the Puppetfile installs and handles modules. ### forge The `forge` setting specifies which server that Forge based modules are fetched from. This declaration is only respected if [`forge.allow_puppetfile_override`](/dynamic-environments/configuration.mkd#allow_puppetfile_override) is set to true in the main `r10k.yaml`. Otherwise, use [`forge.baseurl`](/doc/dynamic-environments/configuration.mkd#baseurl) to globally configure where modules should be downloaded from. ### moduledir The `moduledir` setting specifies where modules from the Puppetfile will be installed. This defaults to the `modules` directory relative to the Puppetfile. If the path is absolute then the modules will be installed to that absolute path, otherwise it's assumed that the `moduledir` setting should be relative and the modules will be installed in that directory relative to the Puppetfile. The moduledir setting should be placed before any modules are declared. Install modules to an absolute path: ```ruby moduledir '/etc/puppet/modules' mod 'branan/eight_hundred' # will be installed into '/etc/puppet/modules/eight_hundred' ``` Install modules to a relative path: ```ruby moduledir 'thirdparty' mod 'branan/eight_hundred' # will be installed into `dirname /path/to/Puppetfile`/thirdparty/eight_hundred ``` **Note**: support for a relative moduledir was added in r10k 1.4.0; the behavior of a relative moduledir path is undefined on earlier versions of r10k. Module types ------------ r10k can install Puppet modules from a number of different sources. Right now modules can be installed from the Puppet Forge, Git, or SVN. ### Puppet Forge Modules can be installed from the Puppet Forge. If no version is specified the latest version available at the time will be installed, and will be kept at that version. mod 'puppetlabs/apache' If a version is specified then that version will be installed. mod 'puppetlabs/apache', '0.10.0' If the version is set to :latest then the module will be always updated to the latest version available. mod 'puppetlabs/apache', :latest An explicit type and/or version can be specified using the standard interface, `:type` and `:version`. The `:source` parameter is not supported for individual forge modules and will be ignored. mod 'puppetlabs/apache', type: 'forge', version: '6.0.0' ### Git Git repositories that contain a Puppet module can be cloned and used as modules. When Git is used, the module version can be specified by using `:ref`, `:tag`, `:commit`, `:branch`, or the standard interface parameter `:version`. When a module is installed using `:ref`, r10k uses some simple heuristics to determine the type of Git object that should be checked out. This can be used with a git commit, branch reference, or a tag. 
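For instance, the same `:ref` option accepts any of these object types (a brief sketch; the value shown is a placeholder, and the Examples section below covers each option in detail):

```ruby
mod 'apache',
  :git => 'https://github.com/puppetlabs/puppetlabs-apache',
  :ref => '0.9.0' # a tag here, but a branch name or commit SHA works the same way
```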
When a module is installed using `:tag` or `:commit`, r10k assumes that the given object is a tag or commit and can do some optimizations around fetching the object. If the tag or commit is already available r10k will skip network operations when updating the repo, which can speed up install times. When `:ref` is set to track `HEAD`, it will synchronize the module on each run. Module versions can also be specified using `:branch` to track a specific branch reference. In r10k 3.x the default branch was hardcoded to `master`; in 4.x that was removed. A `default_ref` can be specified in the r10k config to to mimic that old behavior, but it is recommended to set the ref on a per-module basis in the Puppetfile. Read [here](dynamic-environments/configuration.mkd#default_ref) for more info on the `default_ref` setting. #### Examples ```ruby # Install puppetlabs/apache and keep it up to date with 'master' mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache' # Install puppetlabs/apache and track the 'docs_experiment' branch mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :ref => 'docs_experiment' # Install puppetlabs/apache and pin to the '0.9.0' tag mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :tag => '0.9.0' # Install puppetlabs/apache and pin to the '83401079' commit mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :commit => '83401079053dca11d61945bd9beef9ecf7576cbf' # Install puppetlabs/apache and track the 'docs_experiment' branch mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :branch => 'docs_experiment' # Install puppetlabs/apache and use standard interface parameters pinned to the # '2098a17' commit. mod 'puppetlabs-apache', type: 'git', source: 'https://github.com/puppetlabs/puppetlabs-apache', version: '2098a17' ``` #### Control Repo Branch Tracking Since r10k 2.4.0, the `:branch` option can be set to the special value `:control_branch` to indicate that the content should track a branch reference matching the containing control repo branch. For example, if a Puppetfile containing a Git content declaration is in the "testing" branch of a control repo, a setting of `:control_branch` will attempt to deploy that content from a "testing" branch of the content repo. Additionally, you can specify a `:default_branch` option which is the branch reference that content will be deployed from if the the given `:ref`, `:tag`, `:commit`, or `:branch` option cannot be resolved and deployed. If the desired content cannot be resolved and no default branch is given, or if the default branch can also not be resolved, an error will be logged and the content will not be deployed or updated. #### :control\_branch Examples ```ruby # Deploy content from branch matching control repo branch. mod 'hieradata', :git => 'git@git.example.com:organization/hieradata.git', :branch => :control_branch # Track control branch and fall-back to master if no matching branch. mod 'hieradata', :git => 'git@git.example.com:organization/hieradata.git', :branch => :control_branch, :default_branch => 'master' ``` ### SVN Modules can be installed via SVN. If no version is given, the module will track the latest version available in the main SVN repository. mod 'apache', :svn => 'https://github.com/puppetlabs/puppetlabs-apache/trunk' If an SVN revision number is specified with `:rev`, `:revision`, or `:version`, that SVN revision will be kept checked out. 
mod 'apache', :svn => 'https://github.com/puppetlabs/puppetlabs-apache/trunk', :rev => '154' mod 'apache', :svn => 'https://github.com/puppetlabs/puppetlabs-apache/trunk', :revision => '154' mod 'apache', type: 'svn', source: 'https://github.com/puppetlabs/puppetlabs-apache/trunk', version: '154' If the SVN repository requires credentials, you can supply the `:username` and `:password` options. mod 'apache', :svn => 'https://github.com/puppetlabs/puppetlabs-apache/trunk', :username => 'azurediamond', :password => 'hunter2' **Note**: SVN credentials are passed as command line options, so the SVN credentials may be visible in the process table when r10k is running. If you choose to supply SVN credentials make sure that the system running r10k is appropriately secured. ### Tarball Modules can be installed from tarball archives. A tarball module must specify a source URL to retreive the tarball content from. A tarball module may optionally specify a sha256 checksum as the module version. mod 'puppetlabs-apache', type: 'tarball', source: 'https://repo.example.com/puppet/modules/puppetlabs-apache-7.0.0.tar.gz', version: 'aedd6dc1a5136c6a1a1ec2f285df2a70b0fe4c9effb254b5a1f58116e4c1659e' # sha256 digest If no version is specified, a tarball will be downloaded from the given source and cached. The cache will not be invalidated until the source URL is changed, or a sha256 checksum version is provided. Tarball module content will be unpacked directly into an appropriately named module directory. For example, the puppetlabs-apache-7.0.0.tar.gz archive in the example above will be unpacked into `/modules/apache/`. ### Local In the event you want to store locally written modules in your r10k-managed repository in the Puppetfile managed path, you can use the `:local` type. For instance, if you have a Git repository with the following structure: ``` # tree -L 2 . ├── environment.conf ├── modules │   └── local_module └── Puppetfile 4 directories, 2 files ``` And you want to prevent `local_module` from being removed, you can add a 'local' module in your Puppetfile: ``` mod 'local_module', :local => true # Include your other modules as normal mod 'branan/eight_hundred' mod 'puppetlabs/apache' ``` If you run r10k against this Git branch, you'll get the following: ``` # tree -L 2 . ├── environment.conf ├── modules │   ├── apache │   ├── eight_hundred │   └── local_module └── Puppetfile 4 directories, 2 files ``` #### Caveats This is a workaround for r10k not being able to determine that modules created via VCS should not be purged, but is not meant to be a long term solution. The general practice around having local and remote modules in the same Git repository is to move modules versioned into a separate directory, like so: ``` # tree -L 2 . ├── environment.conf ├── site-modules │   └── local_module ├── modules │   ├── apache │   └── eight_hundred └── Puppetfile 4 directories, 2 files ``` Moving modules stored in the Git repository into a separate directory will remove the need to have Puppetfile entries for every locally versioned Puppet module. For more information see the [FAQ entry](faq.mkd#how-do-i-prevent-r10k-from-removing-modules-in-the-modules-directory-of-my-git-repository) on managing internal and external modules in the same directory. ### Per-Item spec dir deployment During deployment, r10k's default behavior is to delete the spec directory. The Puppetfile can modify this per module, overriding settings from the default r10k config. 
The following example sets the module to deploy the spec directory. ``` mod 'apache', :git => 'git@github.com:puppetlabs/puppetlabs-apache.git', :exclude_spec => false ``` ### Per-Item Install Path Git and SVN content types support installing into an alternate path without changing the value of moduledir by specifying an 'install\_path' option: ``` # This will install the 'apache' module into 'external/apache'. mod 'apache', :git => 'git@github.com:puppetlabs/puppetlabs-apache.git', :install_path => 'external' ``` The given 'install\_path' can be an absolute path or a path relative to the base of the environment. Note that r10k will exit with an error if you attempt to set the 'path' option to a directory outside of the environment. r10k-4.0.2/doc/updating-your-puppetfile.mkd000066400000000000000000000044011460033767200205250ustar00rootroot00000000000000Updating Your Puppetfile ======================== Over time, your Puppetfile may become stale and reference older versions of modules or miss dependencies for the modules. Your Puppetfile will require maintenance to keep it up to date. Manual Updates -------------- You can manually update your Puppetfile very easily. By visiting the module's homepage on the [Puppet Forge](https://forge.puppetlabs.com/), you can determine the new version of a module and update it: # Original mod 'puppetlabs/apache', '0.10.0' # New mod 'puppetlabs/apache', '1.0.0' When using a module directly from a git/svn repo, the `:tag` or `:ref` should be updated: # Original mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :tag => '0.10.0' # New mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :tag => '1.0.0' Dependency tracking can be done on the Puppet Forge as well by looking at the Dependency tab (Ex: [puppetlabs/apache](https://forge.puppetlabs.com/puppetlabs/apache/dependencies)) and visiting each module in turn, or examining `metadata.json` in non-forge modules. Automatic Updates ----------------- The manual update process is sufficient when updating a small number of modules for a specific effort. Automatic tooling is helpful when updating a larger number of modules and for scheduled updates. A number of tools have been provided by the Puppet user community to assist with this. You are encouraged to review each tool before using them, and use of these tools is at your own risk. * [ra10ke](https://rubygems.org/gems/ra10ke) ([project page](https://github.com/tampakrap/ra10ke/)) - A set of rake tasks to scan the Puppetfile for out of date modules * [puppetfile-updater](https://rubygems.org/gems/puppetfile-updater/) ([project page](https://github.com/camptocamp/puppetfile-updater)) - A set of rake tasks to scan the Puppetfile, find newer versions, update the Puppetfile, and commit the changes. * [generate-puppetfile](https://rubygems.org/gems/generate-puppetfile) ([project page](https://github.com/rnelson0/puppet-generate-puppetfile)) - A command line tool to generate raw Puppetfiles, update existing Puppetfiles, and optionally generate a `.fixtures.yml` file.
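Whichever approach you choose, the updated Puppetfile can be validated and installed locally with the `r10k puppetfile` subcommands described in the Puppetfile documentation, for example:

    r10k puppetfile check
    r10k puppetfile install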
r10k-4.0.2/integration/000077500000000000000000000000001460033767200146255ustar00rootroot00000000000000r10k-4.0.2/integration/Gemfile000066400000000000000000000013401460033767200161160ustar00rootroot00000000000000source ENV['GEM_SOURCE'] || 'https://artifactory.delivery.puppetlabs.net/artifactory/api/gems/rubygems/' def location_for(place, fake_version = nil) if place =~ /^(git:[^#]*)#(.*)/ [fake_version, { :git => $1, :branch => $2, :require => false }].compact elsif place =~ /^file:\/\/(.*)/ ['>= 0', { :path => File.expand_path($1), :require => false }] else [place, { :require => false }] end end gem 'beaker', *location_for(ENV['BEAKER_VERSION'] || '~> 4.5') gem 'beaker-pe', '~> 3.0' gem 'beaker-answers' gem 'beaker-hostgenerator', *location_for(ENV['BEAKER_HOSTGENERATOR_VERSION'] || '~> 1.1') gem 'beaker-abs', *location_for(ENV['BEAKER_ABS_VERSION'] || '~> 0.4') gem 'rototiller', '= 0.1.0' gem 'beaker-qa-i18n' r10k-4.0.2/integration/README.mkd000066400000000000000000000026351460033767200162650ustar00rootroot00000000000000Integration Tests =========================== This folder contains integration tests for the r10k project. These tests were originally written by the QA team at Puppet Labs and are actively maintained by the QA team. Feel free to contribute tests to this folder as long as they are written with [Beaker](https://github.com/puppetlabs/beaker) and follow the guidelines below. ## Integration? The r10k project already contains RSpec tests and you might be wondering why there is a need to have a set of tests separate from those tests. At Puppet Labs we define an "integration" test as: > Validating the system state and/or side effects while completing a complete life cycle of user stories using a > system. This type of test crosses the boundary of a discrete tool in the process of testing a defined user > objective that utilizes a system composed of integrated components. What this means for this project is that we will install and configure all infrastructure needed in a real-world r10k environment. ## Running Tests Included in this folder under the "test_run_scripts" sub-folder are simple Bash scripts that will run suites of Beaker tests. These scripts utilize environment variables for specifying test infrastructure. For security reasons we do not provide examples from the Puppet Labs testing environment. ## Documentation Each sub-folder contains a "README.mkd" that describes the content found in the sub-folder.
r10k-4.0.2/integration/Rakefile000066400000000000000000000051301460033767200162710ustar00rootroot00000000000000require 'rototiller' require 'fileutils' namespace :ci do namespace :test do desc 'Tests at the component level for the pe-r10k project' task :component => [:check_pe_r10k_env_vars] do Rake::Task[:beaker].invoke end end end desc 'Run tests against a packaged PE build' task :acceptance do @acceptance_pre_suite = 'pre-suite' Rake::Task[:beaker].invoke end desc 'The acceptance tests for r10k, run in the beaker framework' rototiller_task :beaker => [:beaker_hostgenerator] do |t| common_setup = <<-EOS pre-suite/00_pe_install.rb, component/pre-suite/05_install_dev_r10k.rb, pre-suite/10_git_config.rb, pre-suite/20_pe_r10k.rb, EOS common_setup.gsub!("\n", '') flags = [ {:name => '--hosts', :default => 'configs/generated', :override_env => 'BEAKER_HOST'}, {:name => '--keyfile', :default => "#{ENV['HOME']}/.ssh/id_rsa-acceptance", :override_env => 'BEAKER_KEYFILE'}, {:name => '--load-path', :default => 'lib'}, {:name => '--pre-suite', :default => @acceptance_pre_suite || common_setup, :override_env => 'BEAKER_PRE_SUITE'}, {:name => '--tests', :default => 'tests', :override_env => 'BEAKER_TESTS'}, {:name => '--preserve-hosts', :default => 'onfail', :override_env => 'BEAKER_PRESERVE_HOSTS'}, ] t.add_flag(*flags) t.add_env do |env| env.name = 'PE_FAMILY' env.message = 'The puppet enterprise major branch to install from' end t.add_env do |env| env.name = 'pe_dist_dir' env.message = 'The location to download PE from, for example "https://artifactory.delivery.puppetlabs.net/artifactory/generic_enterprise__local/20XX.X/ci-ready"' ENV['pe_dist_dir'] ||= "https://artifactory.delivery.puppetlabs.net/artifactory/generic_enterprise__local/#{ENV['PE_FAMILY']}/ci-ready" end t.add_env do |env| env.name = 'GIT_PROVIDER' env.message = 'The git provider that r10k should use on a SUT' end t.add_command({:name => 'beaker --debug', :override_env => 'BEAKER_EXECUTABLE'}) end desc 'Generate a host configuration used by Beaker' rototiller_task :beaker_hostgenerator do |t| if ENV['BEAKER_HOST'].nil? 
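    # Note: a host configuration is only generated when BEAKER_HOST is unset;
    # beaker-hostgenerator writes its output to configs/generated, which matches
    # the default value of the --hosts flag in the :beaker task above.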
FileUtils.mkdir_p 'configs' t.add_command do |c| c.name = 'beaker-hostgenerator' c.argument = '> configs/generated' end # This is a hack :( t.add_flag(:name => '', :default => 'centos7-64mdca-64.fa', :override_env => 'TEST_TARGET') t.add_flag(:name => '--global-config', :default => '{forge_host=forgeapi.puppet.com}', :override_env => 'BHG_GLOBAL_CONFIG') end end rototiller_task :check_pe_r10k_env_vars do |t| t.add_env(:name => 'SHA', :message => 'The sha for pe-r10k') end r10k-4.0.2/integration/component/000077500000000000000000000000001460033767200166275ustar00rootroot00000000000000r10k-4.0.2/integration/component/pre-suite/000077500000000000000000000000001460033767200205445ustar00rootroot00000000000000r10k-4.0.2/integration/component/pre-suite/05_install_dev_r10k.rb000066400000000000000000000007551460033767200245450ustar00rootroot00000000000000test_name "Install PE r10k" do step "Install PE r10k" do variant, version, arch, codename = master['platform'].to_array if variant == 'ubuntu' && version.split('.').first.to_i >= 18 on master, "echo 'Acquire::AllowInsecureRepositories \"true\";' > /etc/apt/apt.conf.d/90insecure" end install_dev_repos_on('pe-r10k', master, ENV['SHA'], '/tmp/repo_configs', {:dev_builds_url => 'http://builds.delivery.puppetlabs.net'}) master.install_package('pe-r10k') end end r10k-4.0.2/integration/files/000077500000000000000000000000001460033767200157275ustar00rootroot00000000000000r10k-4.0.2/integration/files/README.mkd000077500000000000000000000002101460033767200173550ustar00rootroot00000000000000Files =========================== This folder contains files used for testing the r10k project. The files are organized by test suite. r10k-4.0.2/integration/files/hiera.yaml000077500000000000000000000002101460033767200176770ustar00rootroot00000000000000--- :backends: - yaml :hierarchy: - "%{environment}" :yaml: :datadir: "/etc/puppetlabs/puppet/environments/%{environment}/hiera" r10k-4.0.2/integration/files/modules/000077500000000000000000000000001460033767200173775ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/helloworld/000077500000000000000000000000001460033767200215525ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/helloworld/manifests/000077500000000000000000000000001460033767200235435ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/helloworld/manifests/init.pp000077500000000000000000000001451460033767200250520ustar00rootroot00000000000000class helloworld { notify { "Hello world!": message => "I am in the ${environment} environment"} } r10k-4.0.2/integration/files/modules/hieratest/000077500000000000000000000000001460033767200213675ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/hieratest/manifests/000077500000000000000000000000001460033767200233605ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/hieratest/manifests/init.pp000077500000000000000000000001531460033767200246660ustar00rootroot00000000000000class hieratest ($hiera_message = "default text") { notify { "Hiera test!": message => $hiera_message} } r10k-4.0.2/integration/files/modules/unicode/000077500000000000000000000000001460033767200210255ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/unicode/files/000077500000000000000000000000001460033767200221275ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/unicode/files/pretend_unicode000077500000000000000000000000321460033767200252170ustar00rootroot00000000000000Ha ha ha! I am in Korean! 
r10k-4.0.2/integration/files/modules/unicode/manifests/000077500000000000000000000000001460033767200230165ustar00rootroot00000000000000r10k-4.0.2/integration/files/modules/unicode/manifests/init.pp000077500000000000000000000002211460033767200243200ustar00rootroot00000000000000class unicode { file { '/tmp/unicode.file': ensure => file, source => "puppet:///modules/unicode/굢챣샃뻧븣럩윕컾ë¾ê¹©" } } r10k-4.0.2/integration/files/pre-suite/000077500000000000000000000000001460033767200176445ustar00rootroot00000000000000r10k-4.0.2/integration/files/pre-suite/git_config.pp.erb000077500000000000000000000004761460033767200230760ustar00rootroot00000000000000$git_package = $facts['os']['family'] ? { 'Debian' => 'git-core', default => 'git' } class { 'git': package_name => $git_package } -> git::config { 'user.name': value => 'Tester', } -> git::config { 'user.email': value => 'tester@puppetlabs.com', } -> file { '<%= git_repo_path %>': ensure => directory } r10k-4.0.2/integration/files/pre-suite/prod_env.config000077500000000000000000000001731460033767200226530ustar00rootroot00000000000000# Puppet Enterprise requires $basemodulepath modulepath = site:dist:modules:$basemodulepath manifest = manifests/site.pp r10k-4.0.2/integration/files/r10k_conf.yaml.erb000077500000000000000000000003411460033767200211450ustar00rootroot00000000000000cachedir: '/var/cache/r10k' git: provider: '<%= git_provider %>' sources: <% for source in sources %> <%= source.repo_name %>: basedir: "<%= env_path %>" remote: "<%= source.control_remote %>" <% end %> r10k-4.0.2/integration/lib/000077500000000000000000000000001460033767200153735ustar00rootroot00000000000000r10k-4.0.2/integration/lib/README.mkd000077500000000000000000000001261460033767200170270ustar00rootroot00000000000000Lib =========================== This folder contains Ruby files that extends Beaker. r10k-4.0.2/integration/lib/git_utils.rb000077500000000000000000000134401460033767200177300ustar00rootroot00000000000000# Execute a git command on a host. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +git_sub_command+ - The git sub-command to execute including arguments. (The 'git' command is assumed.) # * +git_repo_path+ - The path to the git repository on the target host. # * +opts+ - Options to alter execution. # # ==== Returns # # +nil+ # # ==== Examples # # git_on(master, 'add file.txt', '~/git_repo') def git_on(host, git_sub_command, git_repo_path, opts = {}) git_command = "git --git-dir=#{git_repo_path}/.git --work-tree=#{git_repo_path} #{git_sub_command}" on(host, git_command, opts) end # Add all uncommitted files located in a repository. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +nil+ # # ==== Examples # # git_add_everything(master, '~/git_repo') def git_add_everything(host, git_repo_path) git_on(host, "add #{git_repo_path}/*", git_repo_path) end # Push branch to origin remote. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +branch+ - The branch to push. # * +git_repo_path+ - The path to the git repository on the target host. 
# # ==== Returns # # +nil+ # # ==== Examples # # git_push(master, 'production', '~/git_repo') def git_push(host, branch, git_repo_path) git_on(host, "push origin #{branch}", git_repo_path) end # Commit changes and push branch to origin remote. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +branch+ - The branch to push. # * +message+ - A single-line commit message. (Don't quote message!) # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +nil+ # # ==== Examples # # git_commit_push(master, 'production', 'Awesome stuff!', '~/git_repo') def git_commit_push(host, branch, message, git_repo_path) git_on(host, "commit -m \"#{message}\"", git_repo_path) git_push(host, branch, git_repo_path) end # Add all uncommitted files located in a repository, commit changes and push branch to origin remote. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +branch+ - The branch to push. # * +message+ - A single-line commit message. (Don't quote message!) # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +nil+ # # ==== Examples # # git_add_commit_push(master, 'production', 'Awesome stuff!', '~/git_repo') def git_add_commit_push(host, branch, message, git_repo_path) git_add_everything(host, git_repo_path) git_commit_push(host, branch, message, git_repo_path) end # Get the last commit SHA. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +string+ - The SHA of the last commit. # # ==== Examples # # last_commit = git_last_commit(master, '~/git_repo') def git_last_commit(host, git_repo_path) sha_regex = /commit (\w{40})/ return sha_regex.match(git_on(host, 'log', git_repo_path).stdout)[1] end # Hard reset the git repository to a specific commit. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +commit_sha+ - The reset HEAD to this commit SHA. # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +nil+ # # ==== Examples # # git_reset_hard(master, 'ff81c01c5', '~/git_repo') def git_reset_hard(host, commit_sha, git_repo_path) git_on(host, "reset --hard #{commit_sha}", git_repo_path) end # Create a bare Git repository. # # ==== Attributes # # * +host+ - The Puppet host on which to create a bare git repo. # * +git_repo_parent_path+ - The parent path that contains the desired Git repository. # * +git_repo_name+ - The name of the repository. # # ==== Returns # # +string+ - The path to the newly created Git repository. # # ==== Examples # # git_init_bare_repo(master, '/git/repos', 'environments') def git_init_bare_repo(host, git_repo_parent_path, git_repo_name) #Init git_repo_path = File.join(git_repo_parent_path, "#{git_repo_name}.git") #Initialize bare Git repository on(host, "mkdir -p #{git_repo_path}") on(host, "git init --bare #{git_repo_path}") return git_repo_path end # Clone a Git repository. # # ==== Attributes # # * +host+ - The Puppet host on which to create a bare git repo. # * +git_clone_path+ - The destination path for the git clone. # * +git_source+ - The origin from which to clone. 
# # ==== Returns # # +nil+ # # ==== Examples # # git_clone_repo(master, '~/repos/r10k', '/git/repos/environments.git') def git_clone_repo(host, git_clone_path, git_source) on(host, "git clone #{git_source} #{git_clone_path}") end # Create a bare Git repository and then clone the repository. # # ==== Attributes # # * +host+ - The Puppet host on which to create a bare git repo. # * +git_repo_parent_path+ - The parent path that contains the desired Git repository. # * +git_repo_name+ - The name of the repository. # * +git_clone_path+ - The destination path for the git clone. # # ==== Returns # # +string+ - The path to the newly created Git repository. # # ==== Examples # # git_init_bare_repo_and_clone(master, '/git/repos', 'environments', '~/repos/r10k') def git_init_bare_repo_and_clone(host, git_repo_parent_path, git_repo_name, git_clone_path) origin_git_repo_path = git_init_bare_repo(host, git_repo_parent_path, git_repo_name) git_clone_repo(host, git_clone_path, origin_git_repo_path) end r10k-4.0.2/integration/lib/master_manipulator.rb000077500000000000000000000130351460033767200216330ustar00rootroot00000000000000# Create a "site.pp" file with file bucket enabled. Also, allow # the creation of a custom node definition or use the 'default' # node definition. # # ==== Attributes # # * +master_certname+ - Certificate name of Puppet master. # * +manifest+ - A Puppet manifest to inject into the node definition. # * +node_def_name+ - A node definition pattern or name. # # ==== Returns # # +string+ - A combined manifest with node definition containing input manifest # # ==== Examples # # site_pp = create_site_pp("puppetmaster", '', node_def_name='agent') def create_site_pp(master_certname, manifest='', node_def_name='default') default_def = <<-MANIFEST node default { } MANIFEST node_def = <<-MANIFEST node #{node_def_name} { #{manifest} } MANIFEST if node_def_name != 'default' node_def = "#{default_def}\n#{node_def}" end site_pp = <<-MANIFEST filebucket { 'main': server => '#{master_certname}', path => false, } File { backup => 'main' } #{node_def} MANIFEST return site_pp end # Read a Puppet manifest file and inject the content into a # "default" node definition. (Used mostly to overide site.pp) # # ==== Attributes # # * +manifest_path+ - The file path to target manifest. # * +master_certname+ - Certificate name of Puppet master. # # ==== Returns # # +string+ - A combined manifest with node definition containg input manifest # # ==== Examples # # site_pp = create_node_manifest("/tmp/test.pp", "master") def create_node_manifest(manifest_path, master_certname, node_def_name='default') manifest = File.read(manifest_path) site_pp = <<-MANIFEST filebucket { 'main': server => '#{master_certname}', path => false, } File { backup => 'main' } node default { #{manifest} } MANIFEST return site_pp end # Set mode, owner and group on a remote path. # # ==== Attributes # # * +host+ - The remote host containing the target path. # * +path+ - The path to set mode, user and group upon. # * +mode+ - The desired mode to set on the path in as a string. # * +owner+ - The owner to set on the path. (Puppet user if not specified.) # * +group+ - The group to set on the path. (Puppet group if not specified.) # # ==== Returns # # nil # # ==== Examples # # set_perms_on_remote(master, "/tmp/test/site.pp", "777") def set_perms_on_remote(host, path, mode, owner=nil, group=nil) if (owner.nil?) owner = on(host, puppet('config', 'print', 'user')).stdout.rstrip end if (group.nil?) 
group = on(host, puppet('config', 'print', 'group')).stdout.rstrip end on(host, "chmod -R #{mode} #{path}") on(host, "chown -R #{owner}:#{group} #{path}") end # Inject temporary "site.pp" onto target host. This will also create # a "modules" folder in the target remote directory. # # ==== Attributes # # * +master+ - The target master for injection. # * +site_pp_path+ - A path on the remote host into which the site.pp will be injected. # * +manifest+ - The manifest content to inject into "site.pp" to the host target path. # # ==== Returns # # nil # # ==== Examples # # site_pp = inject_site_pp(master, "/tmp/test/site.pp", manifest) def inject_site_pp(master, site_pp_path, manifest) site_pp_dir = File.dirname(site_pp_path) create_remote_file(master, site_pp_path, manifest) set_perms_on_remote(master, site_pp_dir, "777") end # Create a temporary directory environment and inject a "site.pp" for the target environment. # # ==== Attributes # # * +master+ - The master on which to create a new Puppet environment. # * +env_root_path+ - The base path on the master that contains all environments. # * +env_seed_name+ - The seed name to use for generating an environment name. # * +manifest+ - The manifest content to inject into "site.pp" of the newly created environment. # # ==== Returns # # +string+ - The environment name that was generated. # # ==== Examples # # temp_env_name = create_temp_dir_env(master, "/tmp/test/site.pp", "stuff", manifest) def create_temp_dir_env(master, env_root_path, env_seed_name, manifest) env_name = "#{env_seed_name}" + rand(36**16).to_s(36) env_path = "#{env_root_path}/#{env_name}" env_site_pp_path = "#{env_path}/manifests/site.pp" on(master, "mkdir -p #{env_path}/manifests #{env_path}/modules") set_perms_on_remote(master, env_path, "777") inject_site_pp(master, env_site_pp_path, manifest) return env_name end # Restart the puppet server and wait for it to come back up # ==== Attributes # *+host+ - the host that this should operate on # *+opts+ - an options hash - not required # *+:timeout+ - the amount of time in seconds to wait for success # *+:frequency+ - The time to wait between retries # # Raises a standard error if the wait is uncessfull # # ==== Example # restart_puppet_server(master) # restart_puppet_server(master, {:time_out => 200, :frequency => 10}) def restart_puppet_server(host, opts = {}) on(host, "puppet resource service pe-puppetserver ensure=stopped") on(host, "puppet resource service pe-puppetserver ensure=running") masterHostName = on(host, "hostname").stdout.chomp opts[:time_out] ||= 100 opts[:frequency] ||= 5 i = 0 # -k to ignore HTTPS error that isn't relevant to us curl_call = "-I -k https://#{masterHostName}:8140/production/certificate_statuses/all" while i < opts[:time_out] do sleep opts[:frequency] i += 1 exit_code = curl_on(host, curl_call, :acceptable_exit_codes => [0,1,7]).exit_code # Exit code 7 is "connection refused" if exit_code != '7' sleep 20 puts 'Restarting the Puppet Server was successful!' return end end raise StandardError, 'Attempting to restart the puppet server was not successful in the time alloted.' end r10k-4.0.2/integration/lib/r10k_utils.rb000077500000000000000000000164521460033767200177300ustar00rootroot00000000000000require 'git_utils' # Retrieve the file path for the "r10k.yaml" configuration file. # # ==== Attributes # # * +master+ - The Puppet master on which r10k is installed. # # ==== Returns # # +string+ - Absolute file path to "r10k.yaml" config file. 
# # ==== Examples # # get_r10k_config_file_path(master) def get_r10k_config_file_path(master) confdir = on(master, puppet('config print confdir')).stdout.rstrip return File.join(File.dirname(confdir), 'r10k', 'r10k.yaml') end # Verify that a pristine "production" environment exists on the master. # (And only the "production" environment!) # # ==== Attributes # # * +master+ - The Puppet master on which to verify the "production" environment. # # ==== Returns # # +nil+ # # ==== Examples # # verify_production_environment(master) def verify_production_environment(master) environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_md5sum_path = File.join(environment_path, 'production', 'manifests', '.site_pp.md5') #Verify MD5 sum of "site.pp" on(master, "md5sum -c #{prod_env_md5sum_path}") #Verify that "production" is the only environment available. on(master, "test `ls #{environment_path} | wc -l` -eq 1") on(master, "ls #{environment_path} | grep \"production\"") end # Revert the Puppet environments back to a pristine 'production' branch while deleting all other branches. # # ==== Attributes # # * +host+ - One or more hosts to act upon, or a role (String or Symbol) that identifies one or more hosts. # * +commit_sha+ - The reset 'production' branch HEAD to this commit SHA. # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +nil+ # # ==== Examples # # r10k_revert_environment(master, 'ff81c01c5', '~/git_repo') def r10k_revert_environment(host, commit_sha, git_repo_path) #Reset 'production' branch to know clean state. git_on(host, 'checkout production', git_repo_path) git_reset_hard(host, commit_sha, git_repo_path) #Get all branches except for 'production'. local_branches = git_on(host, 'branch | grep -v "production" | xargs', git_repo_path).stdout() #Delete all other branches except for 'production' locally and remotely. if local_branches != "\n" git_on(host, "branch -D #{local_branches}", git_repo_path) end #Force push changes to remote. git_on(host, 'push origin --mirror --force', git_repo_path) git_on(host, 'push origin --mirror --force', git_repo_path) #Remove r10k cache cachedir = '/var/cache/r10k' on(master, "rm -rf #{cachedir}") end # Clean-up the r10k environment on the master to bring it back to a known good state. # # ==== Attributes # # * +host+ - The Puppet master on which to verify the "production" environment. # * +commit_sha+ - The reset HEAD to this commit SHA. # * +git_repo_path+ - The path to the git repository on the target host. # # ==== Returns # # +nil+ # # ==== Examples # # clean_up_r10k(master, 'ff81c01c5', '~/git_repo') def clean_up_r10k(master, commit_sha, git_repo_path) environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_modules_path = File.join(environment_path, 'production', 'modules') prod_env_site_path = File.join(environment_path, 'production', 'site') r10k_fqp = get_r10k_fqp(master) step 'Reset Git Repo to Known Good State' r10k_revert_environment(master, commit_sha, git_repo_path) # RK-297 workaround. Without this, tests will fail with an error like the following: # [2017-06-02 11:11:46 - ERROR] Object not found - no match for id (60e4ea82c9fdf86974a13f78b839a497325de04b) # This cleanup should not be necessary when RK-297 has been resolved. 
# step 'Remove git directories from codedir to prevent cache errors' on(master, "find #{environment_path } -name .git -type d -print0 | xargs -r0 -- rm -r") step 'Restore Original "production" Environment' on(master, "#{r10k_fqp} deploy environment -v") step 'Verify "production" Environment is at Original State' verify_production_environment(master) step 'Remove Any Modules from the "production" Environment' on(master, "rm -rf #{prod_env_modules_path}/*") on(master, "rm -rf #{prod_env_site_path}/*") end # Create a new r10k Git source that is copied from the current "production" environment. # # ==== Attributes # # * +master+ - The Puppet master on which to create a new Git source. # * +git_repo_parent_path+ - The parent path that contains the desired Git repository. # * +git_repo_name+ - The name of the repository. # * +git_clone_path+ - The destination path for the git clone. # * +env_name+ - The initial branch name (environment) for first commit. # * +deploy?+ - A flag indicating if r10k environment deployment should be kicked off after cloning. # # ==== Returns # # +string+ - The path to the newly created Git repository. # # ==== Examples # # init_r10k_source_from_prod(master, '/git/repos', 'environments', '~/repos/r10k', 'test', deploy=true) def init_r10k_source_from_prod(master, git_repo_parent_path, git_repo_name, git_clone_path, env_name, deploy=false) #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip prod_env_path = File.join(env_path, 'production') r10k_fqp = get_r10k_fqp(master) local_files_root_path = ENV['FILES'] || 'files' prod_env_config_path = 'pre-suite/prod_env.config' prod_env_config = File.read(File.join(local_files_root_path, prod_env_config_path)) #Create Git origin repo and clone. git_init_bare_repo_and_clone(master, git_repo_parent_path, git_repo_name, git_clone_path) #Copy current contents of production environment to the git clone path on(master, "cp -r #{prod_env_path}/* #{git_clone_path}") #Create hidden files in the "site" and "modules" folders so that git copies the directories. on(master, "mkdir -p #{git_clone_path}/modules #{git_clone_path}/site") on(master, "touch #{git_clone_path}/modules/.keep;touch #{git_clone_path}/site/.keep") #Create MD5 sum file for the "site.pp" file. on(master, "md5sum #{git_clone_path}/manifests/site.pp > #{git_clone_path}/manifests/.site_pp.md5") #Add environment config that specifies module lookup path for production. create_remote_file(master, "#{git_clone_path}/environment.conf", prod_env_config) git_on(master, "add #{git_clone_path}/*", git_clone_path) git_on(master, "commit -m \"Add #{env_name} environment.\"", git_clone_path) git_on(master, "branch -m #{env_name}", git_clone_path) git_on(master, "push -u origin #{env_name}", git_clone_path) #Attempt to deploy environments. if deploy on(master, "#{r10k_fqp} deploy environment -v") end end # Get the fully qualified path for r10k. # # ==== Arguments # # * +host+ - The Beaker host to introspect # # ==== Returns # # +string+ - The fully qualified path for r10k # # ==== Examples # # get_r10k_fqp(master) def get_r10k_fqp(host) puppet_version = get_puppet_version(host) if puppet_version < 4.0 fqp = '/opt/puppet/bin/r10k' else fqp = '/opt/puppetlabs/puppet/bin/r10k' end fqp end # Get the version of puppet that is installed on a host. 
# # ==== Arguments # # * +host+ - The Beaker host to introspect # # ==== Returns # # +float+ - the version of puppet as a float # # ==== Examples # # get_puppet_version(master) def get_puppet_version(host) on(host, puppet('--version')) do |result| @version = result.stdout.match(/(\d){1}.(\d){1,2}.(\d){1,2}/)[0].to_f end @version end r10k-4.0.2/integration/manifests/000077500000000000000000000000001460033767200166165ustar00rootroot00000000000000r10k-4.0.2/integration/manifests/README.mkd000077500000000000000000000002241460033767200202510ustar00rootroot00000000000000Manifests =========================== This folder contains manifests used for testing the r10k project. The manifests are organized by test suite. r10k-4.0.2/integration/pre-suite/000077500000000000000000000000001460033767200165425ustar00rootroot00000000000000r10k-4.0.2/integration/pre-suite/00_pe_install.rb000077500000000000000000000003571460033767200215300ustar00rootroot00000000000000require 'beaker-pe' test_name 'CODEMGMT-20 - C48 - Install Puppet Enterprise' step 'Install PE' install_pe step 'Stop puppet service to avoid running into existing agent runs' on(hosts, puppet('resource service puppet ensure=stopped')) r10k-4.0.2/integration/pre-suite/10_git_config.rb000077500000000000000000000034211460033767200215020ustar00rootroot00000000000000require 'erb' require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-20 - C59120 - Install and Configure Git for r10k' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip prod_env_path = File.join(env_path, 'production') git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_control_remote_head_path = File.join(git_control_remote, 'HEAD') git_environments_path = File.join('/root', git_repo_name) local_files_root_path = ENV['FILES'] || 'files' git_manifest_template_path = File.join(local_files_root_path, 'pre-suite', 'git_config.pp.erb') git_manifest = ERB.new(File.read(git_manifest_template_path)).result(binding) step 'Get PE Version' pe_version = get_puppet_version(master) fail_test('This pre-suite requires PE 3.7 or above!') if pe_version < 3.7 #Setup step 'Read module path' on(master, puppet('config print basemodulepath')) do |result| (result.stdout.include? ':') ? 
separator = ':' : separator = ';' @module_path = result.stdout.split(separator).first end step 'Install "git" Module' on(master, puppet("module install puppetlabs-git --modulepath #{@module_path}")) step 'Install and Configure Git' on(master, puppet('apply'), :stdin => git_manifest, :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Create "production" Environment on Git' init_r10k_source_from_prod(master, git_repo_path, git_repo_name, git_environments_path, 'production') step 'Change Default Branch to "production" on Git Control Remote' create_remote_file(master, git_control_remote_head_path, "ref: refs/heads/production\n") on(master, "chmod 644 #{git_control_remote_head_path}") r10k-4.0.2/integration/pre-suite/20_pe_r10k.rb000077500000000000000000000031531460033767200206360ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-21 - C59119 - Configure r10k for Puppet Enterprise' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip prod_env_path = File.join(env_path, 'production') r10k_config_path = get_r10k_config_file_path(master) git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) step 'Get PE Version' pe_version = get_puppet_version(master) fail_test('This pre-suite requires PE 3.7 or above!') if pe_version < 3.7 #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Setup step 'Remove Current Puppet "production" Environment' on(master, "rm -rf #{prod_env_path}") step 'Configure r10k' create_remote_file(master, r10k_config_path, r10k_conf) on(master, "chmod 644 #{r10k_config_path}") step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") step 'Disable Environment Caching on Master' on(master, puppet('config set environment_timeout 0 --section main')) #This should be temporary until we get a better solution. step 'Disable Node Classifier' on(master, puppet('config', 'set node_terminus plain', '--section master')) step 'Restart the Puppet Server Service' restart_puppet_server(master) step 'Run Puppet Agent on All Nodes' on(agents, puppet('agent', '--test', '--environment production')) r10k-4.0.2/integration/pre-suite/README.mkd000077500000000000000000000002651460033767200202020ustar00rootroot00000000000000Pre-suite =========================== This folder contains the [Beaker](https://github.com/puppetlabs/beaker) pre-suite tasks that configures test test infrastructure for testing. 
r10k-4.0.2/integration/tests/000077500000000000000000000000001460033767200157675ustar00rootroot00000000000000r10k-4.0.2/integration/tests/Puppetfile/000077500000000000000000000000001460033767200201045ustar00rootroot00000000000000r10k-4.0.2/integration/tests/Puppetfile/HTTP_PROXY_affects_forge_source.rb000066400000000000000000000041251460033767200264500ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-730 - C97981 - HTTP_PROXY affects git source in puppetfile' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" puppetfile =<<-EOS mod 'puppetlabs/apache', '0.10.0' EOS proxy_env_value = 'http://iloveferrits.net:3219' #In-line files r10k_conf = <<-CONF proxy: 'http://ilovedogs.com:3128' cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" forge: proxy: "http://ilovecats.net:3128" CONF teardown do master.clear_env_var('HTTP_PROXY') step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'cleanup r10k' clean_up_r10k(master, last_commit, git_environments_path) end master.add_env_var('HTTP_PROXY', proxy_env_value) step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy Puppetfile to "production" Environment Git Repo' create_remote_file(master, "#{git_environments_path}/Puppetfile", puppetfile) step 'Push Changes' git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) #test on(master, "#{r10k_fqp} deploy environment -p", :accept_all_exit_codes => true) do |r| regex = /using proxy http:\/\/ilovecats\.net:3128/i assert(r.exit_code == 1, 'expected error code was not observed') assert_match(regex, r.stderr, 'The expected error message was not observed' ) end r10k-4.0.2/integration/tests/Puppetfile/HTTP_PROXY_affects_git_source.rb000066400000000000000000000042551460033767200261350ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-730 - C97982 - HTTP_PROXY affects git source in puppetfile' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" puppetfile =<<-EOS mod 'motd', :git => 'https://github.com/puppetlabs/puppetlabs-motd', :branch => 'main' EOS proxy_env_value = 'http://ferritsarebest.net:3219' #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: 
"#{env_path}" remote: "#{git_control_remote}" CONF teardown do master.clear_env_var('HTTPS_PROXY') step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'cleanup r10k' clean_up_r10k(master, last_commit, git_environments_path) end master.add_env_var('HTTPS_PROXY', proxy_env_value) step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy Puppetfile to "production" Environment Git Repo' create_remote_file(master, "#{git_environments_path}/Puppetfile", puppetfile) step 'Push Changes' git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) #test on(master, "#{r10k_fqp} deploy environment -p", :accept_all_exit_codes => true) do |r| # Rugged as of 0.28 has a different error message than shellgit regex = /((failed to resolve address for)|(Could not resolve proxy:)) ferritsarebest\.net/ assert(r.exit_code == 1, 'expected error code was not observed') assert_match(regex, r.stderr, 'The expected error message was not observed' ) end r10k-4.0.2/integration/tests/README.mkd000077500000000000000000000002121460033767200174170ustar00rootroot00000000000000Tests =========================== This folder contains the [Beaker](https://github.com/puppetlabs/beaker) test files organized by suite. r10k-4.0.2/integration/tests/basic_functionality/000077500000000000000000000000001460033767200220205ustar00rootroot00000000000000r10k-4.0.2/integration/tests/basic_functionality/basic_deployment.rb000066400000000000000000000166211460033767200256740ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'Basic Environment Deployment Workflows' # This isn't a block because we want to use the local variables throughout the file step 'init' @env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) control_repo_gitdir = '/git_repos/environments.git' control_repo_worktree = '/root/environments' last_commit = git_last_commit(master, control_repo_worktree) git_provider = ENV['GIT_PROVIDER'] config_path = get_r10k_config_file_path(master) config_backup_path = "#{config_path}.bak" puppetfile1 =<<-EOS mod 'puppetlabs/apache', '0.10.0' mod 'puppetlabs/stdlib', '8.0.0' EOS r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{@env_path}" remote: "#{control_repo_gitdir}" deploy: purge_levels: ['deployment','environment','puppetfile'] CONF def and_stdlib_is_correct metadata_path = "#{@env_path}/production/modules/stdlib/metadata.json" on(master, "test -f #{metadata_path}", accept_all_exit_codes: true) do |result| assert(result.exit_code == 0, 'stdlib content has been inappropriately purged') end metadata_info = JSON.parse(on(master, "cat #{metadata_path}").stdout) assert(metadata_info['version'] == '8.0.0', 'stdlib deployed to wrong version') end teardown do on(master, "mv #{config_backup_path} #{config_path}") clean_up_r10k(master, last_commit, control_repo_worktree) end step 'Set up r10k and control repo' do # Backup and replace r10k config on(master, "mv #{config_path} #{config_backup_path}") create_remote_file(master, config_path, r10k_conf) # Place our Puppetfile in the control repo's production branch git_on(master, 'checkout production', control_repo_worktree) 
  create_remote_file(master, "#{control_repo_worktree}/Puppetfile", puppetfile1)
  git_add_commit_push(master, 'production', 'add Puppetfile for Basic Deployment test', control_repo_worktree)

  # Ensure the production environment will be deployed anew
  on(master, "rm -rf #{@env_path}/production")
end

test_path = "#{@env_path}/production/modules/apache/metadata.json"

step 'Test initial environment deploy works' do
  on(master, "#{r10k_fqp} deploy environment production --verbose=info") do |result|
    assert(result.output =~ /.*Deploying module to .*apache.*/, 'Did not log apache deployment')
    assert(result.output =~ /.*Deploying module to .*stdlib.*/, 'Did not log stdlib deployment')
  end

  on(master, "test -f #{test_path}", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 0, 'Expected module in Puppetfile was not installed')
  end

  and_stdlib_is_correct
end

original_apache_info = JSON.parse(on(master, "cat #{test_path}").stdout)

step 'Test second run of deploy updates control repo, but leaves moduledir untouched' do
  puppetfile2 =<<-EOS
# Current latest of apache is 6.5.1 as of writing this test
mod 'puppetlabs/apache', :latest
mod 'puppetlabs/stdlib', '8.0.0'
mod 'puppetlabs/concat', '7.0.0'
EOS

  git_on(master, 'checkout production', control_repo_worktree)
  create_remote_file(master, "#{control_repo_worktree}/Puppetfile", puppetfile2)
  git_add_commit_push(master, 'production', 'add Puppetfile for Basic Deployment test', control_repo_worktree)

  on(master, "#{r10k_fqp} deploy environment production --verbose=info") do |result|
    refute(result.output =~ /.*Deploying module to .*apache.*/, 'Inappropriately updated apache')
    refute(result.output =~ /.*Deploying module to .*stdlib.*/, 'Inappropriately updated stdlib')
  end

  on(master, "test -f #{test_path}", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 0, 'Expected module content in Puppetfile was inappropriately purged')
  end

  new_apache_info = JSON.parse(on(master, "cat #{test_path}").stdout)

  on(master, "cat #{@env_path}/production/Puppetfile | grep ':latest'", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 0, 'Puppetfile not updated on subsequent r10k deploys')
  end

  assert(original_apache_info['version'] == new_apache_info['version'] && new_apache_info['version'] == '0.10.0', 'Module content updated on subsequent r10k invocations w/o providing --modules')

  on(master, "test -f #{@env_path}/production/modules/concat/metadata.json", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 1, 'Module content deployed on subsequent r10k invocation w/o providing --modules')
  end

  and_stdlib_is_correct
end

step 'Test --modules updates modules' do
  on(master, "#{r10k_fqp} deploy environment production --modules --verbose=info") do |result|
    assert(result.output =~ /.*Deploying module to .*apache.*/, 'Did not log apache deployment')
    assert(result.output =~ /.*Deploying module to .*stdlib.*/, 'Did not log stdlib deployment')
    assert(result.output =~ /.*Deploying module to .*concat.*/, 'Did not log concat deployment')
  end

  on(master, "test -f #{test_path}", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 0, 'Expected module content in Puppetfile was inappropriately purged')
  end

  on(master, "test -f #{@env_path}/production/modules/concat/metadata.json", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 0, 'New module content was not deployed when providing --modules')
  end

  new_apache_info = JSON.parse(on(master, "cat #{test_path}").stdout)
  apache_major_version = new_apache_info['version'].split('.').first.to_i
  assert(apache_major_version > 5, 'Module not updated correctly using --modules')

  and_stdlib_is_correct
end

step 'Test --modules --incremental deploys changed & dynamic modules, but not unchanged, static modules' do
  puppetfile3 =<<-EOS
# Current latest of apache is 6.5.1 as of writing this test
mod 'puppetlabs/apache', :latest
mod 'puppetlabs/stdlib', '8.0.0'
mod 'puppetlabs/concat', '7.1.0'
EOS

  git_on(master, 'checkout production', control_repo_worktree)
  create_remote_file(master, "#{control_repo_worktree}/Puppetfile", puppetfile3)
  git_add_commit_push(master, 'production', 'add Puppetfile for Basic Deployment test', control_repo_worktree)

  on(master, "#{r10k_fqp} deploy environment production --modules --incremental --verbose=debug1") do |result|
    assert(result.output =~ /.*Deploying module to .*apache.*/, 'Did not log apache deployment')
    assert(result.output =~ /.*Deploying module to .*concat.*/, 'Did not log concat deployment')
    assert(result.output =~ /.*Not updating module stdlib, assuming content unchanged.*/, 'Did not log notice of skipping stdlib')
  end

  on(master, "test -f #{test_path}", accept_all_exit_codes: true) do |result|
    assert(result.exit_code == 0, 'Expected module content in Puppetfile was inappropriately purged')
  end

  new_apache_info = JSON.parse(on(master, "cat #{test_path}").stdout)
  apache_major_version = new_apache_info['version'].split('.').first.to_i
  assert(apache_major_version > 5, 'Module not updated correctly using --modules & --incremental')

  concat_info = JSON.parse(on(master, "cat #{@env_path}/production/modules/concat/metadata.json").stdout)
  concat_minor_version = concat_info['version'].split('.')[1].to_i
  assert(concat_minor_version == 1, 'Module not updated correctly using --modules & --incremental')

  and_stdlib_is_correct
end
r10k-4.0.2/integration/tests/basic_functionality/negative/000077500000000000000000000000001460033767200236225ustar00rootroot00000000000000r10k-4.0.2/integration/tests/basic_functionality/negative/neg_deploy_with_invalid_r10k_yaml.rb000077500000000000000000000026401460033767200327210ustar00rootroot00000000000000require 'git_utils'
require 'r10k_utils'
require 'master_manipulator'

test_name 'CODEMGMT-84 - C59271 - Attempt to Deploy with Invalid r10k Config'

#Init
env_path = on(master, puppet('config print environmentpath')).stdout.rstrip
git_repo_path = '/git_repos'
git_control_remote = File.join(git_repo_path, 'environments.git')
git_provider = ENV['GIT_PROVIDER'] || 'shellgit'
r10k_fqp = get_r10k_fqp(master)

r10k_config_path = get_r10k_config_file_path(master)
r10k_config_bak_path = "#{r10k_config_path}.bak"

#In-line files
r10k_conf = <<-CONF
cachedir: '/var/cache/r10k'
git:
  provider: '#{git_provider}'
sources:
  broken:
    dir: "#{env_path}"
    remote: "#{git_control_remote}"
CONF

#Verification
if get_puppet_version(master) < 4.0
  error_message_regex = /ERROR.*can\'t\ convert\ nil\ into\ String/
else
  error_message_regex = /ERROR.* -> no implicit conversion of nil into String/
end

#Teardown
teardown do
  step 'Restore Original "r10k" Config'
  on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}")
end

#Setup
step 'Backup a Valid "r10k" Config'
on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}")

step 'Update the "r10k" Config'
create_remote_file(master, r10k_config_path, r10k_conf)

#Tests
step 'Attempt to Deploy via r10k'
on(master, "#{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 1) do |result|
  assert_match(error_message_regex, result.stderr, 'Expected message not found!')
end
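# Illustrative sketch (not part of the packaged test): the stanza above uses the
# unsupported `dir:` key, which is the likely trigger for the nil-to-String
# conversion error asserted on, since r10k never receives a `basedir:` value.
# Assuming the same env_path and git_control_remote variables, a valid source
# stanza, as used by the other tests in this suite, would read:
#
#   sources:
#     control:
#       basedir: "#{env_path}"
#       remote: "#{git_control_remote}"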
r10k-4.0.2/integration/tests/basic_functionality/negative/neg_deploy_with_missing_r10k_yaml.rb000066400000000000000000000015751460033767200327470ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-84 - C59270 - Attempt to Deploy with Missing r10k Configuration File' #Init r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" r10k_fqp = get_r10k_fqp(master) #Verification error_message_regex = /No configuration file given, no config file found in current directory, and no global config present/ #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 8) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/basic_functionality/negative/neg_invalid_git_provider.rb000077500000000000000000000022751460033767200312140ustar00rootroot00000000000000test_name 'CODEMGMT-137 - C64161 - Specify Invalid Value for Git Provider' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip git_repo_path = '/git_repos' git_control_remote = File.join(git_repo_path, 'environments.git') git_provider = 'invalid' r10k_fqp = get_r10k_fqp(master) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Verification error_message_regex = /should be one of.*not .invalid./ #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") end #Setup step 'Backup a Valid "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 8) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/basic_functionality/negative/negative_bad_proxy.rb000066400000000000000000000020031460033767200300130ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'RK-110 - C88671 - Specify a bad proxy to r10k' confine(:to, :platform => ['el', 'sles']) #Init master_platform = fact_on(master, 'os.family') r10k_fqp = get_r10k_fqp(master) #Verification proxy_hostname = "http://notarealhostname:3128" error_regex = /Unable to connect to.*#{proxy_hostname}/i #Teardown teardown do step 'Remove puppetfile' on(master, 'rm -rf modules/') on(master, 'rm Puppetfile') end step 'turn off the firewall' on(master, puppet("apply -e 'service {'iptables' : ensure => stopped}'")) #Tests step 'make a puppetfile' create_remote_file(master, "Puppetfile", 'mod "puppetlabs/motd"') step 'Use a r10k puppetfile' on(master, "#{r10k_fqp} puppetfile install", {:acceptable_exit_codes => [0,1,2], :environment => {"http_proxy" => proxy_hostname}}) do |result| assert(result.exit_code == 1, 'The expected exit code was not observed.') assert_match(error_regex, result.stderr, 'Did not see the 
expected error') end r10k-4.0.2/integration/tests/basic_functionality/proxy_specified_in_configuration.rb000066400000000000000000000057151460033767200311660ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'RK-110 - C87652 - Specify the proxy in the r10k.yaml' confine(:to, :platform => ['el', 'sles']) #Init master_platform = fact_on(master, 'os.family') env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] local_files_root_path = ENV['FILES'] || 'files' git_manifest_template_path = File.join(local_files_root_path, 'pre-suite', 'git_config.pp.erb') git_manifest = ERB.new(File.read(git_manifest_template_path)).result(binding) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" case master_platform when 'RedHat' pkg_manager = 'yum' when 'Suse' pkg_manager = 'zypper' end install_squid = "#{pkg_manager} install -y squid" remove_squid = "#{pkg_manager} remove -y squid" squid_log = "/var/log/squid/access.log" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" forge: proxy: "http://#{master.hostname}:3128" CONF #Verification squid_log_regex = /CONNECT forgeapi.puppet(labs)?.com:443/ #Teardown teardown do step 'Restore "git" Package' on(master, puppet('apply'), :stdin => git_manifest, :acceptable_exit_codes => [0,2]) step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) step 'Remove Squid' on(master, puppet("apply -e 'service {'squid' : ensure => stopped}'")) on(master, remove_squid) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy Puppetfile to "production" Environment Git Repo' create_remote_file(master, "#{git_environments_path}/Puppetfile", 'mod "puppetlabs/motd"') step 'Push Changes' git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) step 'Install and configure squid proxy' on(master, install_squid) step 'turn off the firewall' on(master, puppet("apply -e 'service {'iptables' : ensure => stopped}'")) step 'start squid proxy' on(master, puppet("apply -e 'service {'squid' : ensure => running}'")) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -p") step 'Read the squid logs' on(master, "cat #{squid_log}") do |result| assert_match(squid_log_regex, result.stdout, 'Proxy logs did not indicate use of the proxy.') end r10k-4.0.2/integration/tests/basic_functionality/proxy_with_puppetfile.rb000066400000000000000000000031351460033767200270200ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'RK-110 - C87651 - Specify a proxy in an environment variable' confine(:to, :platform => ['el', 'sles']) #Init master_platform = fact_on(master, 'os.family') r10k_fqp = 
get_r10k_fqp(master) case master_platform when 'RedHat' pkg_manager = 'yum' when 'Suse' pkg_manager = 'zypper' end install_squid = "#{pkg_manager} install -y squid" remove_squid = "#{pkg_manager} remove -y squid" squid_log = "/var/log/squid/access.log" #Verification squid_log_regex = /CONNECT forgeapi.puppet(labs)?.com:443/ #Teardown teardown do step 'Remove puppetfile' on(master, 'rm -rf modules/') on(master, 'rm Puppetfile') step 'Remove Squid' on(master, puppet("apply -e 'service {'squid' : ensure => stopped}'")) on(master, remove_squid) step 'Remove proxy environment variable' master.delete_env_var('http_proxy', "http://#{master.hostname}:3128") end step 'Install and configure squid proxy' on(master, install_squid) master.add_env_var('http_proxy', "http://#{master.hostname}:3128") step 'turn off the firewall' on(master, puppet("apply -e 'service {'iptables' : ensure => stopped}'")) step 'start squid proxy' on(master, puppet("apply -e 'service {'squid' : ensure => running}'")) #Tests step 'make a puppetfile' create_remote_file(master, "Puppetfile", 'mod "puppetlabs/motd"') step 'Use a r10k puppetfile' on(master, "#{r10k_fqp} puppetfile install") step 'Read the squid logs' on(master, "cat #{squid_log}") do |result| assert_match(squid_log_regex, result.stdout, 'Proxy logs did not indicate use of the proxy.') end r10k-4.0.2/integration/tests/basic_functionality/rugged_git_provider_with_ssh.rb000077500000000000000000000071311460033767200303140ustar00rootroot00000000000000require 'erb' require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-137 - C64160 - Use "rugged" Git Provider with Authentication' skip_test 'refactor to use shared private key internal to puppet' confine(:to, :platform => ['el', 'ubuntu', 'sles']) if ENV['GIT_PROVIDER'] == 'shellgit' skip_test('Skipping test because removing Git from the system affects other "shellgit" tests.') elsif fact_on(master, 'os.family') == 'RedHat' and fact_on(master, "os.release.major").to_i < 6 skip_test('This version of EL is not supported by this test case!') end #Init master_platform = fact_on(master, 'os.family') master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_repo_path = '/git_repos' git_control_remote = 'git@github.com:puppetlabs/codemgmt-92.git' git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = 'rugged' jenkins_key_path = File.file?("#{ENV['HOME']}/.ssh/id_rsa") ? 
"#{ENV['HOME']}/.ssh/id_rsa" : File.expand_path('~/.ssh/id_rsa-jenkins') ssh_private_key_path = '/root/.ssh/id_rsa-jenkins' local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') git_manifest_template_path = File.join(local_files_root_path, 'pre-suite', 'git_config.pp.erb') git_manifest = ERB.new(File.read(git_manifest_template_path)).result(binding) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' private_key: '#{ssh_private_key_path}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do step 'Restore "git" Package' on(master, puppet('apply'), :stdin => git_manifest, :acceptable_exit_codes => [0,2]) step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") if File.file?(jenkins_key_path) == false skip_test('Skipping test because necessary SSH key is not present!') end step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Copy SSH Key to Master' scp_to(master, jenkins_key_path, ssh_private_key_path) on(master, "chmod 600 #{ssh_private_key_path}") step 'Remove "git" Package from System' if master_platform == 'RedHat' on(master, 'yum remove -y git') elsif master_platform == 'Debian' if fact_on(master, "os.release.major") == '10.04' on(master, 'apt-get remove -y git-core') else on(master, 'apt-get remove -y git') end elsif master_platform == 'SLES' on(master, 'zypper remove -y git-core git') end #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/basic_functionality/rugged_git_provider_without_ssh.rb000077500000000000000000000071101460033767200310410ustar00rootroot00000000000000require 'erb' require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-137 - C64159 - Use "rugged" Git Provider without Authentication' confine(:to, :platform => ['el', 'ubuntu', 'sles']) if ENV['GIT_PROVIDER'] == 'shellgit' skip_test('Skipping test because removing Git from the system affects other "shellgit" tests.') elsif fact_on(master, 'os.family') == 'RedHat' and fact_on(master, "os.release.major").to_i < 6 skip_test('This version of EL is not supported by this test case!') end #Init master_platform = fact_on(master, 'os.family') master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path 
= '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = 'rugged' local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') git_manifest_template_path = File.join(local_files_root_path, 'pre-suite', 'git_config.pp.erb') git_manifest = ERB.new(File.read(git_manifest_template_path)).result(binding) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do step 'Restore "git" Package' on(master, puppet('apply'), :stdin => git_manifest, :acceptable_exit_codes => [0,2]) step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) step 'Remove "git" Package from System' if master_platform == 'RedHat' on(master, 'yum remove -y git') elsif master_platform == 'Debian' if fact_on(master, "os.release.major") == '10.04' on(master, 'apt-get remove -y git-core') else on(master, 'apt-get remove -y git') end elsif master_platform == 'SLES' on(master, 'zypper remove -y git-core git') end #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/command_line/000077500000000000000000000000001460033767200204145ustar00rootroot00000000000000r10k-4.0.2/integration/tests/command_line/deploy_env_without_mod_update.rb000077500000000000000000000041721460033767200271000ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-90 - C62419 - Deploy Environment without Module Update' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_path = File.join(environment_path, 'production') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) motd_module_init_pp_path = File.join(prod_env_path, 'modules/motd/manifests/init.pp') #Verification 
motd_path = '/etc/motd' motd_contents = 'Hello!' motd_contents_regex = /\A#{motd_contents}\z/ error_message_regex = /Error:/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'motd': content => '#{motd_contents}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) step 'Deploy "production" Environment via r10k with modules' on(master, "#{r10k_fqp} deploy environment -p -v") step 'Corrupt MOTD Manifest' create_remote_file(master, motd_module_init_pp_path, 'Broken') #Tests step 'Deploy "production" Environment via r10k without module update' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr) end end r10k-4.0.2/integration/tests/command_line/negative/000077500000000000000000000000001460033767200222165ustar00rootroot00000000000000r10k-4.0.2/integration/tests/command_line/negative/neg_deploy_env_with_module_update.rb000077500000000000000000000042341460033767200315100ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-90 - C62418 - Deploy Environment with Module Update' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_path = File.join(environment_path, 'production') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) motd_module_init_pp_path = File.join(prod_env_path, 'modules/motd/manifests/init.pp') #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' 
motd_contents_regex = /\A#{motd_contents}\z/ notify_message_regex = /Error:/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'motd': content => '#{motd_contents}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) step 'Deploy "production" Environment via r10k with modules' on(master, "#{r10k_fqp} deploy environment -p -v") step 'Corrupt MOTD Manifest' create_remote_file(master, motd_module_init_pp_path, 'Broken') #Tests step 'Deploy "production" Environment via r10k with module update' on(master, "#{r10k_fqp} deploy environment -p -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 1) do |result| assert_match(notify_message_regex, result.stderr, 'Unexpected error was detected!') end end r10k-4.0.2/integration/tests/command_line/negative/neg_invalid_cmd_line_arg.rb000066400000000000000000000012011460033767200274770ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-90 - C62420 - Invalid Command Line Argument' #Init git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #Verification error_message_regex = /error/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment NONEXISTENTENV -v", :acceptable_exit_codes => 1) do |result| assert_match(/error/, result.stderr.downcase, 'Expected message not found!') end r10k-4.0.2/integration/tests/git_source/000077500000000000000000000000001460033767200201325ustar00rootroot00000000000000r10k-4.0.2/integration/tests/git_source/HTTP_proxy_and_git_source.rb000066400000000000000000000040521460033767200255450ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-730 - C97977 - Specify HTTP_PROXY environment var and proxy for specific git source' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" puppetfile =<<-EOS mod 'motd', :git => 'https://github.com/puppetlabs/puppetlabs-motd', :branch => 'main' EOS #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' repositories: - remote: 'https://something.else/repo' proxy: 'http://foooooooo.unresolvable:3128' sources: control: basedir: "#{env_path}" remote: 'https://something.else/repo' CONF teardown do step 'Restore Original "r10k" Config' on(master, "mv 
#{r10k_config_bak_path} #{r10k_config_path}") step 'cleanup r10k' clean_up_r10k(master, last_commit, git_environments_path) end step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy Puppetfile to "production" Environment Git Repo' create_remote_file(master, "#{git_environments_path}/Puppetfile", puppetfile) step 'Push Changes' git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) #test on(master, "#{r10k_fqp} deploy environment -p", :accept_all_exit_codes => true) do |r| # Rugged as of 0.28 has a different error message than shellgit regex = /((failed to resolve address for)|(Could not resolve proxy:)) foooooooo\.unresolvable/ assert(r.exit_code == 1, 'expected error code was not observed') assert_match(regex, r.stderr, 'The expected error message was not observed' ) end r10k-4.0.2/integration/tests/git_source/git_source_git.rb000077500000000000000000000077731460033767200235060ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-92 - C59235 - Single Git Source Using "GIT" Transport Protocol' confine(:to, :platform => 'el') if fact_on(master, "os.release.major").to_i < 6 || fact_on(master, "os.release.major").to_i > 8 skip_test('This version of EL is not supported by this test case!') end #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_control_remote = 'git://localhost/environments.git' git_environments_path = '/root/environments' git_provider = ENV['GIT_PROVIDER'] || 'shellgit' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: broken: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF git_daemon_xinetd_enable_manifest = <<-MANIFEST package { 'git-daemon': ensure => present } include xinetd xinetd::service { 'git-daemon': port => '9418', server => '/usr/libexec/git-core/git-daemon', server_args => '--inetd --verbose --syslog --export-all --base-path=/git_repos', socket_type => 'stream', user => 'nobody', wait => 'no', service_type => 'UNLISTED', disable => 'no' } MANIFEST git_daemon_xinetd_disable_manifest = <<-MANIFEST xinetd::service { 'git-daemon': port => '9418', server => '/usr/libexec/git-core/git-daemon', disable => 'yes' } MANIFEST #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Stop "xinetd" Service' on(master, puppet('apply'), :stdin => git_daemon_xinetd_disable_manifest) on(master, puppet('resource', 'service', 'xinetd', 'ensure=stopped')) clean_up_r10k(master, last_commit, git_environments_path) step 'Run Puppet Agent to Clear Plug-in Cache' on(agents, 
puppet('agent', '--test', '--environment production')) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Install "puppetlabs-xinetd" Module' on(master, puppet('config print basemodulepath')) do |result| (result.stdout.include? ':') ? separator = ':' : separator = ';' @module_path = result.stdout.split(separator).first end on(master, puppet("module install puppetlabs-xinetd --modulepath #{@module_path}")) step 'Install and Configure "git-daemon" service' on(master, puppet('apply'), :stdin => git_daemon_xinetd_enable_manifest) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/git_source/git_source_repeated_remote.rb000066400000000000000000000037231460033767200260530ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'Verify the same remote can be used in more than one object' env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") code_dir = "#{env_path}/production" last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF # Install the same module in two different places puppetfile = <<-EOS mod 'prod_apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache.git', :tag => 'v6.0.0' mod 'test_apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache.git', :tag => 'v6.0.0' EOS teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) end step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Ask r10k to deploy' on(master, "#{r10k_fqp} deploy environment -p") step 'Add puppetfile with repeated remote' create_remote_file(master, "#{git_environments_path}/Puppetfile", puppetfile) git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) step 'Deploy r10k' on(master, "#{r10k_fqp} deploy environment -p") step 
'Verify module was installed in both places' on(master, "test -d #{code_dir}/modules/prod_apache") on(master, "test -d #{code_dir}/modules/test_apache") r10k-4.0.2/integration/tests/git_source/git_source_ssh.rb000077500000000000000000000050671460033767200235120ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-92 - C59234 - Single Git Source Using "SSH" Transport Protocol' skip_test 'refactor to use shared private key internal to puppet' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip git_control_remote = 'git@github.com:puppetlabs/codemgmt-92.git' git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) jenkins_key_path = File.file?("#{ENV['HOME']}/.ssh/id_rsa") ? "#{ENV['HOME']}/.ssh/id_rsa" : File.expand_path('~/.ssh/id_rsa-jenkins') ssh_private_key_path = '/root/.ssh/id_rsa-jenkins' ssh_config_path = '/root/.ssh/config' r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' private_key: '#{ssh_private_key_path}' sources: broken: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF ssh_config = <<-CONF StrictHostKeyChecking no Host github.com IdentityFile #{ssh_private_key_path} CONF #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Remove Jenkins SSH Key' on(master, "rm -rf #{ssh_private_key_path}") step 'Remove SSH Config' on(master, "rm -rf #{ssh_config_path}") step 'Restore Original "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") if File.file?(jenkins_key_path) == false skip_test('Skipping test because necessary SSH key is not present!') end step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Configure SSH to Use SSH Key for "github.com"' create_remote_file(master, ssh_config_path, ssh_config) on(master, "chmod 600 #{ssh_config_path}") step 'Copy SSH Key to Master' scp_to(master, jenkins_key_path, ssh_private_key_path) on(master, "chmod 600 #{ssh_private_key_path}") #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/git_source/git_source_submodule.rb000077500000000000000000000051151460033767200247060ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-92 - C59238 - Single Git Source with Git Sub-module' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_repo_module_parent_path = '/git_repos' git_repo_module_name = 'helloworld' git_repo_module_path = File.join(git_repo_module_parent_path, "#{git_repo_module_name}.git") git_clone_module_path = '/root/helloworld_module' git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = 
get_r10k_fqp(master) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do 'Remove "helloworld" Git Repo' on(master, "rm -rf #{git_repo_module_path}") 'Remove "helloworld" Git Clone' on(master, "rm -rf #{git_clone_module_path}") clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Create Git Repo for "helloworld" Module' git_init_bare_repo_and_clone(master, git_repo_module_parent_path, git_repo_module_name, git_clone_module_path) step 'Copy "helloworld" Module to Git Repo' scp_to(master, helloworld_module_path, File.join(git_clone_module_path, 'helloworld')) git_add_commit_push(master, 'master', 'Add module.', git_clone_module_path) step 'Add "helloworld" Module Git Repo as Submodule' on(master, "cd #{git_environments_path};git -c protocol.file.allow=always submodule add file://#{git_repo_module_path} dist") step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 1) do |result| expect_failure('Expected to fail due to RK-30') do assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end end r10k-4.0.2/integration/tests/git_source/negative/000077500000000000000000000000001460033767200217345ustar00rootroot00000000000000r10k-4.0.2/integration/tests/git_source/negative/neg_git_broken_remote.rb000077500000000000000000000024221460033767200266130ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' require 'digest/sha1' test_name 'CODEMGMT-101 - C59261 - Attempt to Deploy Environment with Broken Git Remote' #Init git_control_remote = '/git_repos/environments.git' prod_branch_head_ref_path = File.join(git_control_remote, 'refs', 'heads', 'production') prod_branch_head_ref_path_backup = '/tmp/production.bak' r10k_fqp = get_r10k_fqp(master) invalid_sha_ref = Digest::SHA1.hexdigest('broken') #Verification error_message_regex = /ERROR\].*Blah/m #Teardown teardown do step 'Restore Original "production" Branch Head Ref' on(master, "mv #{prod_branch_head_ref_path_backup} #{prod_branch_head_ref_path}") end #Setup step 'Backup Current "production" Branch Head Ref' on(master, "mv #{prod_branch_head_ref_path} #{prod_branch_head_ref_path_backup}") step 'Inject Corrupt "production" Branch Head Ref' create_remote_file(master, prod_branch_head_ref_path, "#{invalid_sha_ref}\n") on(master, "chmod 644 #{prod_branch_head_ref_path}") #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -t", :acceptable_exit_codes => [0,1]) do |result| expect_failure('Expected to fail due to RK-28') do assert_match(error_message_regex, result.stderr, 'Expected message not found!') end end 
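# Note on the broken-remote test above (explanatory sketch): Digest::SHA1.hexdigest('broken')
# produces a well-formed 40-character hex string that corresponds to no object in the
# control repository, so the injected refs/heads/production ref is syntactically valid
# but unresolvable. The error-message assertion is wrapped in expect_failure because of
# RK-28, i.e. the expected error text was apparently not yet emitted by r10k when this
# test was written.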
r10k-4.0.2/integration/tests/git_source/negative/neg_git_unauthorized_https.rb000077500000000000000000000025111460033767200277220ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-101 - C59236 - Attempt to Deploy Environment with Unauthorized "HTTPS" Git Source' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip git_control_remote = 'https://bad:user@github.com/puppetlabs/codemgmt-92.git' git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: broken: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Verification error_message_regex = /ERROR.*Unable to determine current branches for Git source 'broken'/m #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/git_source/negative/neg_git_unauthorized_ssh.rb000077500000000000000000000042021460033767200273540ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' require 'openssl' test_name 'CODEMGMT-101 - C59237 - Attempt to Deploy Environment with Unauthorized "SSH" Git Source' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip git_control_remote = 'git@github.com:puppetlabs/codemgmt-92.git' git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) unauthorized_rsa_key = OpenSSL::PKey::RSA.new(2048) ssh_private_key_path = '/root/.ssh/unauthorized_key' ssh_config_path = '/root/.ssh/config' r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' private_key: '#{ssh_private_key_path}' sources: broken: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF ssh_config = <<-CONF StrictHostKeyChecking no Host github.com IdentityFile #{ssh_private_key_path} CONF #Verification error_message_regex = /ERROR.*Unable to determine current branches for Git source 'broken'/m #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Remove Unauthorized SSH Key' on(master, "rm -rf #{ssh_private_key_path}") step 'Remove SSH Config' on(master, "rm -rf #{ssh_config_path}") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Copy Unauthorized SSH Key to Master' create_remote_file(master, ssh_config_path, ssh_config) on(master, "chmod 600 #{ssh_config_path}") step 'Configure SSH to Use Unauthorized SSH Key for "github.com"' create_remote_file(master, ssh_private_key_path, unauthorized_rsa_key) on(master, "chmod 600 #{ssh_private_key_path}") #Tests step 'Attempt to Deploy via 
r10k' on(master, "SSH_AUTH_SOCK= SSH_CONNECTION= SSH_CLIENT= #{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/git_source/negative/neg_git_unicode_branch.rb000077500000000000000000000022141460033767200267220ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-101 - C59231 - Attempt to Deploy Environment from Git Source with Branches Containing Unicode' #Init git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) unicode_env = "\uAD62\uCC63\uC0C3\uBEE7\uBE23\uB7E9\uC715\uCEFE\uBF90\uAE69" #Verification error_message_regex = /ERROR\].*Blah/m #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step "Create \"#{unicode_env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{unicode_env}".force_encoding('BINARY'), git_environments_path) step "Push Changes to \"#{unicode_env}\" Environment" git_push(master, unicode_env, git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -t", :acceptable_exit_codes => [0,1]) do |result| expect_failure('Expected to fail due to RK-29') do assert_match(error_message_regex, result.stderr, 'Expected message not found!') end end r10k-4.0.2/integration/tests/i18n/000077500000000000000000000000001460033767200165465ustar00rootroot00000000000000r10k-4.0.2/integration/tests/i18n/deploy_module_with_unicode_in_file_name.rb000066400000000000000000000045731460033767200271730ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' require 'beaker-qa-i18n' test_name 'Deploy module with unicode file name' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_fqp = get_r10k_fqp(master) #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end test_i18n_strings(10, [:syntax, :white_space]) do |test_string| #Setup test_file_path = File.join(git_environments_path, "site", 'helloworld', 'manifests', test_string) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Add unicode file to helloworld Module' create_remote_file(master, test_file_path, 'test file contents') step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") step 'test deployment of Unicode file' deployed_test_file_path = 
"/etc/puppetlabs/code/environments/production/site/helloworld/manifests/#{test_string}" on(master, "test -f #{deployed_test_file_path}", :accept_all_exit_codes => true) do |result| assert(result.exit_code == 0, "The unicode test file #{test_string} was not deployed by r10k") end agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end end r10k-4.0.2/integration/tests/purging/000077500000000000000000000000001460033767200174425ustar00rootroot00000000000000r10k-4.0.2/integration/tests/purging/content_not_purged_at_root.rb000066400000000000000000000055041460033767200254220ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'RK-256 - C98049 - verify if non-module content is at root of dir, does not cause erroneous purging' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") code_dir = '/etc/puppetlabs/code/environments/production' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF puppetfile = <<-EOS mod 'non_module_object_1', :install_path => './', :git => 'https://github.com/puppetlabs/control-repo.git', :branch => 'production' mod 'non_module_object_2', :install_path => '', :git => 'https://github.com/puppetlabs/control-repo.git', :branch => 'production' EOS puppetfile_2 = <<-EOS mod 'puppetlabs-motd' EOS teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) end step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Ask r10k to deploy' on(master, "#{r10k_fqp} deploy environment -p") step 'Add puppetfile with non-module content at top of directory' create_remote_file(master, "#{git_environments_path}/Puppetfile", puppetfile) git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) step 'Deploy r10k' on(master, "#{r10k_fqp} deploy environment -p") step 'Add puppetfile #2' create_remote_file(master, "#{git_environments_path}/Puppetfile", puppetfile_2) git_add_commit_push(master, 'production', 'add Puppetfile to production', git_environments_path) step 'Deploy r10k after adding puppetfile #2' on(master, "#{r10k_fqp} deploy environment -p") step 'Verify that non-module object 1 has not been purged' on(master, "test -d #{code_dir}/non_module_object_1", :accept_all_exit_codes => true) do |result| non_module_error = 'Non-module object was purged; should have been left alone' assert(result.exit_code == 0, non_module_error) end step 'Verify that non-module object 2 has not been purged' on(master, "test -d #{code_dir}/non_module_object_2", :accept_all_exit_codes => true) 
do |result| non_module_error = 'Non-module object was purged; should have been left alone' assert(result.exit_code == 0, non_module_error) end r10k-4.0.2/integration/tests/purging/default_purging.rb000066400000000000000000000122071460033767200231500ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'RK-256 - C98013 - verify default purging behavior' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) end # initialize path name a....d (where it is) code_dir_path = '/etc/puppetlabs/code' fake_environment_path_a = [code_dir_path, 'environments'].join('/') fake_environment_path_b = [code_dir_path,'environments', 'production'].join('/') fake_environment_path_c = [code_dir_path, 'environments', 'production', 'modules'].join('/') fake_environment_path_d = [code_dir_path, 'environments', 'production', 'modules', 'motd'].join('/') #not sure if need # initalize directory name a...c (where it is, what it is called) fake_dir_a_to_be_purged = "#{fake_environment_path_a}/fakedir1" fake_dir_b_to_be_left_alone = "#{fake_environment_path_b}/fakedir2" fake_dir_c_to_be_purged = "#{fake_environment_path_c}/fakedir3" # initalize file name a...c (where it is, what it is called) fake_file_a_to_be_purged = "#{fake_environment_path_a}/fakefile1.txt" fake_file_b_to_be_left_alone = "#{fake_environment_path_b}/fakefile2.txt" fake_file_c_to_be_purged = "#{fake_environment_path_c}/fakefile3.txt" # initalize file content step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Create content to be managed by r10k' git_on(master, 'checkout production', git_environments_path) create_remote_file(master, "#{git_environments_path}/Puppetfile", 'mod "puppetlabs-motd"') git_add_commit_push(master, 'production', 'add Puppetfile to production', git_environments_path) git_on(master, 'checkout production', git_environments_path) git_on(master, 'checkout -b cats', git_environments_path) create_remote_file(master, "#{git_environments_path}/Puppetfile", 'mod "puppetlabs-inifile"') git_add_commit_push(master, 'cats', 'add Puppetfile to cats', git_environments_path) step 'Ask r10k to deploy' on(master, "#{r10k_fqp} deploy environment -p") step 'Create fake file and directory at deployment level to be purged' create_remote_file(master, fake_file_a_to_be_purged, "foobar nonsense") on(master, "mkdir #{fake_dir_a_to_be_purged}") step 'Create fake file and directory at environment level to be left alone' create_remote_file(master, fake_file_b_to_be_left_alone, "foobar nonsense") on(master, "mkdir #{fake_dir_b_to_be_left_alone}") step 'Create fake file and directory at puppetfile level to 
be purged ' create_remote_file(master, fake_file_c_to_be_purged, "foobar nonsense") on(master, "mkdir #{fake_dir_c_to_be_purged}") step('Deploy r10k') on(master, "#{r10k_fqp} deploy environment -p") step('Assert to see if deployment level file is not there') on(master, "test -f #{fake_file_a_to_be_purged}", :accept_all_exit_codes => true) do |result| file_a_error = 'Puppet file purging was not observed' assert(result.exit_code == 1, file_a_error) end step('Assert to see if deployment level directory is not there') on(master, "test -d #{fake_dir_a_to_be_purged}", :accept_all_exit_codes => true) do |result| dir_a_error = 'Puppet directory purging was not observed' assert(result.exit_code == 1, dir_a_error) end step('Assert to see if environment level file is still there after second deployment') on(master, "test -f #{fake_file_b_to_be_left_alone}", :accept_all_exit_codes => true) do |result| file_b_error = 'Puppet file purging deleted this file when it should have left it alone :(' assert(result.exit_code == 0, file_b_error) end step('Assert to see if environment level directory is still there after second deployment') on(master, "test -d #{fake_dir_b_to_be_left_alone}", :accept_all_exit_codes => true) do |result| dir_b_error = 'Puppet directory purging deleted this directory when it should have left it alone :(' assert(result.exit_code == 0, dir_b_error) end step('Assert to see if puppetfile level file is not there') on(master, "test -f #{fake_file_c_to_be_purged}", :accept_all_exit_codes => true) do |result| file_c_error = 'Puppet file purging was not observed' assert(result.exit_code == 1, file_c_error) end step('Assert to see if puppetfile level directory is not there') on(master, "test -d #{fake_dir_c_to_be_purged}", :accept_all_exit_codes => true) do |result| dir_c_error = 'Puppet directory purging was not observed' assert(result.exit_code == 1, dir_c_error) end r10k-4.0.2/integration/tests/purging/does_not_purge_files_on_allowlist.rb000066400000000000000000000055351460033767200267630ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'RK-257 - C98046 - r10k does not purge files on allowlist' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") clean_up_r10k(master, last_commit, git_environments_path) end # initalize file content step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" deploy: purge_levels: ['deployment', 'environment', 'puppetfile'] purge_allowlist: ['**/*.pp'] CONF step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Copy Puppetfile to "production" Environment Git Repo' create_remote_file(master, "#{git_environments_path}/Puppetfile", "mod 'puppetlabs-stdlib' \n mod 'puppetlabs-motd'") step 'Push Changes' git_add_commit_push(master, 'production', 
'add Puppetfile', git_environments_path) step 'Deploy production' on(master, "#{r10k_fqp} deploy environment -p") step 'commit a new Puppetfile to production' create_remote_file(master, "#{git_environments_path}/Puppetfile", 'mod "puppetlabs-motd"') step 'Push Changes' git_add_commit_push(master, 'production', 'add Puppetfile', git_environments_path) step 'create test pp files' do_not_purge = [ "/etc/puppetlabs/code/environments/production/environment_level.pp", "/etc/puppetlabs/code/environments/production/site/environment_level.pp" ].each do |file| create_remote_file(master, file, 'this is a test') end purge = [ "/etc/puppetlabs/code/environments/production/environment_level.zz", "/etc/puppetlabs/code/environments/production/site/environment_level.zz" ].each do |file| create_remote_file(master, file, 'this is a test') end #TEST step 'Deploy again and check files' on(master, "#{r10k_fqp} deploy environment -p") purge.each do |file| assert_message = "The file #{file}\n was not purged, it was expected to be" assert(on(master, "test -f #{file}", :accept_all_exit_codes => true).exit_code == 1, assert_message) end do_not_purge.each do |file| assert_message = "The file #{file}\n was purged, it was not expected to be" assert(on(master, "test -f #{file}", :accept_all_exit_codes => true).exit_code == 0, assert_message) end r10k-4.0.2/integration/tests/user_scenario/000077500000000000000000000000001460033767200206305ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/basic_workflow/000077500000000000000000000000001460033767200236435ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/basic_workflow/multi_env_1000_branches.rb000077500000000000000000000045761460033767200305160ustar00rootroot00000000000000require 'securerandom' require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-62 - C59241 - Single Source with 100 Branches' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip confdir_path = on(master, puppet('config', 'print', 'confdir')).stdout.rstrip modules_path = File.join(confdir_path, 'modules') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') env_names = (0 ... 100).to_a.map!{ |x| x > 0 ? 
SecureRandom.uuid.gsub(/-/,"") * 3 : 'production'} #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup env_names.each do |env| if env == 'production' step "Checkout \"#{env}\" Branch" git_on(master, "checkout #{env}", git_environments_path) step "Copy \"helloworld\" Module to \"#{env}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env}\" Environment" inject_site_pp(master, site_pp_path, site_pp) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Update site.pp.', git_environments_path) else step "Create \"#{env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{env}", git_environments_path) step "Push Changes to \"#{env}\" Environment" git_push(master, env, git_environments_path) end end #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") #Select three environments at random and verify results. env_names.sample(3).each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') end end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/multi_env_custom_forge_git_module.rb000077500000000000000000000070551460033767200331700ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-73 - C59226 - Multiple Environments with Custom, Forge and Git Modules' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip confdir_path = on(master, puppet('config', 'print', 'confdir')).stdout.rstrip modules_path = File.join(confdir_path, 'modules') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') env_names = ['production', 'stage', 'test'] #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' 
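# Verification notes (editorial comment): the regexes defined next anchor the exact
# motd content written by puppetlabs-motd, and loosely match the notify output that
# results from merging $hash1 and $hash2 in the manifest below, so the agent runs
# against each environment can be checked from stdout and /etc/motd alone.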
motd_contents_regex = /\A#{motd_contents}\z/ stdlib_notify_message_regex = /The test message is:.*one.*=>.*1.*two.*=>.*bats.*three.*=>.*3.*/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" mod 'puppetlabs/stdlib', :git => 'https://github.com/puppetlabs/puppetlabs-stdlib.git', :tag => 'v7.0.1' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'helloworld': } class { 'motd': content => '#{motd_contents}', } $hash1 = {'one' => 1, 'two' => 2} $hash2 = {'two' => 'bats', 'three' => 3} $merged_hash = merge($hash1, $hash2) notify { 'Test Message': message => "The test message is: ${merged_hash}" } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") step 'Remove Static Modules' on(agents, "rm -rf #{modules_path}/*") end #Setup env_names.each do |env| if env == 'production' step "Checkout \"#{env}\" Branch" git_on(master, "checkout #{env}", git_environments_path) step "Copy \"helloworld\" Module to \"#{env}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env}\" Environment" inject_site_pp(master, site_pp_path, site_pp) step "Update the \"#{env}\" Environment with Puppetfile" create_remote_file(master, puppet_file_path, puppet_file) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Update site.pp, add modules, add Puppetfile.', git_environments_path) else step "Create \"#{env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{env}", git_environments_path) step "Push Changes to \"#{env}\" Environment" git_push(master, env, git_environments_path) end end #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") env_names.each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') assert_match(stdlib_notify_message_regex, result.stdout, 'Expected message not found!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end end end multi_env_custom_forge_git_module_static.rb000077500000000000000000000074431460033767200344610ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/basic_workflowrequire 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-40 - C59223 - Multiple Environments with Custom, Forge and Git Modules Deployed to Static Path' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) step 'Read module path' on(master, puppet('config print basemodulepath')) do |result| (result.stdout.include? ':') ? 
separator = ':' : separator = ';' @module_path = result.stdout.split(separator).first end git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') env_names = ['production', 'stage', 'test'] #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' motd_contents_regex = /\A#{motd_contents}\z/ stdlib_notify_message_regex = /The test message is:.*one.*=>.*1.*two.*=>.*bats.*three.*=>.*3.*/ #File puppet_file = <<-PUPPETFILE moduledir '#{@module_path}' mod "puppetlabs/motd" mod 'puppetlabs/stdlib', :git => 'https://github.com/puppetlabs/puppetlabs-stdlib.git', :tag => 'v7.0.1' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'helloworld': } class { 'motd': content => '#{motd_contents}', } $hash1 = {'one' => 1, 'two' => 2} $hash2 = {'two' => 'bats', 'three' => 3} $merged_hash = merge($hash1, $hash2) notify { 'Test Message': message => "The test message is: ${merged_hash}" } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") step 'Remove Static Modules' on(agents, "rm -rf #{@module_path}/*") end #Setup env_names.each do |env| if env == 'production' step "Checkout \"#{env}\" Branch" git_on(master, "checkout #{env}", git_environments_path) step "Copy \"helloworld\" Module to \"#{env}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env}\" Environment" inject_site_pp(master, site_pp_path, site_pp) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Update site.pp.', git_environments_path) else step "Create \"#{env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{env}", git_environments_path) step "Push Changes to \"#{env}\" Environment" git_push(master, env, git_environments_path) end end step 'Update just the "production" Environment with Puppetfile' git_on(master, 'checkout production', git_environments_path) create_remote_file(master, puppet_file_path, puppet_file) git_add_commit_push(master, 'production', 'Add module.', git_environments_path) #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") env_names.each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') assert_match(stdlib_notify_message_regex, result.stdout, 'Expected message not found!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/multi_env_hiera.rb000077500000000000000000000067101460033767200273510ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-102 - C63192 - Multiple 
Environments with Hiera Data' skip_test('This test is blocked by RK-136') #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) local_files_root_path = ENV['FILES'] || 'files' hieratest_module_path = File.join(local_files_root_path, 'modules', 'hieratest') hiera_local_config_path = File.join(local_files_root_path, 'hiera.yaml') hiera_master_config_path = on(master, puppet('config', 'print', 'hiera_config')).stdout.rstrip if get_puppet_version(master) < 4.0 hiera_data_dir = File.join(git_environments_path, 'hiera') else hiera_data_dir = File.join(git_environments_path, 'hieradata') end site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include hieratest') env_names = ['production', 'stage', 'test'] #Teardown teardown do step 'Restore Original "hiera.yaml" Config' on(master, "mv #{hiera_master_config_path}.bak #{hiera_master_config_path}") step 'Restart the Puppet Server Service' restart_puppet_server(master) clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Backup Current "hiera.yaml" Config' on(master, "mv #{hiera_master_config_path} #{hiera_master_config_path}.bak") step 'Copy New "hiera.yaml" to Puppet Master' scp_to(master, hiera_local_config_path, hiera_master_config_path) env_names.each do |env| #In-line Files hiera_data = <<-HIERA --- hieratest::hiera_message: "I am in the #{env} environment" HIERA if env == 'production' step "Checkout \"#{env}\" Branch" git_on(master, "checkout #{env}", git_environments_path) step "Copy \"hieratest\" Module to \"#{env}\" Environment Git Repo" scp_to(master, hieratest_module_path, File.join(git_environments_path, "site", 'hieratest')) step "Update Hiera Data for \"#{env}\" Environment" on(master, "mkdir -p #{hiera_data_dir}") create_remote_file(master, File.join(hiera_data_dir, "#{env}.yaml"), hiera_data) step "Inject New \"site.pp\" to the \"#{env}\" Environment" inject_site_pp(master, site_pp_path, site_pp) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Update site.pp, add hiera data.', git_environments_path) else step "Create \"#{env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{env}", git_environments_path) step "Update Hiera Data for \"#{env}\" Environment" create_remote_file(master, File.join(hiera_data_dir, "#{env}.yaml"), hiera_data) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Add hiera data.', git_environments_path) end end step 'Restart the Puppet Server Service' restart_puppet_server(master) #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") env_names.each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') end end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/multi_env_multi_source.rb000077500000000000000000000116771460033767200310030ustar00rootroot00000000000000require 'erb' require 'securerandom' require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 
'CODEMGMT-85 - C59240 - Multiple Sources with Multiple Branches' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_provider = ENV['GIT_PROVIDER'] || 'shellgit' local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #Sources and environments structures sources = [] GitEnv = Struct.new(:repo_path, :repo_name, :control_remote, :environments_path, :site_pp_path, :site_pp, :env_names ) #Push default source as first element sources.push(GitEnv.new('/git_repos', 'control', '/git_repos/environments.git', '/root/environments', '/root/environments/manifests/site.pp', create_site_pp(master_certname, ' include helloworld'), (0 ... 10).to_a.map!{ |x| x > 0 ? SecureRandom.uuid.gsub(/-/,"") : 'production'} ) ) #Generate the remaining environments (0..9).each do source_name = SecureRandom.uuid.gsub(/-/,"") sources.push(GitEnv.new("/tmp/git_repo_#{source_name}", "environments_#{source_name}", "/tmp/git_repo_#{source_name}/environments_#{source_name}.git", "/root/environments_#{source_name}", "/root/environments_#{source_name}/manifests/site.pp", create_site_pp(master_certname, ' include helloworld'), (0 ... 10).to_a.map!{ SecureRandom.uuid.gsub(/-/,"") } ) ) end #ERB Template r10k_conf_template_path = File.join(local_files_root_path, 'r10k_conf.yaml.erb') r10k_conf = ERB.new(File.read(r10k_conf_template_path)).result(binding) #Teardown last_commit = git_last_commit(master, sources.first.environments_path) teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Remove Git Sources' sources.slice(1, sources.length).each do |source| on(master, "rm -rf #{source.repo_path} #{source.environments_path}") end clean_up_r10k(master, last_commit, sources.first.environments_path) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Create Git Sources' sources.slice(1, sources.length).each do |source| init_r10k_source_from_prod(master, source.repo_path, source.repo_name, source.environments_path, source.env_names.first ) end #Create environments for sources sources.each do |source| source.env_names.each do |env_name| if env_name == source.env_names.first step "Checkout \"#{env_name}\" Branch" git_on(master, "checkout #{env_name}", source.environments_path) step "Copy \"helloworld\" Module to \"#{env_name}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(source.environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env_name}\" Environment" inject_site_pp(master, source.site_pp_path, source.site_pp) step "Push Changes to \"#{env_name}\" Environment" git_add_commit_push(master, env_name, 'Update site.pp and add module.', source.environments_path) else step "Create \"#{env_name}\" Branch from \"#{source.env_names.first}\"" git_on(master, "checkout #{source.env_names.first}", source.environments_path) git_on(master, "checkout -b #{env_name}", source.environments_path) step "Push Changes to \"#{env_name}\" Environment" git_push(master, env_name, source.environments_path) end end end #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} 
deploy environment -v") #Select three environments at random and verify results. sources.sample(3).each do |source| source.env_names.sample(1).each do |env_name| agents.each do |agent| step "Run Puppet Agent Against \"#{env_name}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env_name}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env_name} environment/, result.stdout, 'Expected message not found!') end end end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/multi_source_custom_forge_git_module.rb000077500000000000000000000137001460033767200336720ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-85 - C59227 - Multiple Environments with Multiple Sources and Custom, Forge and Git Modules' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_provider = ENV['GIT_PROVIDER'] || 'shellgit' local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' motd_contents_regex = /\A#{motd_contents}\z/ stdlib_notify_message_regex = /The test message is:.*one.*=>.*1.*two.*=>.*bats.*three.*=>.*3.*/ #Manifest prod_env_manifest = <<-MANIFEST class { 'helloworld': } class { 'motd': content => '#{motd_contents}', } MANIFEST stage_env_manifest = <<-MANIFEST class { 'helloworld': } $hash1 = {'one' => 1, 'two' => 2} $hash2 = {'two' => 'bats', 'three' => 3} $merged_hash = merge($hash1, $hash2) notify { 'Test Message': message => "The test message is: ${merged_hash}" } MANIFEST #Environment Structures GitEnv = Struct.new(:repo_path, :repo_name, :control_remote, :environments_path, :puppet_file_path, :puppet_file, :site_pp_path, :site_pp) env_structs = {:production => GitEnv.new('/git_repos', 'environments', '/git_repos/environments.git', '/root/environments', '/root/environments/Puppetfile', 'mod "puppetlabs/motd"', '/root/environments/manifests/site.pp', create_site_pp(master_certname, prod_env_manifest) ), :stage => GitEnv.new('/git_repos_alt', 'environments_alt', '/git_repos_alt/environments_alt.git', '/root/environments_alt', '/root/environments_alt/Puppetfile', 'mod "puppetlabs/stdlib", :git => "https://github.com/puppetlabs/puppetlabs-stdlib.git", :tag => "v7.0.1"', '/root/environments_alt/manifests/site.pp', create_site_pp(master_certname, stage_env_manifest) ), } last_commit = git_last_commit(master, env_structs[:production].environments_path) #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{env_structs[:production].control_remote}" alt_control: basedir: "#{env_path}" remote: "#{env_structs[:stage].control_remote}" CONF #Teardown teardown do step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Remove Alternate Git Source' on(master, "rm -rf #{env_structs[:stage].repo_path} #{env_structs[:stage].environments_path}") clean_up_r10k(master, last_commit, env_structs[:production].environments_path) end #Setup step 'Backup 
Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Create Alternate Git Repo and Clone' init_r10k_source_from_prod(master, env_structs[:stage].repo_path, env_structs[:stage].repo_name, env_structs[:stage].environments_path, 'stage' ) env_structs.each do |env_name, env_info| step "Checkout \"#{env_name}\" Branch" git_on(master, "checkout #{env_name}", env_info.environments_path) step "Copy \"helloworld\" Module to \"#{env_name}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(env_info.environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env_name}\" Environment" inject_site_pp(master, env_info.site_pp_path, env_info.site_pp) step "Update the \"#{env_name}\" Environment with Puppetfile" create_remote_file(master, env_info.puppet_file_path, env_info.puppet_file) step "Push Changes to \"#{env_name}\" Environment" git_add_commit_push(master, env_name, 'Update site.pp, modules, Puppetfile.', env_info.environments_path) end #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step 'Run Puppet Agent Against "production" Environment' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the production environment/, result.stdout, 'Expected message not found!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end step 'Run Puppet Agent Against "stage" Environment' on(agent, puppet('agent', '--test', '--environment stage'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the stage environment/, result.stdout, 'Expected message not found!') assert_match(stdlib_notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/000077500000000000000000000000001460033767200254455ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_bad_basedir.rb000077500000000000000000000027761460033767200310610ustar00rootroot00000000000000require 'git_utils' require 'master_manipulator' test_name 'CODEMGMT-42 - C59225 - Attempt to Deploy to Base Directory with Invalid Length' #Init env_path = '/asuyiyuyabvusayd2784782gh8hexistasdfaiasdhfa78v87va8vajkb3vwkasv7as8vba87vb87asdhfajsbdzxmcbvawbvr7av6baskudvbausdgasycyu7abywfegasfsauydgfasf7uas67vbexistasdfaiasdhfa78v87va8vajkb3vwkasv7as8vba87vb87asdhfajsbdzxmcbvawbvr7av6baskudvbausdgasycyu7abywfegasfsauydgfasf7uas67vb' git_repo_path = '/git_repos' git_control_remote = File.join(git_repo_path, 'environments.git') git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: broken: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Verification error_message_regex = /ERROR.*(Failed to make directory|File name too long)/m #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") end #Setup 
step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_bad_forge_module.rb000077500000000000000000000025411460033767200321050ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-42 - C59228 - Attempt to Deploy Environment with Non-existent Forge Module' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #File puppet_file = <<-PUPPETFILE mod "puppetlabs/nothere" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Verification if get_puppet_version(master) < 4.0 error_message_regex = /Could not install 'puppetlabs-nothere'/ else error_message_regex = /error.*The module puppetlabs-nothere does not exist on/i end #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update Puppetfile.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -p", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_bad_git_module.rb000077500000000000000000000026121460033767200315650ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-42 - C59229 - Attempt to Deploy Environment with Non-existent Git Module' if ENV['GIT_PROVIDER'] == 'shellgit' skip_test('Skipping test because of known failure RK-80.') end #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #File puppet_file = <<-PUPPETFILE mod 'broken', :git => 'https://github.com/puppetlabs/puppetlabs-broken' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Verification error_message_regex = /ERROR.*uses the SSH protocol but no private key was given/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update Puppetfile.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -p", :acceptable_exit_codes => 1) do |result| assert_no_match(error_message_regex, result.stderr, 'Expected message not found!') end 
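# Editorial note: this test still expects the deploy to exit non-zero
# (:acceptable_exit_codes => 1); the assert_no_match above only verifies that the
# failure output does not contain the "uses the SSH protocol but no private key was
# given" error defined in error_message_regex.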
r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_bad_git_module_ref.rb000077500000000000000000000024541460033767200324250ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-42 - C59230 - Attempt to Deploy Environment with Invalid Git Module Reference' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #File puppet_file = <<-PUPPETFILE mod 'broken', :git => 'https://github.com/puppetlabs/puppetlabs-motd', :ref => 'does_not_exist' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Verification error_message_regex = /ERROR.*Could not resolve desired ref/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update Puppetfile.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -p", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_bad_git_remote.rb000077500000000000000000000023641460033767200315770ustar00rootroot00000000000000require 'git_utils' require 'master_manipulator' test_name 'CODEMGMT-42 - C59224 - Attempt to Deploy from Non-existent Git Remote' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip git_control_remote = '/does/not/exist' git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: broken: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Verification error_message_regex = /ERROR.*Unable to determine current branches for Git source 'broken'/ #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_branch_name_collision.rb000077500000000000000000000037131460033767200331420ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-63 - C59257 - Attempt to Deploy Multiple Sources with Branch Name Collision' #Init env_path = on(master, puppet('config print environmentpath')).stdout.rstrip git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path = File.join('/root', git_repo_name) git_provider = ENV['GIT_PROVIDER'] || 'shellgit' 
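# Editorial note: both sources in this test are initialised from the same
# 'production' branch, so r10k sees the same environment name coming from two
# different control remotes and the deploy is expected to abort with the
# "Environment collision" error matched by error_message_regex.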
r10k_fqp = get_r10k_fqp(master) git_alt_repo_path = '/git_repos_alt' git_alt_repo_name = 'environments_alt' git_alt_control_remote = File.join(git_alt_repo_path, "#{git_alt_repo_name}.git") git_alt_environments_path = File.join('/root', git_alt_repo_name) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" alt_control: basedir: "#{env_path}" remote: "#{git_alt_control_remote}" CONF #Verification error_message_regex = /ERROR.*Environment collision/ #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Remove Alternate Git Source' on(master, "rm -rf #{git_alt_repo_path} #{git_alt_environments_path}") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Create Alternate Git Repo and Clone' init_r10k_source_from_prod(master, git_alt_repo_path, git_alt_repo_name, git_alt_environments_path, 'production') #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_disk_full.rb000077500000000000000000000044541460033767200306110ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' test_name 'CODEMGMT-86 - C59265 - Attempt to Deploy Environment to Disk with Insufficient Free Space' if fact_on(master, 'os.family') == 'RedHat' and fact_on(master, "os.release.major").to_i < 6 skip_test('This version of EL is not supported by this test case!') end #Init git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" tmpfs_path = '/mnt/tmpfs' test_files_path = File.join(git_environments_path, 'test_files') #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: broken: basedir: "#{tmpfs_path}" remote: "#{git_control_remote}" CONF #Verification error_message_regex = /ERROR.*No space left on device/m #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Unmount and Destroy TMP File System' on(master, "umount #{tmpfs_path}") on(master, "rm -rf #{tmpfs_path}") clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Create TMP File System and Mount' on(master, "mkdir -p #{tmpfs_path}") on(master, "mount -osize=10m tmpfs #{tmpfs_path} -t tmpfs") step 'Create Large Binary File' create_remote_file(master, File.join(git_environments_path, '.gitattributes'), '*.file binary') on(master, "mkdir -p #{test_files_path}") # create a 11 mb file to fill the mount on(master, "dd 
if=/dev/urandom of=#{test_files_path}.test.file bs=1048576 count=11") step 'Push Changes' git_add_commit_push(master, 'production', 'Add large file.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_duplicate_module_names.rb000077500000000000000000000023011460033767200333240ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-63 - C59258 - Attempt to Deploy Environment with Duplicate Module Names' #Init git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #Verification deploy_str = %r[Deploying module /etc/puppetlabs/code/environments/production/modules/motd] #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" mod "jeffmccune/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Add modules.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -p", :acceptable_exit_codes => [0, 1]) do |result| assert_equal(1, result.exit_code, "Expected command to indicate error with exit code") end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_inaccessible_forge.rb000077500000000000000000000035571460033767200324460ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-42 - C59256 - Attempt to Deploy Environment with Inaccessible Forge' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) hosts_file_path = '/etc/hosts' #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Verification error_message_regex = /Error: Could not connect via HTTPS to https:\/\/forgeapi.puppet(labs)?.com/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Restore Original Hosts File' on(master, "mv #{hosts_file_path}.bak #{hosts_file_path}") end #Setup step 'Backup "/etc/hosts" File on Master' on(master, "mv #{hosts_file_path} #{hosts_file_path}.bak") step 'Point Forge Hostname to Localhost' on(master, "echo '127.0.0.1 forgeapi.puppet.com' > #{hosts_file_path}") on(master, "echo '127.0.0.1 forgeapi.puppetlabs.com' >> #{hosts_file_path}") step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update Puppetfile.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -p", :acceptable_exit_codes => 1) do |result| if 
get_puppet_version(master) > 4.0 expect_failure('expected to fail due to RK-134') do assert_match(error_message_regex, result.stderr, 'Expected message not found!') end else assert_match(error_message_regex, result.stderr, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_invalid_env_name.rb000077500000000000000000000020661460033767200321300ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-63 - C62511 - Attempt to Deploy Environment Containing Invalid Character in Name' #Init git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) invalid_env_name = 'should-not-contain-dashes' r10k_fqp = get_r10k_fqp(master) #Verification error_message_regex = /ERROR\]/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step "Create \"#{invalid_env_name}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{invalid_env_name}", git_environments_path) step "Push Changes to \"#{invalid_env_name}\" Environment" git_push(master, invalid_env_name, git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v") do |result| expect_failure('Expected to fail due to CODEMGMT-65') do assert_match(error_message_regex, result.stderr, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_invalid_puppet_file.rb000077500000000000000000000020111460033767200326420ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' test_name 'CODEMGMT-86 - C63185 - Attempt to Deploy Environment with Invalid Puppetfile' #Init git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) puppet_file_path = File.join(git_environments_path, 'Puppetfile') #In-line files puppet_file = <<-PUPPETFILE - modulo 'puppetlabs/motd",, PUPPETFILE #Verification error_message_regex = /ERROR.*Failed to evaluate/m #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update Puppetfile.', git_environments_path) #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment -v -p", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negative/neg_read_only.rb000077500000000000000000000031611460033767200306030ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' test_name 'CODEMGMT-86 - C59266 - Attempt to Deploy Environment to Read Only Directory' #Init git_repo_path = '/git_repos' git_repo_name = 'environments' git_control_remote = File.join(git_repo_path, "#{git_repo_name}.git") git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) git_provider = ENV['GIT_PROVIDER'] || 'shellgit' r10k_fqp = get_r10k_fqp(master) r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" tmpfs_path = '/mnt/tmpfs' #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: broken: basedir: 
"#{tmpfs_path}" remote: "#{git_control_remote}" CONF #Verification error_message_regex = /ERROR.*Read-only file system/m #Teardown teardown do step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Unmount and Destroy TMP File System' on(master, "umount #{tmpfs_path}") on(master, "rm -rf #{tmpfs_path}") end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Create Read Only TMP File System and Mount' on(master, "mkdir -p #{tmpfs_path}") on(master, "mount -osize=10m,ro tmpfs #{tmpfs_path} -t tmpfs") #Tests step 'Attempt to Deploy via r10k' on(master, "#{r10k_fqp} deploy environment", :acceptable_exit_codes => 1) do |result| assert_match(error_message_regex, result.stderr, 'Expected message not found!') end neg_specify_deleted_forge_module.rb000066400000000000000000000027261460033767200344320ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/basic_workflow/negativerequire 'git_utils' require 'r10k_utils' test_name 'CODEMGMT-127 - C64288 - Attempt to Deploy Environment Specify Deleted Forge Module' #This test uses the regret module deleted from the acceptance forge (history at https://github.com/justinstoller/puppetlabs-regret/commits/master), with versions 0.1.0 - 0.4.0 deleted, effectively deleting the module. #Init git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #Verification error_notification_regex = /(The module puppetlabs-regret does not appear to have any published releases)|(module puppetlabs-regret does not exist on)/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/regret" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Tests step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Add module.', git_environments_path) #Tests step "Deploy production environment via r10k with specified module deleted" on(master, "#{r10k_fqp} deploy environment -p -v --trace", :acceptable_exit_codes => 1) do |result| assert_match(error_notification_regex, result.stderr, 'Unexpected error was detected!') end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_10000_files.rb000077500000000000000000000055521460033767200302350ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-62 - C59239 - Single Environment with 10,000 Files' if fact_on(master, 'os.family') == 'RedHat' and fact_on(master, "os.release.major").to_i < 6 skip_test('This version of EL is not supported by this test case!') end #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_path = File.join(environment_path, 'production') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') 
test_files = 'test_files' test_files_path = File.join(git_environments_path, test_files) #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ checksum_file_name = 'files.md5' prod_env_test_files_path = File.join(prod_env_path, test_files) prod_env_checksum_file_path = File.join(prod_env_test_files_path, checksum_file_name) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create 10,000 Files' create_remote_file(master, File.join(git_environments_path, '.gitattributes'), '*.file binary') on(master, "mkdir -p #{test_files_path}") # create 10000 1k files with random text on(master, "for n in {1..10000}; do dd if=/dev/urandom of=#{test_files_path}/test$( printf %03d \"$n\" ).file bs=1024 count=1; done") step 'Create MD5 Checksum of Files' on(master, "cd #{test_files_path};md5sum *.file > #{checksum_file_name}") step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") step 'Verify Files in "production" Environment' on(master, "cd #{prod_env_test_files_path};md5sum -c #{prod_env_checksum_file_path}") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_custom_forge_git_module.rb000077500000000000000000000062221460033767200333120ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-40 - C59222 - Single Environment with Custom, Forge and Git Modules' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_fqp = get_r10k_fqp(master) #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' motd_contents_regex = /\A#{motd_contents}\z/ ini_file_path = '/tmp/foo.ini' ini_file_section = 'foo' ini_file_setting = 'foosetting' ini_file_value = 'FOO!' 
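# Editorial note: the inifile module (pulled from Git at tag v5.0.1 via the
# Puppetfile below) should render /tmp/foo.ini with a [foo] section containing
# foosetting = FOO!, which the multiline regex on the next line checks for.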
ini_file_contents_regex = /\[#{ini_file_section}\].*#{ini_file_setting}\s=\s#{ini_file_value}/m notify_message_regex = /I am in the production environment/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" mod 'puppetlabs/inifile', :git => 'https://github.com/puppetlabs/puppetlabs-inifile', :tag => 'v5.0.1' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'helloworld': } class { 'motd': content => '#{motd_contents}', } ini_setting { "sample setting": ensure => present, path => '#{ini_file_path}', section => '#{ini_file_section}', setting => '#{ini_file_setting}', value => '#{ini_file_value}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") step 'Remove Temp INI File' on(agents, "rm -rf #{ini_file_path}") end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add modules.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end step "Verify INI File Contents" on(agent, "cat #{ini_file_path}") do |result| assert_match(ini_file_contents_regex, result.stdout, 'File content is invalid!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_custom_forge_module.rb000077500000000000000000000046211460033767200324500ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-22 - C59121 - Single Environment with Custom Module and Forge Module' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_fqp = get_r10k_fqp(master) #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' 
motd_contents_regex = /\A#{motd_contents}\z/ notify_message_regex = /I am in the production environment/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'helloworld': } class { 'motd': content => '#{motd_contents}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add modules.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_custom_module.rb000077500000000000000000000033211460033767200312620ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-22 - C59118 - Single Environment with Custom Module' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_fqp = get_r10k_fqp(master) #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| 
assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_large_files.rb000077500000000000000000000055771460033767200306760ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-62 - C59242 - Single Environment with Large Binary Files' if fact_on(master, 'os.family') == 'RedHat' and fact_on(master, "os.release.major").to_i < 6 skip_test('This version of EL is not supported by this test case!') end #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_path = File.join(environment_path, 'production') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') test_files = 'test_files' test_files_path = File.join(git_environments_path, 'test_files') #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ checksum_file_name = 'files.md5' prod_env_test_files_path = File.join(prod_env_path, test_files) prod_env_checksum_file_path = File.join(prod_env_test_files_path, checksum_file_name) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create Large Binary Files' create_remote_file(master, File.join(git_environments_path, '.gitattributes'), '*.file binary') on(master, "mkdir -p #{test_files_path}") # create 10 25 MB files with random characters on(master, "for n in {1..10}; do dd if=/dev/urandom of=#{test_files_path}/test$( printf %03d \"$n\" ).file bs=1048576 count=25; done") step 'Create MD5 Checksum of Files' on(master, "cd #{test_files_path};md5sum *.file > #{checksum_file_name}") step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") step 'Verify Files in "production" Environment' on(master, "cd #{prod_env_test_files_path};md5sum -c #{prod_env_checksum_file_path}") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_module_already_installed.rb000077500000000000000000000045261460033767200334400ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-117 - C63601 - Single Environment Specify Module that 
is Already Installed' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) forge_module_path = File.join(environment_path, 'production', 'modules') #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' motd_contents_regex = /\A#{motd_contents}\z/ #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'motd': content => '#{motd_contents}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Add motd module from the forge using the PMT' on(master, puppet('module', 'install', 'puppetlabs-motd', '--modulepath', forge_module_path)) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k with modules' on(master, "#{r10k_fqp} deploy environment -p -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Verify Contents of MOTD Module' on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_non-existent_base_dir.rb000077500000000000000000000060151460033767200326710ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-62 - C62387 - Single Environment Deployed to Non-existent Base Directory Path' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip original_env_path = on(master, puppet('config print environmentpath')).stdout.rstrip env_path = '/tmp/puppet/temp/environments' r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' git_repo_path = '/git_repos' git_control_remote = File.join(git_repo_path, 'environments.git') git_provider = ENV['GIT_PROVIDER'] || 'shellgit' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_config_path = get_r10k_config_file_path(master) r10k_config_bak_path = "#{r10k_config_path}.bak" #In-line files r10k_conf = <<-CONF cachedir: '/var/cache/r10k' git: provider: '#{git_provider}' sources: control: basedir: "#{env_path}" remote: "#{git_control_remote}" CONF #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = 
create_site_pp(master_certname, ' include helloworld') #Verification notify_message_regex = /I am in the production environment/ #Teardown teardown do step 'Restore Original "environmentpath" Path' on(master, puppet("config set environmentpath \"#{original_env_path}\"")) step 'Restore Original "r10k" Config' on(master, "mv #{r10k_config_bak_path} #{r10k_config_path}") step 'Remove Temporary Environments Path' on(master, "rm -rf #{env_path}") step 'Restart the Puppet Server Service' restart_puppet_server(master) clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Backup Current "r10k" Config' on(master, "mv #{r10k_config_path} #{r10k_config_bak_path}") step 'Update the "r10k" Config' create_remote_file(master, r10k_config_path, r10k_conf) step 'Change Puppet "environmentpath"' on(master, puppet("config set environmentpath \"#{env_path}\"")) step 'Restart the Puppet Server Service' restart_puppet_server(master) step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step "Run Puppet Agent" on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_purge_unmanaged_modules.rb000077500000000000000000000056241460033767200333040ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-78 - Puppetfile Purge --puppetfile & --moduledir flag usage' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environments_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.strip moduledir = File.join(environments_path, 'production', 'modules') puppetfile_path = File.join(environments_path, 'production', 'Puppetfile') git_remote_environments_path = '/root/environments' last_commit = git_last_commit(master, git_remote_environments_path) r10k_fqp = get_r10k_fqp(master) #Verification motd_path = '/etc/motd' motd_contents = 'Hello!' 
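# Only xinetd is declared in the Puppetfile below; the motd module is installed
# by hand later so that `r10k puppetfile purge` has unmanaged content to remove.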
motd_contents_regex = /\A#{motd_contents}\z/ #File puppetfile = <<-PUPPETFILE mod "puppetlabs/xinetd" PUPPETFILE remote_puppetfile_path = File.join(git_remote_environments_path, 'Puppetfile') #Manifest manifest = <<-MANIFEST class { 'motd': content => '#{motd_contents}', } MANIFEST remote_site_pp_path = File.join(git_remote_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, manifest) #Teardown teardown do clean_up_r10k(master, last_commit, git_remote_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_remote_environments_path) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, remote_puppetfile_path, puppetfile) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, remote_site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_remote_environments_path) step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment --modules --verbose debug --trace") step 'Manually Install the "motd" Module from the Forge' on(master, puppet("module install puppetlabs-motd --modulepath #{moduledir}")) #Tests agents.each do |agent| step 'Run Puppet Agent Against "production" Environment' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end end step 'Use r10k to Purge Unmanaged Modules' on(master, "#{r10k_fqp} puppetfile purge --puppetfile #{puppetfile_path} --moduledir #{moduledir} --verbose debug --trace") #Agent will fail because r10k will purge the "motd" module agents.each do |agent| step 'Attempt to Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 1) do |result| assert_match(/Could not find declared class motd/, result.stderr, 'Module was not purged') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_switch_forge_git_module.rb000077500000000000000000000074161460033767200333070ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-111 - C63600 - Single Environment Switch Between Forge and Git for Puppetfile Module' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip prod_env_modules_path = File.join(env_path, 'production', 'modules') r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) #Verification motd_path = '/etc/motd' motd_template_path = File.join(prod_env_modules_path, 'motd', 'templates', 'motd.erb') motd_template_contents_forge = 'Hello!' motd_contents_git = 'Bonjour!' 
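# The same module is declared twice below: once as a Forge release and once as a
# git tag. r10k is expected to replace the deployed copy in place when the
# Puppetfile switches between the two forms.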
motd_contents_forge_regex = /\A#{motd_template_contents_forge}\n\z/ motd_contents_git_regex = /\A#{motd_contents_git}\z/ #File puppet_file_forge = <<-PUPPETFILE mod "puppetlabs/motd", '1.1.1' PUPPETFILE puppet_file_git = <<-PUPPETFILE mod "puppetlabs/motd", :git => 'https://github.com/puppetlabs/puppetlabs-motd', :tag => '1.2.0' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Manifest manifest_forge = <<-MANIFEST include motd MANIFEST manifest_git = <<-MANIFEST class { 'motd': content => '#{motd_contents_git}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp_forge = create_site_pp(master_certname, manifest_forge) site_pp_git = create_site_pp(master_certname, manifest_git) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp_forge) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file_forge) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add modules.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") step 'Update MOTD Template' create_remote_file(master, motd_template_path, motd_template_contents_forge) on(master, "chmod 644 #{motd_template_path}") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Verify MOTD Contents for Forge Version of Module' on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_forge_regex, result.stdout, 'File content is invalid!') end end step 'Update "Puppetfile" to use Git for MOTD Module' create_remote_file(master, puppet_file_path, puppet_file_git) step 'Update "site.pp" in the "production" Environment' inject_site_pp(master, site_pp_path, site_pp_git) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and Puppetfile.', git_environments_path) step 'Deploy "production" Environment Again via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Verify MOTD Contents for Git Version of Module' on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_git_regex, result.stdout, 'File content is invalid!') end end r10k-4.0.2/integration/tests/user_scenario/basic_workflow/single_env_unicode_paths.rb000077500000000000000000000045321460033767200312350ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-62 - C59260 - Single Environment with Unicode File Paths' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip prod_env_modules_path = File.join(environment_path, 'production', 'modules') r10k_fqp = get_r10k_fqp(master) git_environments_path = 
'/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' unicode_module_path = File.join(local_files_root_path, 'modules', 'unicode') unicode_remote_original_file_path = File.join(git_environments_path, 'modules', 'unicode', 'files', 'pretend_unicode') unicode_remote_rename_file_path = File.join(git_environments_path, 'modules', 'unicode', 'files', "\uAD62\uCC63\uC0C3\uBEE7\uBE23\uB7E9\uC715\uCEFE\uBF90\uAE69") #Verification unicode_file_contents_regex = /\AHa ha ha! I am in Korean!\n\z/ #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include unicode') #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "unicode" Module to "production" Environment Git Repo' scp_to(master, unicode_module_path, File.join(git_environments_path, 'modules')) #Required because of CODEMGMT-87 step 'Rename File to Actual Unicode' on(master, "mv #{unicode_remote_original_file_path} #{unicode_remote_rename_file_path}".force_encoding('BINARY')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add modules.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") #Note: Usually a full Puppet Run would be performed for verification. #Since Puppet has problems with Unicode, this test will verify the file #directly in the r10k environment. step 'Verify Unicode File' on(master, "cat #{unicode_remote_rename_file_path}") do |result| assert_match(unicode_file_contents_regex, result.stdout, 'File content is invalid!') end single_env_upgrade_forge_mod_revert_change.rb000066400000000000000000000132731460033767200346740ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/basic_workflowrequire 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-117 - C63602- Single Environment Upgrade Forge Module then Revert Change' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip env_path = on(master, puppet('config print environmentpath')).stdout.rstrip prod_env_modules_path = File.join(env_path, 'production', 'modules') git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) r10k_fqp = get_r10k_fqp(master) #Verification motd_path = '/etc/motd' motd_template_path = File.join(prod_env_modules_path, 'motd', 'templates', 'motd.erb') motd_template_contents = 'Hello!' motd_new_contents = 'Yolo!' 
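# The environment is deployed three times: pinned to motd 1.1.1, upgraded to
# 1.2.0, and then reverted. metadata.json is grepped after each pass to confirm
# which version r10k actually installed.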
motd_contents_old_regex = /\A#{motd_template_contents}\n\z/ motd_contents_new_regex = /\A#{motd_new_contents}\z/ motd_old_version = /"*1.1.1*"/ motd_new_version = /"*1.2.0*"/ motd_version_file_path = File.join(prod_env_modules_path, 'motd', 'metadata.json') #File puppet_file_old_motd = <<-PUPPETFILE mod "puppetlabs/motd", '1.1.1' PUPPETFILE puppet_file_new_motd = <<-PUPPETFILE mod "puppetlabs/motd", '1.2.0' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') puppet_file_path_bak = "#{puppet_file_path}.bak" #Manifest manifest_old = <<-MANIFEST include motd MANIFEST manifest_new = <<-MANIFEST class { 'motd': content => '#{motd_new_contents}', } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp_old = create_site_pp(master_certname, manifest_old) site_pp_new = create_site_pp(master_certname, manifest_new) #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp_old) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file_old_motd) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add modules.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") step 'Update MOTD Template' create_remote_file(master, motd_template_path, motd_template_contents) on(master, "chmod 644 #{motd_template_path}") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Verify MOTD Contents for Forge Version of Module' on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_old_regex, result.stdout, 'File content is invalid!') end step 'Verify Version 1.1.1 of the MOTD Module' on(master, "grep version #{motd_version_file_path}") do |result| assert_match(motd_old_version, result.stdout, 'File content is invalid!') end end step 'Backup Old MOTD "Puppetfile" to allow for creation of New MOTD "Puppetfile"' on(master, "mv #{puppet_file_path} #{puppet_file_path_bak}") step 'Update "Puppetfile" to use New Module Version 1.2.0' create_remote_file(master, puppet_file_path, puppet_file_new_motd) step 'Update "site.pp" in the "production" Environment to New Style Manifest' inject_site_pp(master, site_pp_path, site_pp_new) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and Puppetfile.', git_environments_path) step 'Deploy "production" Environment Again via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Verify MOTD Contents for New Version of Module' on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_new_regex, result.stdout, 'File content is invalid!') end step 'Verify Version 1.2.0 of the MOTD Module' on(master, "grep version #{motd_version_file_path}") do |result| assert_match(motd_new_version, result.stdout, 'File 
content is invalid!') end end step 'Restore Old MOTD "Puppetfile"' on(master, "mv #{puppet_file_path_bak} #{puppet_file_path}") step 'Revert "site.pp" in the "production" Environment to Old Style Manifest' inject_site_pp(master, site_pp_path, site_pp_old) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add modules.', git_environments_path) step 'Deploy "production" Environment Again via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") step 'Update MOTD Template' create_remote_file(master, motd_template_path, motd_template_contents) on(master, "chmod 644 #{motd_template_path}") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => [0,2]) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') end step 'Verify MOTD Contents for Old Version of Module' on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_old_regex, result.stdout, 'File content is invalid!') end step 'Verify Version 1.1.1 of the MOTD Module' on(master, "grep version #{motd_version_file_path}") do |result| assert_match(motd_old_version, result.stdout, 'File content is invalid!') end end r10k-4.0.2/integration/tests/user_scenario/complex_workflow/000077500000000000000000000000001460033767200242315ustar00rootroot00000000000000r10k-4.0.2/integration/tests/user_scenario/complex_workflow/multi_env_add_change_remove.rb000077500000000000000000000137461460033767200322700ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-48 - C59262 - Multiple Environments with Additions, Changes and Removal of Branches' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_fqp = get_r10k_fqp(master) initial_env_names = ['production', 'stage', 'test'] #Verification for "production" Environment motd_path = '/etc/motd' motd_contents = 'Hello!' 
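# Each branch of the control repo maps to an environment directory of the same
# name, so adding the "temp" branch and deleting "test" later in this test
# should create and remove the matching environments on the next deploy.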
motd_contents_regex = /\A#{motd_contents}\z/ prod_env_notify_message_regex = /I am in the production environment/ #Verification for "stage" Environment stage_env_notify_message = 'This is a different message' stage_env_notify_message_regex = /#{stage_env_notify_message}/ #Verification for "test" Environment test_env_message_regex = /Environment 'test' not found on server/ #Verification for "temp" Environment test_env_notify_message_regex = /I am in the temp environment/ #Manifest prod_env_motd_manifest = <<-MANIFEST class { 'helloworld': } class { 'motd': content => '#{motd_contents}', } MANIFEST stage_env_custom_mod_manifest = <<-MANIFEST class helloworld { notify { "Hello world!": message => "#{stage_env_notify_message}" } } MANIFEST site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') original_site_pp = create_site_pp(master_certname, ' include helloworld') prod_env_motd_site_pp = create_site_pp(master_certname, prod_env_motd_manifest) #File puppet_file = <<-PUPPETFILE mod "puppetlabs/motd" PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) step 'Remove "/etc/motd" File' on(agents, "rm -rf #{motd_path}") end #Setup initial_env_names.each do |env| if env == 'production' step "Checkout \"#{env}\" Branch" git_on(master, "checkout #{env}", git_environments_path) step "Copy \"helloworld\" Module to \"#{env}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env}\" Environment" inject_site_pp(master, site_pp_path, original_site_pp) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Update site.pp and add module.', git_environments_path) else step "Create \"#{env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{env}", git_environments_path) step "Push Changes to \"#{env}\" Environment" git_push(master, env, git_environments_path) end end #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") #Initial Verification initial_env_names.each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') end end end #Add, Change, Remove Environments step 'Create "temp" Branch from "production"' git_on(master, 'checkout production', git_environments_path) git_on(master, 'checkout -b temp', git_environments_path) git_push(master, 'temp', git_environments_path) step 'Add "puppetlabs-motd" Module to the "production" Environment' git_on(master, 'checkout production', git_environments_path) inject_site_pp(master, site_pp_path, prod_env_motd_site_pp) create_remote_file(master, puppet_file_path, puppet_file) git_add_commit_push(master, 'production', 'Add motd module.', git_environments_path) step 'Update Custom Module in the "stage" Environment' hw_init_pp_path = File.join(git_environments_path, 'site', 'helloworld', 'manifests', 'init.pp') git_on(master, 'checkout stage', git_environments_path) create_remote_file(master, hw_init_pp_path, stage_env_custom_mod_manifest) git_add_commit_push(master, 'stage', 'Update custom module.', git_environments_path) 
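# Deleting the remote "test" branch below should cause the next r10k deploy to
# drop the corresponding environment; agents pinned to it should then report
# "Environment 'test' not found on server".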
step 'Delete the "test" Environment' git_on(master, 'branch -D test', git_environments_path) git_on(master, 'push origin --delete test', git_environments_path) step 'Re-deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") #Second Pass Verification agents.each do |agent| step 'Run Puppet Agent Against "production" Environment' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(prod_env_notify_message_regex, result.stdout, 'Expected message not found!') end step 'Run Puppet Agent Against "temp" Environment' on(agent, puppet('agent', '--test', '--environment temp'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(test_env_notify_message_regex, result.stdout, 'Expected message not found!') end step "Verify MOTD Contents" on(agent, "cat #{motd_path}") do |result| assert_match(motd_contents_regex, result.stdout, 'File content is invalid!') end step 'Run Puppet Agent Against "stage" Environment' on(agent, puppet('agent', '--test', '--environment stage'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(stage_env_notify_message_regex, result.stdout, 'Expected message not found!') end step 'Attempt to Run Puppet Agent Against "test" Environment' on(agent, puppet('agent', '--test', '--environment test'), :acceptable_exit_codes => 2) do |result| assert_match(test_env_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/complex_workflow/multi_env_remove_re-add.rb000077500000000000000000000101731460033767200313560ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-48 - C59263 - Multiple Environments with Adding, Removing and Re-adding Same Branch Name' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') r10k_fqp = get_r10k_fqp(master) initial_env_names = ['production', 'stage'] #Verification notify_message_regex = /I am in the production environment/ stage_env_message_regex = /Environment 'stage' not found on server/ #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup initial_env_names.each do |env| if env == 'production' step "Checkout \"#{env}\" Branch" git_on(master, "checkout #{env}", git_environments_path) step "Copy \"helloworld\" Module to \"#{env}\" Environment Git Repo" scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step "Inject New \"site.pp\" to the \"#{env}\" Environment" inject_site_pp(master, site_pp_path, site_pp) step "Push Changes to \"#{env}\" Environment" git_add_commit_push(master, env, 'Update site.pp and add module.', git_environments_path) else step "Create \"#{env}\" Branch from \"production\"" git_on(master, 'checkout production', git_environments_path) git_on(master, "checkout -b #{env}", git_environments_path) step "Push Changes 
to \"#{env}\" Environment" git_push(master, env, git_environments_path) end end #Tests step 'Deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") #Initial Verification initial_env_names.each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') end end end #Remove "stage" Environment step 'Delete the "stage" Environment' git_on(master, 'checkout production', git_environments_path) git_on(master, 'branch -D stage', git_environments_path) git_on(master, 'push origin --delete stage', git_environments_path) step 'Re-deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") #Second Pass Verification agents.each do |agent| step 'Run Puppet Agent Against "production" Environment' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_regex, result.stdout, 'Expected message not found!') end step 'Attempt to Run Puppet Agent Against "stage" Environment' on(agent, puppet('agent', '--test', '--environment stage'), :acceptable_exit_codes => 2) do |result| assert_match(stage_env_message_regex, result.stdout, 'Expected message not found!') end end #Create the "stage" Environment Again step 'Create "stage" Branch from "production"' git_on(master, 'checkout production', git_environments_path) git_on(master, 'checkout -b stage', git_environments_path) step 'Push Changes to "stage" Environment' git_push(master, 'stage', git_environments_path) step 'Re-deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") #Final Verification initial_env_names.each do |env| agents.each do |agent| step "Run Puppet Agent Against \"#{env}\" Environment" on(agent, puppet('agent', '--test', "--environment #{env}"), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(/I am in the #{env} environment/, result.stdout, 'Expected message not found!') end end end r10k-4.0.2/integration/tests/user_scenario/complex_workflow/multi_env_unamanaged.rb000077500000000000000000000057231460033767200307520ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-155 - C62421 - Multiple Environments with Existing Unmanaged Environments' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') #Manifest site_pp_path = File.join(git_environments_path, 'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Verification notify_message_prod_env_regex = /I am in the production environment/ notify_message_test_env_regex = /I am in the test environment/ removal_message_test_env_regex = /Removing unmanaged path.*test/ missing_message_regex = /Environment 'test' not found on server/ 
#Teardown teardown do clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Checkout "production" Branch' git_on(master, 'checkout production', git_environments_path) step 'Copy "helloworld" Module to "production" Environment Git Repo' scp_to(master, helloworld_module_path, File.join(git_environments_path, "site", 'helloworld')) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Push Changes' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_prod_env_regex, result.stdout, 'Expected message not found!') end end step 'Create Unmanaged "test" Environment' on(master, "cp -r #{environment_path}/production #{environment_path}/test") agents.each do |agent| step 'Run Puppet Agent Against "test" Environment' on(agent, puppet('agent', '--test', '--environment test'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_message_test_env_regex, result.stdout, 'Expected message not found!') end end step 'Re-deploy Environments via r10k' on(master, "#{r10k_fqp} deploy environment -v") do |result| assert_match(removal_message_test_env_regex, result.output, 'Unexpected error was detected!') end agents.each do |agent| step 'Run Puppet Agent Against "test" Environment' on(agent, puppet('agent', '--test', '--environment test'), :acceptable_exit_codes => 2) do |result| assert_match(missing_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/integration/tests/user_scenario/complex_workflow/single_env_git_module_update.rb000077500000000000000000000073131460033767200324700ustar00rootroot00000000000000require 'git_utils' require 'r10k_utils' require 'master_manipulator' test_name 'CODEMGMT-155 - C64588 - Single Environment with Git Module Using a Branch Reference where Updates Occur After Initial Deploy' #Init master_certname = on(master, puppet('config', 'print', 'certname')).stdout.rstrip environment_path = on(master, puppet('config', 'print', 'environmentpath')).stdout.rstrip r10k_fqp = get_r10k_fqp(master) git_repo_parent_path = '/git_repos' git_repo_module_name = 'helloworld_module' git_remote_module_path = File.join(git_repo_parent_path, "#{git_repo_module_name}.git") git_module_clone_path = '/root/helloworld' git_environments_path = '/root/environments' last_commit = git_last_commit(master, git_environments_path) local_files_root_path = ENV['FILES'] || 'files' helloworld_module_path = File.join(local_files_root_path, 'modules', 'helloworld') #Verification notify_original_message_regex = /I am in the production environment/ notify_updated_message = 'A totally different message' notify_updated_message_regex = /#{notify_updated_message}/ #File puppet_file = <<-PUPPETFILE mod 'test/helloworld', :git => '#{git_remote_module_path}', :ref => 'master' PUPPETFILE puppet_file_path = File.join(git_environments_path, 'Puppetfile') updated_helloworld_manifest = <<-MANIFEST class helloworld { notify { "Hello world!": message => "#{notify_updated_message}"} } MANIFEST #Manifest site_pp_path = File.join(git_environments_path, 
'manifests', 'site.pp') site_pp = create_site_pp(master_certname, ' include helloworld') #Teardown teardown do step 'Remove Git Repo and Clone for Module' on(master, "rm -rf #{git_remote_module_path} #{git_module_clone_path}") clean_up_r10k(master, last_commit, git_environments_path) end #Setup step 'Create Bare Git Repo and Clone' on(master, "mkdir -p #{git_remote_module_path} #{git_module_clone_path}") git_init_bare_repo_and_clone(master, git_repo_parent_path, git_repo_module_name, git_module_clone_path) step 'Copy "helloworld" Module to Git Repo' scp_to(master, "#{helloworld_module_path}/manifests", git_module_clone_path) step 'Push Changes for Module Git Repo to Remote' git_add_commit_push(master, 'master', 'Add module.', git_module_clone_path) step 'Inject New "site.pp" to the "production" Environment' inject_site_pp(master, site_pp_path, site_pp) step 'Create "Puppetfile" for the "production" Environment' create_remote_file(master, puppet_file_path, puppet_file) step 'Push Changes to Environments Git Repo Remote' git_add_commit_push(master, 'production', 'Update site.pp and add module.', git_environments_path) #Tests step 'Deploy "production" Environment via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_original_message_regex, result.stdout, 'Expected message not found!') end end step 'Update "helloworld" Module and Push Changes' create_remote_file(master, "#{git_module_clone_path}/manifests/init.pp", updated_helloworld_manifest) on(master, "chmod -R 644 #{git_module_clone_path}") git_add_commit_push(master, 'master', 'Update module.', git_module_clone_path) step 'Deploy "production" Environment Again via r10k' on(master, "#{r10k_fqp} deploy environment -v -p") agents.each do |agent| step 'Run Puppet Agent' on(agent, puppet('agent', '--test', '--environment production'), :acceptable_exit_codes => 2) do |result| assert_no_match(/Error:/, result.stderr, 'Unexpected error was detected!') assert_match(notify_updated_message_regex, result.stdout, 'Expected message not found!') end end r10k-4.0.2/lib/000077500000000000000000000000001460033767200130505ustar00rootroot00000000000000r10k-4.0.2/lib/r10k.rb000066400000000000000000000004111460033767200141460ustar00rootroot00000000000000require 'gettext-setup' module R10K GettextSetup.initialize(File.absolute_path('../locales', File.dirname(__FILE__))) # Attempt to set the R10k error and log message locale FastGettext.locale = ENV["LANG"] end require 'r10k/version' require 'r10k/logging' r10k-4.0.2/lib/r10k/000077500000000000000000000000001460033767200136255ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/action/000077500000000000000000000000001460033767200151025ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/action/base.rb000066400000000000000000000021321460033767200163370ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/util/setopts' module R10K module Action class Base include R10K::Logging include R10K::Util::Setopts attr_accessor :settings # @param opts [Hash] A hash of options defined in #allowed_initialized_opts # and managed by the SetOps mixin within the Action::Base class. # Corresponds to the CLI flags and options. # @param argv [Enumerable] Typically CRI::ArgumentList or Array. 
A list-like # collection of the remaining arguments to the CLI invocation (after # removing flags and options). # @param settings [Hash] A hash of configuration loaded from the relevant # config (r10k.yaml). # # @note All arguments will be required in the next major version def initialize(opts, argv, settings = {}) @opts = opts @argv = argv @settings = settings setopts(opts, allowed_initialize_opts) end private def allowed_initialize_opts { :config => true, :trace => true, } end end end end r10k-4.0.2/lib/r10k/action/cri_runner.rb000066400000000000000000000040771460033767200176050ustar00rootroot00000000000000require 'r10k/action/runner' module R10K module Action # Adapt the Cri runner interface to the R10K::Action::Runner interface # # This class provides the necessary glue to translate behavior specific # to Cri and the CLI component in general to the interface agnostic runner # class. # # @api private class CriRunner def self.wrap(klass) new(klass) end def initialize(klass) @klass = klass end # Intercept any instatiations of klass # # Defining #new allows this object to proxy method calls on the wrapped # runner and decorate various methods. Doing so allows this class to # manage CLI specific behaviors and isolate the underlying code from # having to deal with those particularities # # @param opts [Hash] # @param argv [Array] # @param _cmd [Cri::Command] The command that was invoked. This value # is not used and is only present to adapt the Cri interface to r10k. # @return [self] def new(opts, argv, _cmd = nil) handle_opts(opts) handle_argv(argv) @runner = R10K::Action::Runner.new(@opts, @argv, @klass) self end # @return [Hash] The adapted options for the runner def handle_opts(opts) if opts[:verbose] # Translate from the Cri verbose logging option to the internal logging setting. opts[:loglevel] = opts.delete(:verbose) end # Colored logging is only appropriate for CLI interactions, so we # handle this while we're still in CLI specific code. use_color = opts.delete(:color) if use_color R10K::Logging.use_color = use_color end @opts = opts end # @return [Array] The adapted arguments for the runner def handle_argv(argv) @argv = argv end # Invoke the wrapped behavior, determine if it succeeded, and exit with # the resulting exit code. def call rv = @runner.call exit(rv ? 0 : 1) end end end end r10k-4.0.2/lib/r10k/action/deploy.rb000066400000000000000000000003041460033767200167200ustar00rootroot00000000000000module R10K module Action module Deploy require 'r10k/action/deploy/environment' require 'r10k/action/deploy/module' require 'r10k/action/deploy/display' end end end r10k-4.0.2/lib/r10k/action/deploy/000077500000000000000000000000001460033767200163765ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/action/deploy/deploy_helpers.rb000066400000000000000000000023131460033767200217400ustar00rootroot00000000000000require 'r10k/logging' module R10K module Action module Deploy module DeployHelpers include R10K::Logging # Ensure that a config file has been found (and presumably loaded) and exit # with a helpful error if it hasn't. # # @raise [SystemExit] If no config file was loaded def expect_config! if @config.nil? logger.fatal(_("No configuration file given, no config file found in current directory, and no global config present")) exit(8) end end # Check to see if the deploy write_lock setting has been set, and log the lock message # and exit if it has been set. 
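      # An illustrative r10k.yaml fragment (not taken from this repository's
      # fixtures) that would trigger this lock:
      #
      #   deploy:
      #     write_lock: "Deploys are frozen during the maintenance window"
      #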
# # @param config [Hash] The r10k config hash # # @raise [SystemExit] if the deploy write_lock setting has been set def check_write_lock!(config) write_lock = config.fetch(:deploy, {})[:write_lock] if write_lock logger.fatal(_("Making changes to deployed environments has been administratively disabled.")) logger.fatal(_("Reason: %{write_lock}") % {write_lock: write_lock}) exit(16) end end end end end end r10k-4.0.2/lib/r10k/action/deploy/display.rb000066400000000000000000000075341460033767200204010ustar00rootroot00000000000000require 'r10k/action/base' require 'r10k/action/deploy/deploy_helpers' require 'r10k/deployment' module R10K module Action module Deploy class Display < R10K::Action::Base include R10K::Action::Deploy::DeployHelpers # @param opts [Hash] A hash of options defined in #allowed_initialized_opts # and managed by the SetOps mixin within the Action::Base class. # Corresponds to the CLI flags and options. # @param argv [Enumerable] Typically CRI::ArgumentList or Array. A list-like # collection of the remaining arguments to the CLI invocation (after # removing flags and options). # @param settings [Hash] A hash of configuration loaded from the relevant # config (r10k.yaml). # # @note All arguments will be required in the next major version def initialize(opts, argv, settings = {}) super @settings = @settings.merge({ overrides: { environments: { preload_environments: @fetch, requested_environments: @argv.map { |arg| arg.gsub(/\W/, '_') } }, modules: {}, output: { format: @format, trace: @trace, detail: @detail }, purging: {} } }) end def call expect_config! deployment = R10K::Deployment.new(@settings) if @settings.dig(:overrides, :environments, :preload_environments) deployment.preload! deployment.validate! end output = { :sources => deployment.sources.map { |source| source_info(source, @settings.dig(:overrides, :environments, :requested_environments)) } } case @settings.dig(:overrides, :output, :format) when 'json' then json_format(output) else yaml_format(output) end # exit 0 true rescue => e logger.error R10K::Errors::Formatting.format_exception(e, @settings.dig(:overrides, :output, :trace)) false end private def json_format(output) require 'json' puts JSON.pretty_generate(output) end def yaml_format(output) require 'yaml' puts output.to_yaml end def source_info(source, requested_environments = []) source_info = { :name => source.name, :basedir => source.basedir, } source_info[:prefix] = source.prefix if source.prefix source_info[:remote] = source.remote if source.respond_to?(:remote) select_all_envs = requested_environments.empty? 
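        # An empty request list means no environment filter was given, so every
        # environment known to this source is included in the output.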
env_list = source.environments.select { |env| select_all_envs || requested_environments.include?(env.name) } source_info[:environments] = env_list.map { |env| environment_info(env) } source_info end def environment_info(env) modules = @settings.dig(:overrides, :environments, :deploy_modules) if !modules && !@settings.dig(:overrides, :output, :detail) env.dirname else env_info = env.info.merge({ :status => (env.status rescue nil), }) env_info[:modules] = env.modules.map { |mod| module_info(mod) } if modules env_info end end def module_info(mod) if @settings.dig(:overrides, :output, :detail) { :name => mod.title, :properties => mod.properties } else mod.title end end def allowed_initialize_opts super.merge({ puppetfile: :modules, modules: :self, detail: :self, format: :self, fetch: :self }) end end end end end r10k-4.0.2/lib/r10k/action/deploy/environment.rb000066400000000000000000000222011460033767200212640ustar00rootroot00000000000000require 'r10k/action/base' require 'r10k/action/deploy/deploy_helpers' require 'r10k/action/visitor' require 'r10k/deployment' require 'r10k/util/setopts' require 'json' module R10K module Action module Deploy class Environment < R10K::Action::Base include R10K::Action::Deploy::DeployHelpers include R10K::Action::Visitor # Deprecated attr_reader :force attr_reader :settings # @param opts [Hash] A hash of options defined in #allowed_initialized_opts # and managed by the SetOps mixin within the Action::Base class. # Corresponds to the CLI flags and options. # @param argv [Enumerable] Typically CRI::ArgumentList or Array. A list-like # collection of the remaining arguments to the CLI invocation (after # removing flags and options). # @param settings [Hash] A hash of configuration loaded from the relevant # config (r10k.yaml). # # @note All arguments will be required in the next major version def initialize(opts, argv, settings = {}) super # instance variables below are set by the super class based on the # spec of #allowed_initialize_opts and any command line flags. This # gives a preference order of cli flags > config files > defaults. @settings = @settings.merge({ overrides: { environments: { requested_environments: @argv.map { |arg| arg.gsub(/\W/,'_') }, default_branch_override: @default_branch_override, generate_types: @generate_types || settings.dig(:deploy, :generate_types) || false, preload_environments: true, incremental: @incremental }, modules: { default_ref: settings.dig(:git, :default_ref), exclude_spec: settings.dig(:deploy, :exclude_spec), requested_modules: [], deploy_modules: @modules, pool_size: @settings[:pool_size] || 4, force: !@no_force, # force here is used to make it easier to reason about }, purging: { purge_levels: settings.dig(:deploy, :purge_levels) || [], purge_allowlist: settings.dig(:deploy, :purge_allowlist) || [] }, forge: { allow_puppetfile_override: settings.dig(:forge, :allow_puppetfile_override) || false }, output: {} } }) end def call @visit_ok = true begin expect_config! deployment = R10K::Deployment.new(@settings) check_write_lock!(@settings) deployment.accept(self) rescue => e @visit_ok = false logger.error R10K::Errors::Formatting.format_exception(e, @trace) end @visit_ok end private def visit_deployment(deployment) # Ensure that everything can be preloaded. If we cannot preload all # sources then we can't fully enumerate all environments which # could be dangerous. If this fails then an exception will be raised # and execution will be halted. 
if @settings.dig(:overrides, :environments, :preload_environments) deployment.preload! deployment.validate! end undeployable = undeployable_environment_names(deployment.environments, @settings.dig(:overrides, :environments, :requested_environments)) if !undeployable.empty? @visit_ok = false logger.error _("Environment(s) \'%{environments}\' cannot be found in any source and will not be deployed.") % {environments: undeployable.join(", ")} end yield if @settings.dig(:overrides, :purging, :purge_levels).include?(:deployment) logger.debug("Purging unmanaged environments for deployment...") deployment.sources.each do |source| source.reload! end deployment.purge! end ensure if (postcmd = @settings[:postrun]) if postcmd.grep('$modifiedenvs').any? envs = deployment.environments.map { |e| e.dirname } requested_envs = @settings.dig(:overrides, :environments, :requested_environments) envs.reject! { |e| !requested_envs.include?(e) } if requested_envs.any? postcmd = postcmd.map { |e| e.gsub('$modifiedenvs', envs.join(' ')) } end logger.debug _("Executing postrun command.") subproc = R10K::Util::Subprocess.new(postcmd) subproc.logger = logger subproc.execute end end def visit_source(source) yield end def visit_environment(environment) requested_envs = @settings.dig(:overrides, :environments, :requested_environments) if !(requested_envs.empty? || requested_envs.any? { |name| environment.dirname == name }) logger.debug1(_("Environment %{env_dir} does not match environment name filter, skipping") % {env_dir: environment.dirname}) return end started_at = Time.new @environment_ok = true status = environment.status logger.info _("Deploying environment %{env_path}") % {env_path: environment.path} environment.sync logger.info _("Environment %{env_dir} is now at %{env_signature}") % {env_dir: environment.dirname, env_signature: environment.signature} if status == :absent || @settings.dig(:overrides, :modules, :deploy_modules) if status == :absent logger.debug(_("Environment %{env_dir} is new, updating all modules") % {env_dir: environment.dirname}) end previous_ok = @visit_ok @visit_ok = true environment.deploy @environment_ok = @visit_ok @visit_ok &&= previous_ok end if @settings.dig(:overrides, :purging, :purge_levels).include?(:environment) if @visit_ok logger.debug("Purging unmanaged content for environment '#{environment.dirname}'...") environment.purge!(:recurse => true, :whitelist => environment.whitelist(@settings.dig(:overrides, :purging, :purge_allowlist))) else logger.debug("Not purging unmanaged content for environment '#{environment.dirname}' due to prior deploy failures.") end end if @settings.dig(:overrides, :environments, :generate_types) if @environment_ok logger.debug("Generating puppet types for environment '#{environment.dirname}'...") environment.generate_types! else logger.debug("Not generating puppet types for environment '#{environment.dirname}' due to puppetfile failures.") end end write_environment_info!(environment, started_at, @visit_ok) end def write_environment_info!(environment, started_at, success) module_deploys = begin environment.modules.map do |mod| props = mod.properties { name: mod.name, version: props[:expected], sha: props[:type] == :git ? 
props[:actual] : nil } end rescue logger.debug("Unable to get environment module deploy data for .r10k-deploy.json at #{environment.path}") [] end # make this file write as atomic as possible in pure ruby final = "#{environment.path}/.r10k-deploy.json" staging = "#{environment.path}/.r10k-deploy.json~" File.open(staging, 'w') do |f| deploy_info = environment.info.merge({ :started_at => started_at, :finished_at => Time.new, :deploy_success => success, :module_deploys => module_deploys, }) f.puts(JSON.pretty_generate(deploy_info)) end FileUtils.mv(staging, final) end def undeployable_environment_names(environments, expected_names) if expected_names.empty? [] else known_names = environments.map(&:dirname) expected_names - known_names end end def allowed_initialize_opts super.merge(puppetfile: :modules, modules: :self, cachedir: :self, incremental: :self, 'no-force': :self, 'exclude-spec': :self, 'generate-types': :self, 'puppet-path': :self, 'puppet-conf': :self, 'private-key': :self, 'oauth-token': :self, 'default-branch-override': :self, 'github-app-id': :self, 'github-app-key': :self, 'github-app-ttl': :self) end end end end end r10k-4.0.2/lib/r10k/action/deploy/module.rb000066400000000000000000000117041460033767200202130ustar00rootroot00000000000000require 'r10k/action/base' require 'r10k/action/deploy/deploy_helpers' require 'r10k/action/visitor' require 'r10k/deployment' module R10K module Action module Deploy class Module < R10K::Action::Base include R10K::Action::Deploy::DeployHelpers include R10K::Action::Visitor # Deprecated attr_reader :force attr_reader :settings # @param opts [Hash] A hash of options defined in #allowed_initialized_opts # and managed by the SetOps mixin within the Action::Base class. # Corresponds to the CLI flags and options. # @param argv [Enumerable] Typically CRI::ArgumentList or Array. A list-like # collection of the remaining arguments to the CLI invocation (after # removing flags and options). # @param settings [Hash] A hash of configuration loaded from the relevant # config (r10k.yaml). # # @note All arguments will be required in the next major version def initialize(opts, argv, settings = {}) super requested_env = @opts[:environment] ? [@opts[:environment].gsub(/\W/, '_')] : [] @modified_envs = [] @settings = @settings.merge({ overrides: { environments: { requested_environments: requested_env, generate_types: @generate_types }, modules: { default_ref: settings.dig(:git, :default_ref), exclude_spec: settings.dig(:deploy, :exclude_spec), pool_size: @settings[:pool_size] || 4, requested_modules: @argv.map.to_a, # force here is used to make it easier to reason about force: !@no_force }, forge: { allow_puppetfile_override: settings.dig(:forge, :allow_puppetfile_override) || false }, purging: {}, output: {} } }) end def call @visit_ok = true begin expect_config! deployment = R10K::Deployment.new(@settings) check_write_lock!(@settings) deployment.accept(self) rescue => e @visit_ok = false logger.error R10K::Errors::Formatting.format_exception(e, @trace) end @visit_ok end private def visit_deployment(deployment) yield ensure if (postcmd = @settings[:postrun]) if @modified_envs.any? envs_to_run = @modified_envs.join(' ') logger.debug _("Running postrun command for environments: %{envs_to_run}.") % { envs_to_run: envs_to_run } if postcmd.grep('$modifiedenvs').any? 
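            # The literal token '$modifiedenvs' in the configured postrun command
            # is replaced with the dirnames of the environments this run actually
            # modified. A hypothetical r10k.yaml value for illustration:
            #
            #   postrun: ['/usr/local/bin/rebuild-caches', '$modifiedenvs']
            #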
postcmd = postcmd.map { |e| e.gsub('$modifiedenvs', envs_to_run) } end subproc = R10K::Util::Subprocess.new(postcmd) subproc.logger = logger subproc.execute else logger.debug _("No environments were modified, not executing postrun command.") end end end def visit_source(source) yield end def visit_environment(environment) requested_envs = @settings.dig(:overrides, :environments, :requested_environments) if !requested_envs.empty? && !requested_envs.include?(environment.dirname) logger.debug1(_("Only updating modules in environment(s) %{opt_env} skipping environment %{env_path}") % {opt_env: requested_envs.inspect, env_path: environment.path}) else logger.debug1(_("Updating modules %{modules} in environment %{env_path}") % {modules: @settings.dig(:overrides, :modules, :requested_modules).inspect, env_path: environment.path}) updated_modules = environment.deploy # We actually synced a module in this env if !updated_modules.nil? && !updated_modules.empty? # Record modified environment for postrun command @modified_envs << environment.dirname if generate_types = @settings.dig(:overrides, :environments, :generate_types) logger.debug("Generating puppet types for environment '#{environment.dirname}'...") environment.generate_types! end end end end def allowed_initialize_opts super.merge(environment: true, cachedir: :self, 'exclude-spec': :self, 'no-force': :self, 'generate-types': :self, 'puppet-path': :self, 'puppet-conf': :self, 'private-key': :self, 'oauth-token': :self, 'github-app-id': :self, 'github-app-key': :self, 'github-app-ttl': :self) end end end end end r10k-4.0.2/lib/r10k/action/puppetfile.rb000066400000000000000000000003771460033767200176130ustar00rootroot00000000000000module R10K module Action module Puppetfile require 'r10k/action/puppetfile/cri_runner' require 'r10k/action/puppetfile/install' require 'r10k/action/puppetfile/check' require 'r10k/action/puppetfile/purge' end end end r10k-4.0.2/lib/r10k/action/puppetfile/000077500000000000000000000000001460033767200172575ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/action/puppetfile/check.rb000066400000000000000000000021601460033767200206600ustar00rootroot00000000000000require 'r10k/action/base' require 'r10k/errors/formatting' require 'r10k/module_loader/puppetfile' module R10K module Action module Puppetfile class Check < R10K::Action::Base def call options = { basedir: @root } options[:overrides] = {} options[:overrides][:modules] = { default_ref: @settings.dig(:git, :default_ref) } options[:moduledir] = @moduledir if @moduledir options[:puppetfile] = @puppetfile if @puppetfile loader = R10K::ModuleLoader::Puppetfile.new(**options) begin loader.load! 
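          # load! raises if the Puppetfile cannot be parsed; that exception is caught
          # by the rescue below, printed to stderr, and causes the check to return
          # false. The loop that follows additionally verifies that every Git-based
          # module has a ref defined.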
loader.modules.each do |mod| if mod.instance_of?(R10K::Module::Git) mod.validate_ref_defined end end $stderr.puts _("Syntax OK") true rescue => e $stderr.puts R10K::Errors::Formatting.format_exception(e, @trace) false end end private def allowed_initialize_opts super.merge(root: :self, puppetfile: :self, moduledir: :self) end end end end end r10k-4.0.2/lib/r10k/action/puppetfile/cri_runner.rb000066400000000000000000000007051460033767200217540ustar00rootroot00000000000000require 'r10k/action/cri_runner' module R10K module Action module Puppetfile # Extend the default Cri Runner with Puppetfile specific opts # # @api private class CriRunner < R10K::Action::CriRunner include R10K::Logging def handle_opts(opts) opts[:root] ||= wd super(opts) end private def wd Dir.getwd end end end end end r10k-4.0.2/lib/r10k/action/puppetfile/install.rb000066400000000000000000000032531460033767200212550ustar00rootroot00000000000000require 'r10k/action/base' require 'r10k/content_synchronizer' require 'r10k/errors/formatting' require 'r10k/module_loader/puppetfile' require 'r10k/util/cleaner' module R10K module Action module Puppetfile class Install < R10K::Action::Base def call begin options = { basedir: @root, overrides: { force: @force || false } } options[:overrides][:modules] = { default_ref: @settings.dig(:git, :default_ref) } options[:moduledir] = @moduledir if @moduledir options[:puppetfile] = @puppetfile if @puppetfile options[:module_exclude_regex] = @module_exclude_regex if @module_exclude_regex loader = R10K::ModuleLoader::Puppetfile.new(**options) loaded_content = loader.load! pool_size = @settings[:pool_size] || 4 modules = loaded_content[:modules] if pool_size > 1 R10K::ContentSynchronizer.concurrent_sync(modules, pool_size, logger) else R10K::ContentSynchronizer.serial_sync(modules, logger) end R10K::Util::Cleaner.new(loaded_content[:managed_directories], loaded_content[:desired_contents], loaded_content[:purge_exclusions]).purge! true rescue => e logger.error R10K::Errors::Formatting.format_exception(e, @trace) false end end private def allowed_initialize_opts super.merge(root: :self, puppetfile: :self, moduledir: :self, :'module-exclude-regex' => :self, force: :self ) end end end end end r10k-4.0.2/lib/r10k/action/puppetfile/purge.rb000066400000000000000000000020001460033767200207160ustar00rootroot00000000000000require 'r10k/action/base' require 'r10k/errors/formatting' require 'r10k/module_loader/puppetfile' require 'r10k/util/cleaner' module R10K module Action module Puppetfile class Purge < R10K::Action::Base def call options = { basedir: @root } options[:moduledir] = @moduledir if @moduledir options[:puppetfile] = @puppetfile if @puppetfile loader = R10K::ModuleLoader::Puppetfile.new(**options) loaded_content = loader.load! R10K::Util::Cleaner.new(loaded_content[:managed_directories], loaded_content[:desired_contents], loaded_content[:purge_exclusions]).purge! true rescue => e logger.error R10K::Errors::Formatting.format_exception(e, @trace) false end private def allowed_initialize_opts super.merge(root: :self, puppetfile: :self, moduledir: :self) end end end end end r10k-4.0.2/lib/r10k/action/runner.rb000066400000000000000000000134701460033767200167450ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/errors' require 'r10k/util/license' require 'puppet_forge/connection' module R10K module Action class Runner include R10K::Logging def initialize(opts, argv, klass) @opts = opts @argv = argv @klass = klass @settings = {} end def instance if @_instance.nil? 
iopts = @opts.dup iopts.delete(:loglevel) @_instance = @klass.new(iopts, @argv, @settings) end @_instance end def call setup_logging setup_settings # @todo check arguments setup_authorization instance.call end def setup_logging if @opts.key?(:loglevel) R10K::Logging.level = @opts[:loglevel] end end def setup_settings config_settings = settings_from_config(@opts[:config]) overrides = {} overrides[:cachedir] = @opts[:cachedir] if @opts.key?(:cachedir) if @opts.key?(:'puppet-path') || @opts.key?(:'generate-types') || @opts.key?(:'exclude-spec') || @opts.key?(:'puppet-conf') overrides[:deploy] = {} overrides[:deploy][:puppet_path] = @opts[:'puppet-path'] if @opts.key?(:'puppet-path') overrides[:deploy][:puppet_conf] = @opts[:'puppet-conf'] if @opts.key?(:'puppet-conf') overrides[:deploy][:generate_types] = @opts[:'generate-types'] if @opts.key?(:'generate-types') overrides[:deploy][:exclude_spec] = @opts[:'exclude-spec'] if @opts.key?(:'exclude-spec') end # If the log level has been given as an argument, ensure that output happens on stderr if @opts.key?(:loglevel) overrides[:logging] = {} overrides[:logging][:level] = @opts[:loglevel] overrides[:logging][:disable_default_stderr] = false end with_overrides = config_settings.merge(overrides) do |key, oldval, newval| newval = oldval.merge(newval) if oldval.is_a? Hash logger.debug2 _("Overriding config file setting '%{key}': '%{old_val}' -> '%{new_val}'") % {key: key, old_val: oldval, new_val: newval} newval end # Credentials from the CLI override both the global and per-repo # credentials from the config, and so need to be handled specially with_overrides = add_credential_overrides(with_overrides) @settings = R10K::Settings.global_settings.evaluate(with_overrides) R10K::Initializers::GlobalInitializer.new(@settings).call rescue R10K::Settings::Collection::ValidationError => e logger.error e.format exit(8) end # Set up authorization from license file if it wasn't # already set via the config def setup_authorization if PuppetForge::Connection.authorization.nil? begin license = R10K::Util::License.load if license.respond_to?(:authorization_token) logger.debug "Using token from license to connect to the Forge." PuppetForge::Connection.authorization = license.authorization_token end rescue R10K::Error => e logger.warn e.message end end end private def settings_from_config(override_path) loader = R10K::Settings::Loader.new path = loader.search(override_path) results = {} if path @opts[:config] = path logger.debug2 _("Reading configuration from %{config_path}") % {config_path: path.inspect} results = loader.read(path) else logger.debug2 _("No config file explicitly given and no default config file could be found, default settings will be used.") end results end def add_credential_overrides(overrides) sshkey_path = @opts[:'private-key'] token_path = @opts[:'oauth-token'] app_id = @opts[:'github-app-id'] app_private_key_path = @opts[:'github-app-key'] app_ttl = @opts[:'github-app-ttl'] if sshkey_path && token_path raise R10K::Error, "Cannot specify both an SSH key and a token to use with this deploy." end if sshkey_path && (app_private_key_path || app_id) raise R10K::Error, "Cannot specify both an SSH key and an SSL key or Github App id to use with this deploy." end if token_path && (app_private_key_path || app_id) raise R10K::Error, "Cannot specify both an OAuth token and an SSL key or Github App id to use with this deploy." end if app_id && ! app_private_key_path || app_private_key_path && ! 
app_id raise R10K::Error, "Must specify both id and SSL private key to use Github App for this deploy." end if sshkey_path overrides[:git] ||= {} overrides[:git][:private_key] = sshkey_path if repo_settings = overrides[:git][:repositories] repo_settings.each do |repo| repo[:private_key] = sshkey_path end end elsif token_path overrides[:git] ||= {} overrides[:git][:oauth_token] = token_path if repo_settings = overrides[:git][:repositories] repo_settings.each do |repo| repo[:oauth_token] = token_path end end elsif app_id overrides[:git] ||= {} overrides[:git][:github_app_id] = app_id overrides[:git][:github_app_key] = app_private_key_path overrides[:git][:github_app_ttl] = app_ttl if repo_settings = overrides[:git][:repositories] repo_settings.each do |repo| repo[:github_app_id] = app_id repo[:github_app_key] = app_private_key_path repo[:github_app_ttl] = app_ttl end end end overrides end end end end r10k-4.0.2/lib/r10k/action/visitor.rb000066400000000000000000000021111460033767200171210ustar00rootroot00000000000000require 'r10k/errors/formatting' require 'r10k/logging' module R10K module Action # Implement the Visitor pattern via pseudo double dispatch. # # Visitor classes must implement #visit_type methods for each type that may # be visited. If the visitor should descend into child objects the #visit_ # method should yield to the passed block. # # Visitor classes must implement #logger so that error messages can be logged. # # @api private module Visitor include R10K::Logging # Dispatch to the type specific visitor method # # @param type [Symbol] The object type to dispatch for # @param other [Object] The actual object to pass to the visitor method # @param block [Proc] The block that the called visitor method may yield # to in case recursion is desired. # @return [void] def visit(type, other, &block) send("visit_#{type}", other, &block) rescue => e logger.error R10K::Errors::Formatting.format_exception(e, @trace) @visit_ok = false end end end end r10k-4.0.2/lib/r10k/cli.rb000066400000000000000000000031431460033767200147220ustar00rootroot00000000000000require 'r10k' require 'r10k/version' require 'r10k/cli/ext/logging' require 'cri' module R10K::CLI def self.command @cmd ||= Cri::Command.define do name 'r10k' usage 'r10k [options]' summary 'Killer robot powered Puppet environment deployment' description <<-EOD r10k is a suite of commands to help deploy and manage puppet code for complex environments. EOD flag :h, :help, 'Show help for this command' do |value, cmd| # This is evil because we may not necessarily be called from the # command line and have a meaningful ARGV to scan. However the best # way of having a globally useful --help command is to define the # behavior in the block of the option to immediately handle it and exit # and we don't have access to the verbose option, so the simple method # is to simply scan ARGV. verbose = (ARGV.include?('-v') || ARGV.include?('--verbose')) puts cmd.help(:verbose => verbose) exit 0 end flag :t, :trace, 'Display stack traces on application crash' loglevels = R10K::Logging::LOG_LEVELS.reverse.map(&:downcase).join(", ") optional :v, :verbose, "Set log verbosity. 
Valid values: #{loglevels}" flag nil, :color, 'Enable colored log messages' required :c, :config, 'Specify a global configuration file' run do |opts, args, cmd| puts cmd.help(:verbose => opts[:verbose]) exit 0 end end end end require 'r10k/cli/deploy' require 'r10k/cli/puppetfile' require 'r10k/cli/version' require 'r10k/cli/help' r10k-4.0.2/lib/r10k/cli/000077500000000000000000000000001460033767200143745ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/cli/deploy.rb000066400000000000000000000134151460033767200162210ustar00rootroot00000000000000require 'r10k/cli' require 'r10k/deployment' require 'r10k/deployment/config' require 'r10k/action/cri_runner' require 'r10k/action/deploy' require 'cri' module R10K::CLI module Deploy class TransformExcludeSpec def call(input) # To be backward compatible with the 3.x flag version of this setting, # r10k allows this flag to have an optional argument. When no argument # is supplied, cri defaults to setting the class to true, so we check # for TrueClass here as well as "true". return true if input == true || input == 'true' return false if input == 'false' raise ArgumentError end end def self.command @cmd ||= Cri::Command.define do name 'deploy' usage 'deploy ' summary 'Puppet dynamic environment deployment' description <<-DESCRIPTION `r10k deploy` implements the Git branch to Puppet environment workflow (https://puppet.com/docs/puppet/latest/environments_about.html). DESCRIPTION option nil, :cachedir, 'Specify a cachedir, overriding the value in config', argument: :required flag nil, :'no-force', 'Prevent the overwriting of local module modifications' flag nil, :'generate-types', 'Run `puppet generate types` after updating an environment' option nil, :'exclude-spec', 'Exclude the module\'s spec dir for deployment', argument: :optional, transform: TransformExcludeSpec.new option nil, :'puppet-path', 'Path to puppet executable', argument: :required do |value, cmd| unless File.executable? value $stderr.puts "The specified puppet executable #{value} is not executable." puts cmd.help exit 1 end end option nil, :'puppet-conf', 'Path to puppet.conf', argument: :required option nil, :'private-key', 'Path to SSH key to use when cloning. Only valid with rugged provider', argument: :required option nil, :'oauth-token', 'Path to OAuth token to use when cloning. Only valid with rugged provider', argument: :required option nil, :'github-app-id', 'Github App id. Only valid with rugged provider', argument: :required option nil, :'github-app-key', 'Github App private key. Only valid with rugged provider', argument: :required option nil, :'github-app-ttl', 'Github App token expiration, in seconds. Only valid with rugged provider', default: "120", argument: :optional run do |opts, args, cmd| puts cmd.help(:verbose => opts[:verbose]) exit 0 end end end module Environment def self.command @cmd ||= Cri::Command.define do name 'environment' usage 'environment <...>' summary 'Deploy environments and their dependent modules' description <<-DESCRIPTION `r10k deploy environment` creates and updates Puppet environments based on Git branches. Environments can provide a Puppetfile at the root of the directory to deploy independent Puppet modules. To recursively deploy an environment, pass the `--modules` flag to the command. **NOTE**: If an environment has a Puppetfile when it is instantiated a recursive update will be forced. It is assumed that environments are dependent on modules specified in the Puppetfile and an update will be automatically scheduled. 
On subsequent deployments, Puppetfile deployment will default to off. DESCRIPTION flag :p, :puppetfile, 'Deploy modules (deprecated, use -m)' flag :m, :modules, 'Deploy modules' flag nil, :incremental, 'Used with the --modules flag, only update those modules whose definition has changed or whose definition allows the version to float' option nil, :'default-branch-override', 'Specify a branchname to override the default branch in the puppetfile', argument: :required runner R10K::Action::CriRunner.wrap(R10K::Action::Deploy::Environment) end end end module Module def self.command @cmd ||= Cri::Command.define do name 'module' usage 'module [module] ' summary 'Deploy modules in all environments' description <<-DESCRIPTION `r10k deploy module` Deploys and updates modules inside of Puppet environments. It will load the Puppetfile configurations out of all environments, and will try to deploy the given module names in all environments. DESCRIPTION option :e, :environment, 'Update the modules in the given environment', argument: :required runner R10K::Action::CriRunner.wrap(R10K::Action::Deploy::Module) end end end module Display def self.command @cmd ||= Cri::Command.define do name 'display' aliases 'list' usage 'display' summary 'Display environments and modules in the deployment' flag :p, :puppetfile, 'Display modules (deprecated, use -m)' flag :m, :modules, 'Display modules' flag nil, :detail, 'Display detailed information' flag nil, :fetch, 'Update available environment lists from all remote sources' option nil, :format, 'Display output in a specific format. Valid values: json, yaml. Default: yaml', argument: :required runner R10K::Action::CriRunner.wrap(R10K::Action::Deploy::Display) end end end end end R10K::CLI.command.add_command(R10K::CLI::Deploy.command) R10K::CLI::Deploy.command.add_command(R10K::CLI::Deploy::Environment.command) R10K::CLI::Deploy.command.add_command(R10K::CLI::Deploy::Module.command) R10K::CLI::Deploy.command.add_command(R10K::CLI::Deploy::Display.command) r10k-4.0.2/lib/r10k/cli/ext/000077500000000000000000000000001460033767200151745ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/cli/ext/logging.rb000066400000000000000000000003651460033767200171530ustar00rootroot00000000000000require 'cri' module Cri class CommandDSL include R10K::Logging def logger unless @logger @logger = Log4r::Logger.new(@command.name) @logger.add R10K::Logging.outputter end @logger end end end r10k-4.0.2/lib/r10k/cli/help.rb000066400000000000000000000002051460033767200156460ustar00rootroot00000000000000require 'r10k/cli' require 'cri' module R10K::CLI help_cmd = Cri::Command.new_basic_help self.command.add_command(help_cmd) end r10k-4.0.2/lib/r10k/cli/puppetfile.rb000066400000000000000000000047441460033767200171070ustar00rootroot00000000000000require 'r10k/cli' require 'r10k/action/puppetfile' require 'cri' module R10K::CLI module Puppetfile def self.command @cmd ||= Cri::Command.define do name 'puppetfile' usage 'puppetfile ' summary 'Perform operations on a Puppetfile' description <<-DESCRIPTION `r10k puppetfile` provides an implementation of the librarian-puppet style Puppetfile (http://bombasticmonkey.com/librarian-puppet/). 
DESCRIPTION run do |opts, args, cmd| puts cmd.help(:verbose => opts[:verbose]) exit 0 end end end module Install def self.command @cmd ||= Cri::Command.define do name 'install' usage 'install' summary 'Install all modules from a Puppetfile' option nil, :moduledir, 'Path to install modules to', argument: :required option nil, :puppetfile, 'Path to puppetfile', argument: :required option nil, :'module-exclude-regex', 'A regex to exclude modules from installation. Helpful in CI environments.', argument: :required flag nil, :force, 'Force locally changed files to be overwritten' runner R10K::Action::Puppetfile::CriRunner.wrap(R10K::Action::Puppetfile::Install) end end end module Check def self.command @cmd ||= Cri::Command.define do name 'check' usage 'check' summary 'Try and load the Puppetfile to verify the syntax is correct.' option nil, :puppetfile, 'Path to Puppetfile', argument: :required runner R10K::Action::Puppetfile::CriRunner.wrap(R10K::Action::Puppetfile::Check) end end end module Purge def self.command @cmd ||= Cri::Command.define do name 'purge' usage 'purge' summary 'Purge unmanaged modules from a Puppetfile managed directory' option nil, :moduledir, 'Path to install modules to', argument: :required option nil, :puppetfile, 'Path to Puppetfile', argument: :required runner R10K::Action::Puppetfile::CriRunner.wrap(R10K::Action::Puppetfile::Purge) end end end end end R10K::CLI.command.add_command(R10K::CLI::Puppetfile.command) R10K::CLI::Puppetfile.command.add_command(R10K::CLI::Puppetfile::Install.command) R10K::CLI::Puppetfile.command.add_command(R10K::CLI::Puppetfile::Check.command) R10K::CLI::Puppetfile.command.add_command(R10K::CLI::Puppetfile::Purge.command) r10k-4.0.2/lib/r10k/cli/version.rb000066400000000000000000000014141460033767200164060ustar00rootroot00000000000000require 'r10k/cli' require 'r10k/version' require 'cri' module R10K::CLI module Version def self.command @cmd ||= Cri::Command.define do name 'version' usage 'version' summary 'Print the version of r10k' run do |opts, args, cmd| puts "r10k #{R10K::VERSION}" if opts[:verbose] puts RUBY_DESCRIPTION cmdpath = caller.last.slice(/\A.*#{$PROGRAM_NAME}/) puts "Command path: #{cmdpath}" puts "Interpreter path: #{Gem.ruby}" if RUBY_VERSION >= '1.9' puts "Default encoding: #{Encoding.default_external.name}" end end exit 0 end end end end self.command.add_command(Version.command) end r10k-4.0.2/lib/r10k/content_synchronizer.rb000066400000000000000000000064311460033767200204450ustar00rootroot00000000000000module R10K module ContentSynchronizer def self.serial_accept(modules, visitor, loader) visitor.visit(:puppetfile, loader) do serial_sync(modules) end end def self.serial_sync(modules) updated_modules = [] modules.each do |mod| updated = mod.sync updated_modules << mod.name if updated end updated_modules end # Returns a Queue of the names of modules actually updated def self.concurrent_accept(modules, visitor, loader, pool_size, logger) mods_queue = modules_visit_queue(modules, visitor, loader) sync_queue(mods_queue, pool_size, logger) end # Returns a Queue of the names of modules actually updated def self.concurrent_sync(modules, pool_size, logger) mods_queue = modules_sync_queue(modules) sync_queue(mods_queue, pool_size, logger) end # Returns a Queue of the names of modules actually updated def self.sync_queue(mods_queue, pool_size, logger) logger.debug _("Updating modules with %{pool_size} threads") % {pool_size: pool_size} updated_modules = Queue.new thread_pool = pool_size.times.map { sync_thread(mods_queue, logger, 
updated_modules) } thread_exception = nil # If any threads raise an exception the deployment is considered a failure. # In that event clear the queue, wait for other threads to finish their # current work, then re-raise the first exception caught. begin thread_pool.each(&:join) # Return the list of all modules that were actually updated updated_modules rescue => e logger.error _("Error during concurrent deploy of a module: %{message}") % {message: e.message} mods_queue.clear thread_exception ||= e retry ensure raise thread_exception unless thread_exception.nil? end end def self.modules_visit_queue(modules, visitor, loader) Queue.new.tap do |queue| visitor.visit(:puppetfile, loader) do enqueue_modules(queue, modules) end end end def self.modules_sync_queue(modules) Queue.new.tap do |queue| enqueue_modules(queue, modules) end end def self.enqueue_modules(queue, modules) modules_by_cachedir = modules.group_by { |mod| mod.cachedir } modules_without_vcs_cachedir = modules_by_cachedir.delete(:none) || [] modules_without_vcs_cachedir.each {|mod| queue << Array(mod) } modules_by_cachedir.values.each {|mods| queue << mods } end def self.sync_thread(mods_queue, logger, updated_modules) Thread.new do begin while mods = mods_queue.pop(true) do mods.each do |mod| begin updated = mod.sync updated_modules << mod.name if updated rescue Exception => e logger.error _("Module %{mod_name} failed to synchronize due to %{message}") % {mod_name: mod.name, message: e.message} raise e end end end rescue ThreadError => e logger.debug _("Module thread %{id} exiting: %{message}") % {message: e.message, id: Thread.current.object_id} Thread.exit rescue => e Thread.main.raise(e) end end end end end r10k-4.0.2/lib/r10k/deployment.rb000066400000000000000000000071771460033767200163460ustar00rootroot00000000000000require 'r10k/source' require 'r10k/util/basedir' require 'r10k/errors' require 'set' module R10K # A deployment models the entire state of the configuration that a Puppet # master can use. It contains a set of sources that can produce environments # and manages the contents of directories where environments are deployed. # # @api private class Deployment require 'r10k/deployment/config' # Generate a deployment object based on a config # # @deprecated # # @param path [String] The path to the deployment config # @return [R10K::Deployment] The deployment loaded with the given config def self.load_config(path, overrides={}) config = R10K::Deployment::Config.new(path, overrides) new(config) end # @!attribute [r] config # @return [R10K::Deployment::Config] attr_reader :config def initialize(config) @config = config end def preload! sources.each(&:preload!) end # Lazily load all sources # # This instantiates the @_sources instance variable, but should not be # used directly as it could be legitimately unset if we're doing lazy # loading. # # @return [Array] All repository sources # specified in the config def sources load_sources if @_sources.nil? @_sources end # Lazily load all environments # # This instantiates the @_environments instance variable, but should not be # used directly as it could be legitimately unset if we're doing lazy # loading. # # @return [Array] All enviroments across # all sources def environments load_environments if @_environments.nil? 
@_environments end # @return [Array] The paths used by all contained sources def paths paths_and_sources.keys end # @return [Hash] def paths_and_sources pathmap = Hash.new { |h, k| h[k] = [] } sources.each { |source| pathmap[source.basedir] << source } pathmap end # Remove unmanaged content from all source paths def purge! paths_and_sources.each_pair do |path, sources_at_path| R10K::Util::Basedir.new(path, sources_at_path).purge! end end def validate! hash = {} sources.each do |source| source.environments.each do |environment| if hash.key?(environment.path) osource, oenvironment = hash[environment.path] msg = _("Environment collision at %{env_path} between %{source}:%{env_name} and %{osource}:%{oenv_name}") % {env_path: environment.path, source: source.name, env_name: environment.name, osource: osource.name, oenv_name: oenvironment.name} raise R10K::Error, msg else hash[environment.path] = [source, environment] end end end end def accept(visitor) visitor.visit(:deployment, self) do sources.each do |source| source.accept(visitor) end end end private def load_sources sources = @config[:sources] if sources.nil? || sources.empty? raise R10K::Error, _("Unable to load sources; the supplied configuration does not define the 'sources' key") end @_sources = sources.map do |(name, hash)| R10K::Source.from_hash(name, hash.merge({overrides: @config[:overrides]})) end end def load_environments @_environments = [] sources.each do |source| @_environments += source.environments end end end end r10k-4.0.2/lib/r10k/deployment/000077500000000000000000000000001460033767200160055ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/deployment/config.rb000066400000000000000000000023031460033767200175750ustar00rootroot00000000000000require 'r10k/deployment' require 'r10k/settings/loader' require 'r10k/util/symbolize_keys' require 'r10k/errors' require 'r10k/initializers' require 'yaml' module R10K class Deployment class Config include R10K::Logging attr_accessor :configfile def initialize(configfile, overrides={}) @configfile = configfile @overrides = overrides load_config end # Perform a scan for key and check for both string and symbol keys def setting(key) @config[key] end alias [] setting def settings @config end # Load and store a config file, and set relevant options # # @param [String] configfile The path to the YAML config file def load_config loader = R10K::Settings::Loader.new hash = loader.read(@configfile) with_overrides = hash.merge(@overrides) do |key, oldval, newval| logger.debug2 _("Overriding config file setting '%{key}': '%{old_val}' -> '%{new_val}'") % {key: key, old_val: oldval, new_val: newval} newval end @config = R10K::Settings.global_settings.evaluate(with_overrides) initializer = R10K::Initializers::GlobalInitializer.new(@config) initializer.call end class ConfigError < R10K::Error end end end end r10k-4.0.2/lib/r10k/environment.rb000066400000000000000000000016551460033767200165250ustar00rootroot00000000000000module R10K module Environment def self.factory @factory ||= R10K::KeyedFactory.new end def self.register(key, klass) factory.register(key, klass) end def self.retrieve(key) factory.retrieve(key) end def self.generate(type, name, basedir, dirname, options = {}) factory.generate(type, name, basedir, dirname, options) end def self.from_hash(name, hash) R10K::Util::SymbolizeKeys.symbolize_keys!(hash) basedir = hash.delete(:basedir) dirname = hash.delete(:dirname) || name type = hash.delete(:type) type = type.is_a?(String) ? 
type.to_sym : type generate(type, name, basedir, dirname, hash) end require 'r10k/environment/base' require 'r10k/environment/with_modules' require 'r10k/environment/plain' require 'r10k/environment/git' require 'r10k/environment/svn' require 'r10k/environment/tarball' end end r10k-4.0.2/lib/r10k/environment/000077500000000000000000000000001460033767200161715ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/environment/base.rb000066400000000000000000000156071460033767200174410ustar00rootroot00000000000000require 'r10k/content_synchronizer' require 'r10k/logging' require 'r10k/module_loader/puppetfile' require 'r10k/util/cleaner' require 'r10k/util/subprocess' # This class defines a common interface for environment implementations. # # @since 1.3.0 class R10K::Environment::Base include R10K::Logging # @!attribute [r] name # @return [String] A name for this environment that is unique to the given source attr_reader :name # @!attribute [r] basedir # @return [String] The path that this environment will be created in attr_reader :basedir # @!attribute [r] dirname # @return [String] The directory name for the given environment attr_reader :dirname # @!attribute [r] path # @return [Pathname] The full path to the given environment attr_reader :path # @!attribute [r] puppetfile # @api public # @return [R10K::Puppetfile] The puppetfile instance associated with this environment attr_reader :puppetfile # @!attribute [r] puppetfile_name # @api public # @return [String] The puppetfile name (relative) attr_reader :puppetfile_name attr_reader :managed_directories, :desired_contents attr_reader :loader # Initialize the given environment. # # @param name [String] The unique name describing this environment. # @param basedir [String] The base directory where this environment will be created. # @param dirname [String] The directory name for this environment. # @param options [Hash] An additional set of options for this environment. # The semantics of this environment may depend on the environment implementation. def initialize(name, basedir, dirname, options = {}) @name = name @basedir = basedir @dirname = dirname @options = options @puppetfile_name = options.delete(:puppetfile_name) @overrides = options.delete(:overrides) || {} @full_path = File.join(@basedir, @dirname) @path = Pathname.new(File.join(@basedir, @dirname)) @puppetfile = R10K::Puppetfile.new(@full_path, {overrides: @overrides, force: @overrides.dig(:modules, :force), puppetfile_name: @puppetfile_name}) @puppetfile.environment = self loader_options = { basedir: @full_path, overrides: @overrides, environment: self } loader_options[:puppetfile] = @puppetfile_name if @puppetfile_name @loader = R10K::ModuleLoader::Puppetfile.new(**loader_options) if @overrides.dig(:environments, :incremental) @loader.load_metadata end @base_modules = nil @purge_exclusions = nil @managed_directories = [ @full_path ] @desired_contents = [] end # Synchronize the given environment. # # @api public # @abstract # @return [void] def sync raise NotImplementedError, _("%{class} has not implemented method %{method}") % {class: self.class, method: __method__} end # Determine the current status of the environment. # # This can return the following values: # # * :absent - there is no module installed # * :mismatched - there is a module installed but it must be removed and reinstalled # * :outdated - the correct module is installed but it needs to be updated # * :insync - the correct module is installed and up to date, or the module is actually a boy band. 
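  #
  # A minimal illustrative sketch (not the real implementation) of how a
  # concrete subclass might map its state onto these values; see the SVN and
  # tarball environment subclasses elsewhere in this library for the actual
  # logic:
  #
  #   def status
  #     if !@path.exist?
  #       :absent
  #     elsif !@synced
  #       :outdated
  #     else
  #       :insync
  #     end
  #   end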
# # @api public # @abstract # @return [Symbol] def status raise NotImplementedError, _("%{class} has not implemented method %{method}") % {class: self.class, method: __method__} end # Returns a unique identifier for the environment's current state. # # @api public # @abstract # @return [String] def signature raise NotImplementedError, _("%{class} has not implemented method %{method}") %{class: self.class, method: __method__} end # Returns a hash describing the current state of the environment. # # @return [Hash] def info { :name => self.name, :signature => self.signature, } end # @return [Array] All modules defined in the Puppetfile # associated with this environment. def modules if @base_modules.nil? load_puppetfile_modules end @base_modules end # @return [Array] Whether or not the given module # conflicts with any modules already defined in the r10k environment # object. def module_conflicts?(mod) false end def accept(visitor) visitor.visit(:environment, self) do puppetfile.accept(visitor) end end # Returns a Queue of the names of modules actually updated def deploy if @base_modules.nil? load_puppetfile_modules end if ! @base_modules.empty? pool_size = @overrides.dig(:modules, :pool_size) updated_modules = R10K::ContentSynchronizer.concurrent_sync(@base_modules, pool_size, logger) end if (@overrides.dig(:purging, :purge_levels) || []).include?(:puppetfile) logger.debug("Purging unmanaged Puppetfile content for environment '#{dirname}'...") @puppetfile_cleaner.purge! end updated_modules end def load_puppetfile_modules loaded_content = @loader.load @base_modules = loaded_content[:modules] @purge_exclusions = determine_purge_exclusions(loaded_content[:managed_directories], loaded_content[:desired_contents]) @puppetfile_cleaner = R10K::Util::Cleaner.new(loaded_content[:managed_directories], loaded_content[:desired_contents], loaded_content[:purge_exclusions]) end def whitelist(user_whitelist=[]) user_whitelist.collect { |pattern| File.join(@full_path, pattern) } end def determine_purge_exclusions(pf_managed_dirs = @puppetfile.managed_directories, pf_desired_contents = @puppetfile.desired_contents) list = [File.join(@full_path, '.r10k-deploy.json')].to_set list += pf_managed_dirs list += pf_desired_contents.flat_map do |item| desired_tree = [] if File.directory?(item) desired_tree << File.join(item, '**', '*') end Pathname.new(item).ascend do |path| break if path.to_s == @full_path desired_tree << path.to_s end desired_tree end list.to_a end def purge_exclusions if @purge_exclusions.nil? load_puppetfile_modules end @purge_exclusions end def generate_types! argv = [R10K::Settings.puppet_path, 'generate', 'types', '--environment', dirname, '--environmentpath', basedir, '--config', R10K::Settings.puppet_conf] subproc = R10K::Util::Subprocess.new(argv) subproc.raise_on_fail = true subproc.logger = logger result = subproc.execute unless result.stderr.empty? logger.warn "There were problems generating types for environment #{dirname}:" result.stderr.split(%r{\n}).map { |msg| logger.warn msg } end end end r10k-4.0.2/lib/r10k/environment/git.rb000066400000000000000000000054751460033767200173140ustar00rootroot00000000000000require 'r10k/puppetfile' require 'r10k/git/stateful_repository' require 'forwardable' # This class implements an environment based on a Git branch. 
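#
# Construction is normally handled by an R10K::Source, but a minimal usage
# sketch (with illustrative paths and URLs) looks roughly like:
#
# @example
#   env = R10K::Environment::Git.new(
#     'production',                        # environment name
#     '/etc/puppetlabs/code/environments', # basedir
#     'production',                        # dirname
#     { remote: 'https://git.example.com/puppet/control.git',
#       ref:    'production' }
#   )
#   env.sync
#   env.signature # => the commit SHA the environment is synced to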
# # @since 1.3.0 class R10K::Environment::Git < R10K::Environment::WithModules R10K::Environment.register(:git, self) # Register git as the default environment type R10K::Environment.register(nil, self) # @!attribute [r] remote # @return [String] The URL to the remote git repository attr_reader :remote # @!attribute [r] ref # @return [String] The git reference to use for this environment attr_reader :ref # @!attribute [r] repo # @api private # @return [R10K::Git::StatefulRepository] The git repo backing this environment attr_reader :repo include R10K::Util::Setopts # Initialize the given Git environment. # # @param name [String] The unique name describing this environment. # @param basedir [String] The base directory where this environment will be created. # @param dirname [String] The directory name for this environment. # @param options [Hash] An additional set of options for this environment. # # @param options [String] :remote The URL to the remote git repository # @param options [String] :ref The git reference to use for this environment def initialize(name, basedir, dirname, options = {}) super setopts(options, { # Standard option interface :version => :ref, :source => :remote, :type => ::R10K::Util::Setopts::Ignore, # Type-specific options :ref => :self, :remote => :self, }, raise_on_unhandled: false) # TODO: in r10k 4.0.0, a major version bump, stop allowing garbage options. # We only allow them now, here, on this object, because prior to adopting # setopts in the constructor, this object type didn't do any validation # checking of options passed, and would permit garbage parameters. @repo = R10K::Git::StatefulRepository.new(@remote, @basedir, @dirname) end # Clone or update the given Git environment. # # If the environment is being created for the first time, it will # automatically update all modules to ensure that the environment is complete. # # @api public # @return [void] def sync @repo.sync(@ref) end def status @repo.status(@ref) end # Return a sting which uniquely identifies (per source) the current state of the # environment. # # @api public # @return [String] def signature @repo.head end include R10K::Util::Purgeable # Returns an array of the full paths to all the content being managed. # @note This implements a required method for the Purgeable mixin # @return [Array] def desired_contents desired = [File.join(@full_path, '.git')] desired += @repo.tracked_paths.map { |entry| File.join(@full_path, entry) } desired += super end end r10k-4.0.2/lib/r10k/environment/name.rb000066400000000000000000000053151460033767200174420ustar00rootroot00000000000000module R10K module Environment # Handle environment name validation and modification. # # @api private class Name # @!attribute [r] name # @return [String] The functional name of the environment derived from inputs and options. attr_reader :name # @!attribute [r] original_name # @return [String] The unmodified name originally given to create the object. attr_reader :original_name INVALID_CHARACTERS = %r[\W] def initialize(original_name, opts) @source = opts[:source] @prefix = opts[:prefix] @invalid = opts[:invalid] @name = derive_name(original_name, opts[:strip_component]) @original_name = original_name @opts = opts case @invalid when 'correct_and_warn' @validate = true @correct = true when 'correct' @validate = false @correct = true when 'error' @validate = true @correct = false when NilClass @validate = opts[:validate] @correct = opts[:correct] end end # Should the environment name have invalid characters removed? def correct? 
@correct end def validate? @validate end def valid? if @validate ! @name.match(INVALID_CHARACTERS) else true end end # The directory name for the environment, modified as necessary to remove # invalid characters. # # @return [String] def dirname dir = @name.dup prefix = derive_prefix(@source,@prefix) if @correct dir.gsub!(INVALID_CHARACTERS, '_') end "#{prefix}#{dir}" end private def derive_name(original_name, strip_component) return original_name unless strip_component unless strip_component.is_a?(String) raise _('Improper configuration value given for strip_component setting in %{src} source. ' \ 'Value must be a string, a /regex/, false, or omitted. Got "%{val}" (%{type})' \ % {src: @source, val: strip_component, type: strip_component.class}) end if %r{^/.*/$}.match(strip_component) regex = Regexp.new(strip_component[1..-2]) original_name.gsub(regex, '') elsif original_name.start_with?(strip_component) original_name[strip_component.size..-1] else original_name end end def derive_prefix(source,prefix) if prefix == true "#{source}_" elsif prefix.is_a? String "#{prefix}_" else nil end end end end end r10k-4.0.2/lib/r10k/environment/plain.rb000066400000000000000000000003451460033767200176230ustar00rootroot00000000000000class R10K::Environment::Plain < R10K::Environment::WithModules R10K::Environment.register(:plain, self) def sync path.mkpath end def status :not_applicable end def signature 'plain-default' end end r10k-4.0.2/lib/r10k/environment/svn.rb000066400000000000000000000053541460033767200173330ustar00rootroot00000000000000require 'r10k/puppetfile' require 'r10k/svn/working_dir' require 'r10k/util/setopts' # This class implements an environment based on an SVN branch. # # @since 1.3.0 class R10K::Environment::SVN < R10K::Environment::Base R10K::Environment.register(:svn, self) # @!attribute [r] remote # @return [String] The URL to the remote SVN branch to check out attr_reader :remote # @!attribute [r] working_dir # @api private # @return [R10K::SVN::WorkingDir] The SVN working directory backing this environment attr_reader :working_dir # @!attribute [r] username # @return [String, nil] The SVN username to be passed to the underlying SVN commands # @api private attr_reader :username # @!attribute [r] password # @return [String, nil] The SVN password to be passed to the underlying SVN commands # @api private attr_reader :password include R10K::Util::Setopts # Initialize the given SVN environment. # # @param name [String] The unique name describing this environment. # @param basedir [String] The base directory where this environment will be created. # @param dirname [String] The directory name for this environment. # @param options [Hash] An additional set of options for this environment. # # @option options [String] :remote The URL to the remote SVN branch to check out # @option options [String] :username The SVN username # @option options [String] :password The SVN password def initialize(name, basedir, dirname, options = {}) super setopts(options, { # Standard option interface :source => :remote, :version => :expected_revision, :type => ::R10K::Util::Setopts::Ignore, # Type-specific options :remote => :self, :username => :self, :password => :self, }) @working_dir = R10K::SVN::WorkingDir.new(Pathname.new(@full_path), :username => @username, :password => @password) end # Perform an initial checkout of the SVN repository or update the repository. 
# # If the environment is being created for the first time, it will # automatically update all modules to ensure that the environment is complete. # # @api public # @return [void] def sync if @working_dir.is_svn? @working_dir.update else @working_dir.checkout(@remote, @expected_revision) end @synced = true end # Return a sting which uniquely identifies (per source) the current state of the # environment. # # @api public # @return [String] def signature @working_dir.revision end def status if !@path.exist? :absent elsif !@working_dir.is_svn? :mismatched elsif !(@remote == @working_dir.url) :mismatched elsif !@synced :outdated else :insync end end end r10k-4.0.2/lib/r10k/environment/tarball.rb000066400000000000000000000041471460033767200201450ustar00rootroot00000000000000require 'r10k/util/setopts' require 'r10k/tarball' require 'r10k/environment' class R10K::Environment::Tarball < R10K::Environment::WithModules R10K::Environment.register(:tarball, self) # @!attribute [r] tarball # @api private # @return [R10K::Tarball] attr_reader :tarball include R10K::Util::Setopts # Initialize the given tarball environment. # # @param name [String] The unique name describing this environment. # @param basedir [String] The base directory where this environment will be created. # @param dirname [String] The directory name for this environment. # @param options [Hash] An additional set of options for this environment. # # @param options [String] :source Where to get the tarball from # @param options [String] :version The sha256 digest of the tarball def initialize(name, basedir, dirname, options = {}) super setopts(options, { # Standard option interface :type => ::R10K::Util::Setopts::Ignore, :source => :self, :version => :checksum, # Type-specific options :checksum => :self, }) @tarball = R10K::Tarball.new(name, @source, checksum: @checksum) end def path @path ||= Pathname.new(File.join(@basedir, @dirname)) end def sync tarball.get unless tarball.cache_valid? case status when :absent, :mismatched tarball.unpack(path.to_s) # Untracked files left behind from previous extractions are expected to # be deleted by r10k's purge facility. end end def status if not path.exist? :absent elsif not (tarball.cache_valid? && tarball.insync?(path.to_s, ignore_untracked_files: true)) :mismatched else :insync end end def signature @checksum || @tarball.cache_checksum end include R10K::Util::Purgeable # Returns an array of the full paths to all the content being managed. # @note This implements a required method for the Purgeable mixin # @return [Array] def desired_contents desired = [] desired += @tarball.paths.map { |entry| File.join(@full_path, entry) } desired += super end end r10k-4.0.2/lib/r10k/environment/with_modules.rb000066400000000000000000000122201460033767200212160ustar00rootroot00000000000000require 'r10k/util/purgeable' # This abstract base class implements an environment that can include module # content # # @since 3.4.0 class R10K::Environment::WithModules < R10K::Environment::Base # @!attribute [r] moduledir # @return [String] The directory to install environment-defined modules # into (default: #{basedir}/modules) attr_reader :moduledir # Initialize the given environment. # # @param name [String] The unique name describing this environment. # @param basedir [String] The base directory where this environment will be created. # @param dirname [String] The directory name for this environment. # @param options [Hash] An additional set of options for this environment. 
# # @param options [String] :moduledir The path to install modules to # @param options [Hash] :modules Modules to add to the environment def initialize(name, basedir, dirname, options = {}) super @all_modules = nil @managed_content = {} @modules = [] @moduledir = case options[:moduledir] when nil File.join(@basedir, @dirname, 'modules') when File.absolute_path(options[:moduledir]) options.delete(:moduledir) else File.join(@basedir, @dirname, options.delete(:moduledir)) end modhash = options.delete(:modules) load_modules(modhash) unless modhash.nil? end # @return [Array] All modules associated with this environment. # Modules may originate from either: # - The r10k environment object # - A Puppetfile in the environment's content def modules if @all_modules.nil? puppetfile_modules = super() @all_modules = @modules + puppetfile_modules end @all_modules end def module_conflicts?(mod_b) conflict = @modules.any? { |mod_a| mod_a.name == mod_b.name } return false unless conflict msg_vars = {src: mod_b.origin, name: mod_b.name} msg_error = _('Environment and %{src} both define the "%{name}" module' % msg_vars) msg_continue = _("#{msg_error}. The %{src} definition will be ignored" % msg_vars) case conflict_opt = @options[:module_conflicts] when 'override_and_warn', nil logger.warn msg_continue when 'override' logger.debug msg_continue when 'error' raise R10K::Error, msg_error else raise R10K::Error, _('Unexpected value for `module_conflicts` setting in %{env} ' \ 'environment: %{val}' % {env: self.name, val: conflict_opt}) end true end def accept(visitor) visitor.visit(:environment, self) do @modules.each do |mod| mod.sync end puppetfile.accept(visitor) end end def deploy @modules.each do |mod| mod.sync end super end def load_modules(module_hash) module_hash.each do |name, args| if !args.is_a?(Hash) args = { type: 'forge', version: args } end add_module(name, args) end end def resolve_path(base, dirname, path) if Pathname.new(path).absolute? cleanpath(path) else cleanpath(File.join(base, dirname, path)) end end # .cleanpath is as good as we can do without touching the filesystem. # The .realpath methods will choke if some of the intermediate paths # are missing, even though in some cases we will create them later as # needed. def cleanpath(path) Pathname.new(path).cleanpath.to_s end def validate_install_path(path, modname) unless /^#{Regexp.escape(@basedir)}.*/ =~ path raise R10K::Error.new("Environment cannot manage content '#{modname}' outside of containing environment: #{path} is not within #{@basedir}") end true end # @param [String] name # @param [Hash] args def add_module(name, args) # symbolize keys in the args hash args = args.inject({}) { |memo,(k,v)| memo[k.to_sym] = v; memo } args[:overrides] = @overrides if install_path = args.delete(:install_path) install_path = resolve_path(@basedir, @dirname, install_path) validate_install_path(install_path, name) else install_path = @moduledir end # Keep track of all the content this environment is managing to enable purging. @managed_content[install_path] = Array.new unless @managed_content.has_key?(install_path) mod = R10K::Module.new(name, install_path, args, self.name) mod.origin = :environment @managed_content[install_path] << mod.name @modules << mod end include R10K::Util::Purgeable # Returns an array of the full paths of filenames that should exist. Files # inside managed_directories that are not listed in desired_contents will # be purged. 
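  #
  # For illustration only (hypothetical paths): an environment rooted at
  # /etc/puppetlabs/code/environments/production whose source configuration
  # defines an "stdlib" module would include entries such as
  #
  #   ["/etc/puppetlabs/code/environments/production/modules",
  #    "/etc/puppetlabs/code/environments/production/modules/stdlib"]
  #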
# @note This implements a required method for the Purgeable mixin # @return [Array] def desired_contents list = @managed_content.keys list += @managed_content.flat_map do |install_path, modnames| modnames.collect { |name| File.join(install_path, name) } end end def purge_exclusions super + @managed_content.flat_map do |install_path, modnames| modnames.map do |name| File.join(install_path, name, '**', '*') end end end end r10k-4.0.2/lib/r10k/errors.rb000066400000000000000000000026571460033767200155000ustar00rootroot00000000000000require 'r10k' module R10K # An error class that accepts an optional hash and wrapped error message # class Error < StandardError attr_accessor :original # Generate a wrapped exception # # @param original [Exception] The exception to wrap # @param mesg [String] # @param options [Hash] # # @return [R10K::Error] def self.wrap(original, mesg, options = {}) new(mesg, options).tap do |e| e.set_backtrace(caller(4)) e.original = original end end # @overload initialize(mesg) # @param mesg [String] The exception mesg # # @overload initialize(mesg, options) # @param mesg [String] The exception mesg # @param options [Hash] A set of options to store on the exception # # @options options [Array] :backtrace def initialize(mesg, options = {}) super(mesg) bt = options.delete(:backtrace) if bt set_backtrace(bt) end @options = options end protected def structure_exception(name, exc) struct = [] struct << "#{name}:" if exc.respond_to?(:format) struct << indent(exc.format) else struct << indent(exc.message) end struct.join("\n") end def indent(str, level = 4) prefix = ' ' * level str.gsub(/^/, prefix) end end # An error class for configuration errors # class ConfigError < Error end end r10k-4.0.2/lib/r10k/errors/000077500000000000000000000000001460033767200151415ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/errors/formatting.rb000066400000000000000000000013711460033767200176420ustar00rootroot00000000000000require 'r10k/errors' module R10K module Errors module Formatting module_function # Format this exception for displaying to the user # # @param exc [Exception] The exception to format # @param with_backtrace [true, false] Whether the backtrace should be # included with this exception # @return [String] def format_exception(exc, with_backtrace = false) lines = [] lines << exc.message if with_backtrace lines.concat(exc.backtrace) end if exc.respond_to?(:original) && exc.original lines << "Original exception:" lines<< format_exception(exc.original, with_backtrace) end lines.join("\n") end end end end r10k-4.0.2/lib/r10k/feature.rb000066400000000000000000000035161460033767200156120ustar00rootroot00000000000000require 'r10k/logging' module R10K # Detect whether a given feature is present or absent class Feature include R10K::Logging # @attribute [r] name # @return [Symbol] The name of this feature attr_reader :name # @param name [Symbol] The name of this feature # @param opts [Hash] # @param block [Proc] An optional block to detect if this feature is available # # @option opts [String, Array] :libraries One or more libraries to # require to make sure this feature is present. def initialize(name, opts = {}, &block) @name = name @libraries = Array(opts.delete(:libraries)) @block = block end # @return [true, false] Is this feature available? def available? logger.debug1 { _("Testing to see if feature %{name} is available.") % {name: @name} } rv = @libraries.all? { |lib| library_available?(lib) } && proc_available? msg = rv ? 
"is" : "is not" logger.debug1 { _("Feature %{name} %{message} available.") % {name: @name, message: msg} } rv end private def library_available?(lib) logger.debug2 { _("Attempting to load library '%{lib}' for feature %{name}") % {lib: lib, name: @name} } require lib true rescue ScriptError => e logger.debug2 { _("Error while loading library %{lib} for feature %{name}: %{error_msg}") % {lib: lib, name: @name, error_msg: e.message} } false end def proc_available? if @block logger.debug2 { _("Evaluating proc %{block} to test for feature %{name}") % {block: @block.inspect, name: @name} } output = @block.call logger.debug2 { _("Proc %{block} for feature %{name} returned %{output}") % {block: @block.inspect, name: @name, output: output.inspect } } !!output else true end end end end r10k-4.0.2/lib/r10k/feature/000077500000000000000000000000001460033767200152605ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/feature/collection.rb000066400000000000000000000012251460033767200177400ustar00rootroot00000000000000require 'r10k/feature' # Store all features and indicate if they're available. class R10K::Feature::Collection def initialize @features = {} end # @param name [Symbol] The feature to add # @param opts [Hash] Additional options for the feature, see {R10K::Feature} # @param block [Proc] An optional block to detect if this feature is present # @return [void] def add(name, opts = {}, &block) @features[name] = R10K::Feature.new(name, opts, &block) end # @return [true, false] Does a feature by this name exist and is it available? def available?(name) if @features.key?(name) @features[name].available? end end end r10k-4.0.2/lib/r10k/features.rb000066400000000000000000000007121460033767200157700ustar00rootroot00000000000000require 'r10k/feature/collection' require 'forwardable' require 'r10k/util/commands' module R10K module Features @features = R10K::Feature::Collection.new class << self extend Forwardable def_delegators :@features, :add, :available? end end end R10K::Features.add(:shellgit) { R10K::Util::Commands.which('git') } R10K::Features.add(:rugged, :libraries => 'rugged') R10K::Features.add(:pe_license, :libraries => 'pe_license') r10k-4.0.2/lib/r10k/forge/000077500000000000000000000000001460033767200147275ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/forge/module_release.rb000066400000000000000000000231271460033767200202460ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/settings/mixin' require 'r10k/util/cacheable' require 'fileutils' require 'tmpdir' require 'puppet_forge' module R10K module Forge # Download, unpack, and install modules from the Puppet Forge class ModuleRelease include R10K::Settings::Mixin def_setting_attr :proxy def_setting_attr :baseurl def_setting_attr :cache_root, R10K::Util::Cacheable.default_cachedir include R10K::Logging # @!attribute [r] forge_release # @api private # @return [PuppetForge::V3::ModuleRelease] The Forge V3 API module # release object used for downloading and verifying the module # release. attr_reader :forge_release # @!attribute [rw] download_path # @return [Pathname] Where the module tarball will be downloaded to. attr_accessor :download_path # @!attribute [rw] tarball_cache_path # @return [Pathname] Where the module tarball will be cached to. attr_accessor :tarball_cache_path # @!attribute [rw] tarball_cache_root # @return [Pathname] Directory where the module tarball will be cached to. attr_accessor :tarball_cache_root # @!attribute [rw] md5_file_path # @return [Pathname] Where the md5 of the cached tarball is stored. 
attr_accessor :md5_file_path # @!attribute [rw] sha256_file_path # @return [Pathname] Where the SHA256 of the cached tarball is stored. attr_accessor :sha256_file_path # @!attribute [rw] unpack_path # @return [Pathname] Where the module will be unpacked to. attr_accessor :unpack_path # @param full_name [String] The hyphen separated name of the module # @param version [String] The version of the module def initialize(full_name, version) @full_name = PuppetForge::V3.normalize_name(full_name) @version = version # Copy the PuppetForge base connection to the release class; the connection # objects are created in the class instances and thus are not shared with # subclasses. PuppetForge::V3::Release.conn = PuppetForge::V3::Base.conn @forge_release = PuppetForge::V3::Release.new({ :name => @full_name, :version => @version, :slug => "#{@full_name}-#{@version}" }) tarball_name = @forge_release.slug + '.tar.gz' @download_path = Pathname.new(Dir.mktmpdir) + (tarball_name) @tarball_cache_root = Pathname.new(settings[:cache_root]) + (@forge_release.slug + "/tarball/") @tarball_cache_path = @tarball_cache_root + tarball_name md5_filename = @forge_release.slug + '.md5' @md5_file_path = @tarball_cache_root + md5_filename sha256_filename = @forge_release.slug + '.sha256' @sha256_file_path = @tarball_cache_root + sha256_filename @unpack_path = Pathname.new(Dir.mktmpdir) + @forge_release.slug end # Download, unpack, and install this module release to the target directory. # # @example # environment_path = Pathname.new('/etc/puppetlabs/puppet/environments/production') # target_dir = environment_path + 'eight_hundred' # mod = R10K::Forge::ModuleRelease.new('branan-eight_hundred', '8.0.0') # mod.install(target_dir) # # @param target_dir [Pathname] The full path to where the module should be installed. # @return [void] def install(target_dir) download verify unpack(target_dir) ensure cleanup end # Download the module release to {#download_path} and cache to {#tarball_cache_path} # # @return [void] def download if @tarball_cache_path.exist? logger.debug1 "Using cached copy of #{@forge_release.slug} tarball" else logger.debug1 "Downloading #{@forge_release.slug} from #{PuppetForge::Release.conn.url_prefix} to #{@download_path}" @forge_release.download(download_path) FileUtils::mkdir_p(@tarball_cache_root) FileUtils::mv(@download_path, @tarball_cache_path) end end # Verify the module release cached in {#tarball_cache_path} against the # module release checksum given by the Puppet Forge. On mismatch, remove # the cached copy. # # @raise [R10K::Error] when no SHA256 is available and FIPS mode is on # @return [void] def verify logger.debug1 "Verifying that #{@tarball_cache_path} matches checksum" sha256_of_tarball = Digest(:SHA256).file(@tarball_cache_path).hexdigest if @sha256_file_path.exist? verify_from_file(sha256_of_tarball, @sha256_file_path) else if @forge_release.respond_to?(:file_sha256) && !@forge_release.file_sha256.nil? && !@forge_release.file_sha256.size.zero? forge_256_checksum = @forge_release.file_sha256 verify_from_forge(sha256_of_tarball, forge_256_checksum, @sha256_file_path) else if R10K::Util::Platform.fips? raise R10K::Error, "Could not verify module, no SHA256 checksum available, and MD5 checksums not allowed in FIPS mode" end logger.debug1 "No SHA256 checksum available, falling back to MD5" md5_of_tarball = Digest(:MD5).file(@tarball_cache_path).hexdigest if @md5_file_path.exist? 
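            # A checksum file from a previous download exists locally; verify
            # the tarball against that cached MD5 value.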
verify_from_file(md5_of_tarball, @md5_file_path) else verify_from_forge(md5_of_tarball, @forge_release.file_md5, @md5_file_path) end end end end # Verify the checksum of the cached tarball against the # module release checksum stored in the cache as well. # On mismatch, remove the cached copy of both files. # @param tarball_checksum [String] the checksum (either md5 or SHA256) # of the downloaded module tarball # @param file [Pathname] the file containing the checksum as downloaded # previously from the forge # @param digest_class [Digest::SHA256, Digest::MD5] which checksum type # to verify with # # @raise [PuppetForge::V3::Release::ChecksumMismatch] The # cached module release checksum doesn't match the cached checksum. # # @return [void] def verify_from_file(tarball_checksum, checksum_file_path) checksum_from_file = File.read(checksum_file_path).strip if tarball_checksum != checksum_from_file logger.error "Checksum of #{@tarball_cache_path} (#{tarball_checksum}) does not match checksum #{checksum_from_file} in #{checksum_file_path}. Removing both files." @tarball_cache_path.delete checksum_file_path.delete raise PuppetForge::V3::Release::ChecksumMismatch.new end end # Verify the checksum of the cached tarball against the # module release checksum from the forge. # On mismatch, remove the cached copy of the tarball. # @param tarball_checksum [String] the checksum (either md5 or SHA256) # of the downloaded module tarball # @param forge_checksum [String] the checksum downloaded from the Forge # @param checksum_file_path [Pathname] the path to write the verified # checksum to # # @raise [PuppetForge::V3::Release::ChecksumMismatch] The # cached module release checksum doesn't match the forge checksum. # # @return [void] def verify_from_forge(tarball_checksum, forge_checksum, checksum_file_path) if tarball_checksum != forge_checksum logger.debug1 "Checksum of #{@tarball_cache_path} (#{tarball_checksum}) does not match checksum #{forge_checksum} found on the forge. Removing tarball." @tarball_cache_path.delete raise PuppetForge::V3::Release::ChecksumMismatch.new else File.write(checksum_file_path, forge_checksum) end end # Unpack the module release at {#tarball_cache_path} into the given target_dir # # @param target_dir [Pathname] The final path where the module release # should be unpacked/installed into. # @return [void] def unpack(target_dir) logger.debug1 _("Unpacking %{tarball_cache_path} to %{target_dir} (with tmpdir %{tmp_path})") % {tarball_cache_path: tarball_cache_path, target_dir: target_dir, tmp_path: unpack_path} file_lists = PuppetForge::Unpacker.unpack(tarball_cache_path.to_s, target_dir.to_s, unpack_path.to_s) logger.debug2 _("Valid files unpacked: %{valid_files}") % {valid_files: file_lists[:valid]} if !file_lists[:invalid].empty? logger.debug1 _("These files existed in the module's tar file, but are invalid filetypes and were not unpacked: %{invalid_files}") % {invalid_files: file_lists[:invalid]} end if !file_lists[:symlinks].empty? logger.warn _("Symlinks are unsupported and were not unpacked from the module tarball. %{release_slug} contained these ignored symlinks: %{symlinks}") % {release_slug: @forge_release.slug, symlinks: file_lists[:symlinks]} end end # Remove all files created while downloading and unpacking the module. def cleanup cleanup_unpack_path cleanup_download_path end # Remove the temporary directory used for unpacking the module. def cleanup_unpack_path if unpack_path.parent.exist? unpack_path.parent.rmtree end end # Remove the downloaded module release. 
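      # Note: only the temporary download directory is removed; the tarball
      # cached under the cache root is kept so later installs can reuse it.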
def cleanup_download_path if download_path.parent.exist? download_path.parent.rmtree end end end end end r10k-4.0.2/lib/r10k/git.rb000066400000000000000000000135251460033767200147430ustar00rootroot00000000000000require 'uri' require 'r10k/features' require 'r10k/errors' require 'r10k/settings' require 'r10k/logging' require 'r10k/util/platform' module R10K module Git require 'r10k/git/shellgit' require 'r10k/git/rugged' extend R10K::Logging # A list of Git providers, sorted by priority. Providers have features that # must be available for them to be used, and a module which is the namespace # containing the implementation. @providers = [ [ :shellgit, { :feature => :shellgit, :module => R10K::Git::ShellGit, } ], [ :rugged, { :feature => :rugged, :module => R10K::Git::Rugged, :on_set => proc do [:ssh, :https].each do |transport| if !::Rugged.features.include?(transport) logger.warn _("Rugged has been compiled without support for %{transport}; Git repositories will not be reachable via %{transport}.") % {transport: transport} end end end } ], ] # Mark the current provider as invalid. # # If a provider is set to an invalid provider, we need to make sure that # the provider doesn't fall back to the default value, thereby ignoring the # explicit value and silently continuing. If the current provider is # assigned to this value, no provider will be used until the provider is # either reset or assigned a valid provider. # # @api private NULL_PROVIDER = Object.new # Mark the current provider as unset. # # If the provider has never been set we need to indicate that there is no # current value but the default value can be used. If the current provider # is assigned to this value and the provider is looked up, the default # provider will be looked up and used. # # @api private UNSET_PROVIDER = Object.new # Return the first available Git provider. # # @raise [R10K::Error] if no Git providers are functional. # @return [String] The name of the first available Git implementation. def self.default_name name, _ = @providers.find { |(_, hash)| R10K::Features.available?(hash[:feature]) } if name.nil? raise R10K::Error, _("No Git providers are functional.") end name end extend R10K::Logging # Manually set the Git provider by name. # # @param name [Symbol] The name of the Git provider to use. # @raise [R10K::Error] if the requested Git provider doesn't exist. # @raise [R10K::Error] if the requested Git provider isn't functional. # @return [void] def self.provider=(name) _, attrs = @providers.find { |(providername, _)| name == providername } if attrs.nil? @provider = NULL_PROVIDER raise R10K::Error, _("No Git provider named '%{name}'.") % {name: name} end if !R10K::Features.available?(attrs[:feature]) @provider = NULL_PROVIDER raise R10K::Error, _("Git provider '%{name}' is not functional.") % {name: name} end if attrs[:on_set] attrs[:on_set].call end @provider = attrs[:module] logger.debug1 { _("Setting Git provider to %{provider}") % {provider: @provider.name} } end # @return [Module] The namespace of the first available Git implementation. # Implementation classes should be looked up against this returned Module. 
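    # @example Looking up a concrete repository class (illustrative only; the
    #   module returned depends on which Git provider is functional/configured)
    #   R10K::Git.provider         #=> R10K::Git::ShellGit
    #   R10K::Git.bare_repository  #=> R10K::Git::ShellGit::BareRepository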
def self.provider case @provider when NULL_PROVIDER raise R10K::Error, _("No Git provider set.") when UNSET_PROVIDER self.provider = default_name logger.debug1 { _("Setting Git provider to default provider %{name}") % {name: default_name} } end @provider end def self.cache provider::Cache end def self.bare_repository provider::BareRepository end def self.thin_repository provider::ThinRepository end # Clear the currently set provider. # # @api private def self.reset! @provider = UNSET_PROVIDER end @provider = UNSET_PROVIDER extend R10K::Settings::Mixin::ClassMethods def_setting_attr :private_key def_setting_attr :oauth_token def_setting_attr :github_app_id def_setting_attr :github_app_key def_setting_attr :github_app_ttl def_setting_attr :proxy def_setting_attr :username def_setting_attr :repositories, {} def self.get_repo_settings(remote) self.settings[:repositories].find {|r| r[:remote] == remote } end def self.get_proxy_for_remote(remote) # We only support proxy for HTTP(S) transport return nil unless remote =~ /^http(s)?/i repo_settings = self.get_repo_settings(remote) if repo_settings && repo_settings.has_key?(:proxy) proxy = repo_settings[:proxy] unless repo_settings[:proxy].nil? || repo_settings[:proxy].empty? else proxy = self.settings[:proxy] end R10K::Git.log_proxy_for_remote(proxy, remote) if proxy proxy end def self.log_proxy_for_remote(proxy, remote) # Sanitize passwords out of the proxy URI for loggging. proxy_uri = URI.parse(proxy) proxy_str = "#{proxy_uri.scheme}://" proxy_str << "#{proxy_uri.userinfo.gsub(/:(.*)$/, ':')}@" if proxy_uri.userinfo proxy_str << "#{proxy_uri.host}:#{proxy_uri.port}" logger.debug { "Using HTTP proxy '#{proxy_str}' for '#{remote}'." } nil end # Execute block with given proxy configured in ENV def self.with_proxy(new_proxy) unless new_proxy.nil? old_proxy = Hash[ ['HTTPS_PROXY', 'HTTP_PROXY', 'https_proxy', 'http_proxy'].collect do |var| old_value = ENV[var] ENV[var] = new_proxy [var, old_value] end ] end begin yield ensure ENV.update(old_proxy) if old_proxy end nil end end end r10k-4.0.2/lib/r10k/git/000077500000000000000000000000001460033767200144105ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/git/alternates.rb000066400000000000000000000023721460033767200171030ustar00rootroot00000000000000require 'pathname' # Manage `$GIT_DIR/objects/info/alternates` # # @see man gitrepository-layout(5) class R10K::Git::Alternates # @attribute [r] file # @return [Pathname] The alternates file attr_reader :file # @param git_dir [Pathname] The path to the git repository def initialize(git_dir) @file = git_dir + File.join('objects', 'info', 'alternates') @entries = [] end def add(path) write(to_a << path) end alias << add # Conditionally add path to the alternates file # # @param path [String] The file path to add to the file if not already present # @return [true, false] If the entry was added. def add?(path) paths = read() add_entry = !paths.include?(path) if add_entry paths << path write(paths) end add_entry end def include?(path) to_a.include?(path) end def write(entries) if ! @file.parent.directory? raise R10K::Git::GitError, _("Cannot write %{file}; parent directory does not exist") % {file: @file} end @file.open("w") do |fh| entries.each do |entry| fh.puts(entry) end end end def read entries = [] if @file.file? 
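      # Each line of objects/info/alternates is the path to another object
      # database (see gitrepository-layout(5)); drop the trailing newlines.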
entries = @file.readlines.map(&:chomp) end entries end alias to_a read end r10k-4.0.2/lib/r10k/git/cache.rb000066400000000000000000000047051460033767200160060ustar00rootroot00000000000000require 'r10k/git' require 'r10k/settings' require 'r10k/instance_cache' require 'forwardable' require 'r10k/util/cacheable' # Cache Git repository mirrors for object database reuse. # # This implements most of the behavior needed for Git repo caching, but needs # to have a specific Git bare repository provided. Subclasses should implement # the {bare_repository} method. # # @abstract # @see man git-clone(1) class R10K::Git::Cache include R10K::Settings::Mixin include R10K::Util::Cacheable def_setting_attr :cache_root, R10K::Util::Cacheable.default_cachedir('git') @instance_cache = R10K::InstanceCache.new(self) # @api private def self.instance_cache @instance_cache end # Generate a new instance with the given remote or return an existing object # with the given remote. This should be used over R10K::Git::Cache.new. # # @api public # @param remote [String] The git remote to cache # @return [R10K::Git::Cache] The requested cache object. def self.generate(remote) instance_cache.generate(remote) end # @abstract # @return [Object] The concrete bare repository implementation to use for # interacting with the cached Git repository. def self.bare_repository raise NotImplementedError end include R10K::Logging extend Forwardable def_delegators :@repo, :git_dir, :objects_dir, :branches, :tags, :exist?, :resolve, :ref_type # @!attribute [r] path # @deprecated # @return [String] The path to the git cache repository def path logger.warn _("%{class}#path is deprecated; use #git_dir") % {class: self.class} git_dir end # @!attribute [r] repo # @api private attr_reader :repo # @param remote [String] The URL of the Git remote URL to cache. def initialize(remote) @remote = remote @repo = self.class.bare_repository.new(settings[:cache_root], sanitized_dirname) end def sync if !@synced sync! @synced = true end end def synced? @synced end def sync! if cached? @repo.fetch else logger.debug1 _("Creating new git cache for %{remote}") % {remote: @remote.inspect} # TODO extract this to an initialization step if !File.exist?(settings[:cache_root]) FileUtils.mkdir_p settings[:cache_root] end @repo.clone(@remote) end end # @api private def reset! @synced = false end alias cached? exist? 
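  # @return [String] The directory name used for this remote under the cache
  #   root; the sanitisation itself is provided by the R10K::Util::Cacheable
  #   mixin, which this method invokes via `super`.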
def sanitized_dirname @sanitized_dirname ||= super(@remote) end end r10k-4.0.2/lib/r10k/git/errors.rb000066400000000000000000000010231460033767200162450ustar00rootroot00000000000000require 'r10k/errors' module R10K module Git class GitError < R10K::Error attr_reader :git_dir def initialize(mesg, options = {}) super @git_dir = @options[:git_dir] end def message msg = super if @git_dir msg << " at #{@git_dir}" end msg end end class UnresolvableRefError < GitError attr_reader :ref def initialize(mesg, options = {}) super @ref = @options[:ref] end end end end r10k-4.0.2/lib/r10k/git/rugged.rb000066400000000000000000000004711460033767200162140ustar00rootroot00000000000000require 'r10k/git' begin require 'rugged' rescue LoadError end module R10K module Git module Rugged require 'r10k/git/rugged/bare_repository' require 'r10k/git/rugged/working_repository' require 'r10k/git/rugged/cache' require 'r10k/git/rugged/thin_repository' end end end r10k-4.0.2/lib/r10k/git/rugged/000077500000000000000000000000001460033767200156655ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/git/rugged/bare_repository.rb000066400000000000000000000055071460033767200214310ustar00rootroot00000000000000require 'r10k/git/rugged' require 'r10k/git/rugged/base_repository' require 'r10k/git/errors' class R10K::Git::Rugged::BareRepository < R10K::Git::Rugged::BaseRepository # @param basedir [String] The base directory of the Git repository # @param dirname [String] The directory name of the Git repository def initialize(basedir, dirname) @path = Pathname.new(File.join(basedir, dirname)) if exist? @_rugged_repo = ::Rugged::Repository.bare(@path.to_s) end end # @return [Pathname] The path to this Git repository def git_dir @path end # @return [Pathname] The path to the objects directory in this Git repository def objects_dir @path + "objects" end # Clone the given remote. # # This should only be called if the repository does not exist. # # @param remote [String] The URL of the Git remote to clone. # @return [void] def clone(remote) logger.debug1 { _("Cloning '%{remote}' into %{path}") % {remote: remote, path: @path} } @_rugged_repo = ::Rugged::Repository.init_at(@path.to_s, true).tap do |repo| config = repo.config config['remote.origin.url'] = remote config['remote.origin.fetch'] = '+refs/*:refs/*' config['remote.origin.mirror'] = 'true' end fetch('origin') rescue Rugged::SshError, Rugged::NetworkError => e raise R10K::Git::GitError.new(e.message, :git_dir => git_dir, :backtrace => e.backtrace) end # Fetch refs and objects from the origin remote # # @return [void] def fetch(remote_name='origin') logger.debug1 { _("Fetching remote '%{remote_name}' at %{path}") % {remote_name: remote_name, path: @path } } # Check to see if we have a version of Rugged that supports "fetch --prune" and warn if not if defined?(Rugged::Version) && !Gem::Dependency.new('rugged', '>= 0.24.0').match?('rugged', Rugged::Version) logger.warn { _("Rugged versions prior to 0.24.0 do not support pruning stale branches during fetch, please upgrade your \'rugged\' gem. 
(Current version is: %{version})") % {version: Rugged::Version} } end remote = remotes[remote_name] proxy = R10K::Git.get_proxy_for_remote(remote) options = {:credentials => credentials, :prune => true, :proxy_url => proxy} refspecs = ['+refs/*:refs/*'] results = nil R10K::Git.with_proxy(proxy) do results = with_repo { |repo| repo.fetch(remote_name, refspecs, **options) } end report_transfer(results, remote_name) rescue Rugged::SshError, Rugged::NetworkError => e if e.message =~ /Unsupported proxy scheme for/ message = e.message + "As of curl ver 7.50.2, unsupported proxy schemes no longer fall back to HTTP." else message = e.message end raise R10K::Git::GitError.new(message, :git_dir => git_dir, :backtrace => e.backtrace) rescue raise end def exist? @path.exist? end end r10k-4.0.2/lib/r10k/git/rugged/base_repository.rb000066400000000000000000000053011460033767200214220ustar00rootroot00000000000000require 'r10k/git/rugged' require 'r10k/git/rugged/credentials' require 'r10k/logging' class R10K::Git::Rugged::BaseRepository include R10K::Logging # @return [Pathname] The path to this repository. # @note The `@path` instance variable must be set by inheriting classes on instantiation. attr_reader :path def resolve(pattern) object = with_repo { |repo| repo.rev_parse(pattern) } case object when NilClass nil when ::Rugged::Tag, ::Rugged::Tag::Annotation object.target.oid else object.oid end rescue ::Rugged::ReferenceError, ::Rugged::OdbError => e logger.debug2(_("Unable to resolve %{pattern}: %{e} ") % {pattern: pattern, e: e }) nil end def branches with_repo { |repo| repo.branches.each_name(:local).to_a } end def tags with_repo { |repo| repo.tags.each_name.to_a } end # @return [Symbol] The type of the given ref, one of :branch, :tag, :commit, or :unknown def ref_type(pattern) # Try to match and resolve SHA refs as quickly as possible. if pattern =~ /^[0-9a-f]{5,40}$/i && @_rugged_repo.include?(pattern) :commit elsif @_rugged_repo.tags[pattern] :tag elsif @_rugged_repo.branches[pattern] :branch elsif resolve(pattern) :commit else :unknown end end def remotes remotes_hash = {} if @_rugged_repo @_rugged_repo.remotes.each do |remote| remotes_hash[remote.name] = remote.url end end remotes_hash end # Update a remote URL # @param [String] The remote URL of the git repository # @param [String] An optional remote name for the git repository def update_remote(remote, remote_name='origin') if @_rugged_repo logger.debug2(_("Remote URL is different from cache, updating %{orig} to %{update}") % {orig: remotes[remote_name], update: remote}) @_rugged_repo.remotes.set_url(remote_name, remote) end end private def with_repo(opts={}) if @_rugged_repo yield @_rugged_repo end ensure @_rugged_repo.close if @_rugged_repo end # Generate a lambda that can create a credentials object for the # authentication type in question. # # @note The Rugged API expects an object that responds to #call; the # Credentials subclasses implement #call returning self so that # the Credentials object can be used, or a Proc that returns a # Credentials object can be used. 
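  #   Rough usage sketch (mirroring the fetch/clone callers above):
  #     repo.fetch(remote, refspecs, credentials: credentials)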
# # @api private # # @return [Proc] def credentials R10K::Git::Rugged::Credentials.new(self) end def report_transfer(results, remote) logger.debug2 { "Transferred #{results[:total_objects]} objects (#{results[:received_bytes]} bytes) from '#{remote}' into #{git_dir}'" } nil end end r10k-4.0.2/lib/r10k/git/rugged/cache.rb000066400000000000000000000006601460033767200172570ustar00rootroot00000000000000require 'r10k/git/rugged' require 'r10k/git/cache' class R10K::Git::Rugged::Cache < R10K::Git::Cache @instance_cache = R10K::InstanceCache.new(self) def self.bare_repository R10K::Git::Rugged::BareRepository end # Update the remote URL if the cache differs from the current configuration def sync! if cached? && @repo.remotes['origin'] != @remote @repo.update_remote(@remote) end super end end r10k-4.0.2/lib/r10k/git/rugged/credentials.rb000066400000000000000000000176641460033767200205250ustar00rootroot00000000000000require 'r10k/git/rugged' require 'r10k/git/errors' require 'r10k/logging' require 'json' require 'jwt' require 'net/http' require 'openssl' # Generate credentials for secured remote connections. # # @api private class R10K::Git::Rugged::Credentials include R10K::Logging # @param repository [R10K::Git::Rugged::BaseRepository] def initialize(repository) @repository = repository @called = 0 end def call(url, username_from_url, allowed_types) @called += 1 # Break out of infinite HTTP auth retry loop introduced in libgit2/rugged 0.24.0, libssh # auth seems to already abort after ~50 attempts. if @called > 50 raise R10K::Git::GitError.new(_("Authentication failed for Git remote %{url}.") % {url: url.inspect} ) end if allowed_types.include?(:ssh_key) get_ssh_key_credentials(url, username_from_url) elsif allowed_types.include?(:plaintext) get_plaintext_credentials(url, username_from_url) else get_default_credentials(url, username_from_url) end end def get_ssh_key_credentials(url, username_from_url) user = get_git_username(url, username_from_url) per_repo_private_key = nil if per_repo_settings = R10K::Git.get_repo_settings(url) per_repo_private_key = per_repo_settings[:private_key] end global_private_key = R10K::Git.settings[:private_key] if per_repo_private_key private_key = per_repo_private_key logger.debug2 _("Using per-repository private key %{key} for URL %{url}") % {key: private_key, url: url.inspect} elsif global_private_key private_key = global_private_key logger.debug2 _("URL %{url} has no per-repository private key using '%{key}'." 
) % {key: private_key, url: url.inspect} else raise R10K::Git::GitError.new(_("Git remote %{url} uses the SSH protocol but no private key was given") % {url: url.inspect}, :git_dir => @repository.path.to_s) end if !File.readable?(private_key) raise R10K::Git::GitError.new(_("Unable to use SSH key auth for %{url}: private key %{private_key} is missing or unreadable") % {url: url.inspect, private_key: private_key.inspect}, :git_dir => @repository.path.to_s) end Rugged::Credentials::SshKey.new(:username => user, :privatekey => private_key) end def get_plaintext_credentials(url, username_from_url) per_repo_oauth_token = nil per_repo_github_app_id = nil per_repo_github_app_key = nil per_repo_github_app_ttl = nil if per_repo_settings = R10K::Git.get_repo_settings(url) per_repo_oauth_token = per_repo_settings[:oauth_token] per_repo_github_app_id = per_repo_settings[:github_app_id] per_repo_github_app_key = per_repo_settings[:github_app_key] per_repo_github_app_ttl = per_repo_settings[:github_app_ttl] end app_id = per_repo_github_app_id || R10K::Git.settings[:github_app_id] app_key = per_repo_github_app_key || R10K::Git.settings[:github_app_key] app_ttl = per_repo_github_app_ttl || R10K::Git.settings[:github_app_ttl] if token_path = per_repo_oauth_token || R10K::Git.settings[:oauth_token] @oauth_token ||= extract_token(token_path, url) user = 'x-oauth-token' password = @oauth_token elsif app_id && app_key && app_ttl user = 'x-access-token' password = github_app_token(app_id, app_key, app_ttl) else user = get_git_username(url, username_from_url) password = URI.parse(url).password || '' end Rugged::Credentials::UserPassword.new(username: user, password: password) end def extract_token(token_path, url) if token_path == '-' token = $stdin.read.strip logger.debug2 _("Using OAuth token from stdin for URL %{url}") % { url: url } elsif File.readable?(token_path) token = File.read(token_path).strip logger.debug2 _("Using OAuth token from %{token_path} for URL %{url}") % { token_path: token_path, url: url } else raise R10K::Git::GitError, _("%{path} is missing or unreadable, cannot load OAuth token") % { path: token_path } end unless valid_token?(token) raise R10K::Git::GitError, _("Supplied OAuth token contains invalid characters.") end token end # This regex is the only real requirement for OAuth token format, # per https://www.oauth.com/oauth2-servers/access-tokens/access-token-response/ # Bitbucket's tokens also can include an underscore, so that is added here. def valid_token?(token) return token =~ /^[\w\-\.~_\+\/]+$/ end def get_default_credentials(url, username_from_url) Rugged::Credentials::Default.new end def get_git_username(url, username_from_url) git_user = R10K::Git.settings[:username] user = nil if !username_from_url.nil? && !username_from_url.empty? 
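      # A username embedded in the URL itself always takes precedence over the
      # configured username or the current login.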
user = username_from_url logger.debug2 _("URL %{url} includes the username %{username}, using that user for authentication.") % {url: url.inspect, username: username_from_url} elsif git_user user = git_user logger.debug2 _("URL %{url} did not specify a user, using %{user} from configuration") % {url: url.inspect, user: user.inspect} else user = Etc.getlogin logger.debug2 _("URL %{url} did not specify a user, using current user %{user}") % {url: url.inspect, user: user.inspect} end user end def github_app_token(app_id, private_key, ttl) raise R10K::Git::GitError, _('Github App id contains invalid characters.') unless app_id =~ /^\d+$/ raise R10K::Git::GitError, _('Github App token ttl contains invalid characters.') unless ttl =~ /^\d+$/ raise R10K::Git::GitError, _('Github App key is missing or unreadable') unless File.readable?(private_key) begin ssl_key = OpenSSL::PKey::RSA.new(File.read(private_key).strip) unless ssl_key.private? raise R10K::Git::GitError, _('Github App key is not a valid SSL private key') end rescue OpenSSL::PKey::RSAError raise R10K::Git::GitError, _('Github App key is not a valid SSL key') end logger.debug2 _("Using Github App id %{app_id} with SSL key from %{key_path}") % { key_path: private_key, app_id: app_id } jwt_issue_time = Time.now.to_i - 60 jwt_exp_time = (jwt_issue_time + 60) + ttl.to_i payload = { iat: jwt_issue_time, exp: jwt_exp_time, iss: app_id } jwt = JWT.encode(payload, ssl_key, "RS256") get = URI.parse("https://api.github.com/app/installations") get_request = Net::HTTP::Get.new(get) get_request["Authorization"] = "Bearer #{jwt}" get_request["Accept"] = "application/vnd.github.v3+json" get_req_options = { use_ssl: get.scheme == "https", } get_response = Net::HTTP.start(get.hostname, get.port, get_req_options) do |http| http.request(get_request) end unless (get_response.class < Net::HTTPSuccess) logger.debug2 _("Unexpected response code: #{get_response.code}\nResponse body: #{get_response.body}") raise R10K::Git::GitError, _("Error using private key to get Github App access token from url") end access_tokens_url = JSON.parse(get_response.body)[0]['access_tokens_url'] post = URI.parse(access_tokens_url) post_request = Net::HTTP::Post.new(post) post_request["Authorization"] = "Bearer #{jwt}" post_request["Accept"] = "application/vnd.github.v3+json" post_req_options = { use_ssl: post.scheme == "https", } post_response = Net::HTTP.start(post.hostname, post.port, post_req_options) do |http| http.request(post_request) end unless (post_response.class < Net::HTTPSuccess) logger.debug2 _("Unexpected response code: #{post_response.code}\nResponse body: #{post_response.body}") raise R10K::Git::GitError, _("Error using private key to generate access token from #{access_token_url}") end token = JSON.parse(post_response.body)['token'] raise R10K::Git::GitError, _("Github App token contains invalid characters.") unless valid_token?(token) logger.debug2 _("Github App token generated, expires at: %{expire}") % {expire: JSON.parse(post_response.body)['expires_at']} token end end r10k-4.0.2/lib/r10k/git/rugged/thin_repository.rb000066400000000000000000000057551460033767200214670ustar00rootroot00000000000000require 'r10k/git' require 'r10k/git/rugged/working_repository' require 'r10k/git/rugged/cache' class R10K::Git::Rugged::ThinRepository < R10K::Git::Rugged::WorkingRepository def initialize(basedir, dirname, cache_repo) @cache_repo = cache_repo super(basedir, dirname) end # Clone this git repository # # @param remote [String] The Git remote to clone # @param opts 
[Hash] # # @options opts [String] :ref The git ref to check out on clone # # @return [void] def clone(remote, opts = {}) logger.debug1 { "Cloning '#{remote}' into #{@path}" } @cache_repo.sync cache_objects_dir = @cache_repo.objects_dir.to_s # {Rugged::Repository.clone_at} doesn't support :alternates, which # completely breaks how thin repositories need to work. To circumvent # this we manually create a Git repository, set up git remotes, and # update 'objects/info/alternates' with the path. We don't actually # fetch any objects because we don't need them, and we don't actually # use any refs in this repository so we skip all those steps. ::Rugged::Repository.init_at(@path.to_s, false) @_rugged_repo = ::Rugged::Repository.new(@path.to_s, :alternates => [cache_objects_dir]) alternates << cache_objects_dir with_repo do |repo| config = repo.config config['remote.origin.url'] = remote config['remote.origin.fetch'] = '+refs/heads/*:refs/remotes/origin/*' config['remote.cache.url'] = @cache_repo.git_dir.to_s config['remote.cache.fetch'] = '+refs/heads/*:refs/remotes/cache/*' end checkout(opts.fetch(:ref, 'HEAD')) end def checkout(ref, opts = {}) super(@cache_repo.resolve(ref), opts) end # Fetch refs and objects from one of the Git remotes # # @param remote [String] The remote to fetch, defaults to 'cache' # @return [void] def fetch(remote = 'cache') super(remote) end # @return [String] The cache remote URL def cache with_repo { |repo| repo.config['remote.cache.url'] } end def tracked_paths(ref="HEAD") with_repo do |repo| commit = repo.rev_parse(ref) unless commit && commit.tree raise R10K::Error.new("Unable to resolve '#{ref}' to a valid commit in repo #{@path}") end commit.tree.walk(:postorder).collect do |root, entry| root.empty? ? entry[:name] : File.join(root, entry[:name]) end end end def stage_files(files=['.']) with_repo do |repo| index = repo.index files.each { |p| index.add( :path => p ) } end end private # Override the parent class repo setup so that we can make sure the alternates file is up to date # before we create the Rugged::Repository object, which reads from the alternates file. 
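  # (Rugged resolves objects through the alternates list when the repository is
  # opened, so the cache entry needs to be in place before instantiation.)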
def setup_rugged_repo entry_added = alternates.add?(@cache_repo.objects_dir.to_s) if entry_added logger.debug2 { _("Updated repo %{path} to include alternate object db path %{objects_dir}") % {path: @path, objects_dir: @cache_repo.objects_dir} } end super end end r10k-4.0.2/lib/r10k/git/rugged/working_repository.rb000066400000000000000000000110501460033767200221660ustar00rootroot00000000000000require 'r10k/git/rugged' require 'r10k/git/rugged/base_repository' require 'r10k/git/errors' class R10K::Git::Rugged::WorkingRepository < R10K::Git::Rugged::BaseRepository # @return [Pathname] The path to the Git repository inside of this directory def git_dir @path + '.git' end # @param basedir [String] The base directory of the Git repository # @param dirname [String] The directory name of the Git repository def initialize(basedir, dirname) @path = Pathname.new(File.join(basedir, dirname)) end # Clone this git repository # # @param remote [String] The Git remote to clone # @param opts [Hash] # # @options opts [String] :ref The git ref to check out on clone # @options opts [String] :reference A Git repository to use as an alternate object database # # @return [void] def clone(remote, opts = {}) logger.debug1 { _("Cloning '%{remote}' into %{path}") % {remote: remote, path: @path } } proxy = R10K::Git.get_proxy_for_remote(remote) # libgit2/rugged doesn't support cloning a repository and providing an # alternate object database, making the handling of :alternates a noop. # Unfortunately this means that this method can't really use alternates # and running the clone will duplicate all objects in the specified # repository. However alternate databases can be handled when an existing # repository is loaded, so loading a cloned repo will correctly use # alternate object database. options = {:credentials => credentials, :proxy_url => proxy} options.merge!(:alternates => [File.join(opts[:reference], 'objects')]) if opts[:reference] R10K::Git.with_proxy(proxy) do @_rugged_repo = ::Rugged::Repository.clone_at(remote, @path.to_s, options) end if opts[:reference] alternates << File.join(opts[:reference], 'objects') end if opts[:ref] # todo: always check out something; since we're fetching a repository we # won't populate the working directory. checkout(opts[:ref]) end rescue Rugged::SshError, Rugged::NetworkError => e raise R10K::Git::GitError.new(e.message, :git_dir => git_dir, :backtrace => e.backtrace) end # Check out the given Git ref # # @param ref [String] The git reference to check out # @return [void] def checkout(ref, opts = {}) sha = resolve(ref) if sha logger.debug2 { _("Checking out ref '%{ref}' (resolved to SHA '%{sha}') in repository %{path}") % {ref: ref, sha: sha, path: @path} } else raise R10K::Git::GitError.new("Unable to check out unresolvable ref '#{ref}'", git_dir: git_dir) end # :force defaults to true force = !opts.has_key?(:force) || opts[:force] with_repo do |repo| # rugged/libgit2 will not update (at least) the execute bit a file if the SHA is already at # the value being reset to, so this is now changed to an if ... 
else if force repo.reset(sha, :hard) else repo.checkout(sha) end end end def fetch(remote_name = 'origin') logger.debug1 { _("Fetching remote '%{remote}' at %{path}") % {remote: remote_name, path: @path} } remote = remotes[remote_name] proxy = R10K::Git.get_proxy_for_remote(remote) options = {:credentials => credentials, :proxy_url => proxy} refspecs = ["+refs/heads/*:refs/remotes/#{remote_name}/*"] results = nil R10K::Git.with_proxy(proxy) do results = with_repo { |repo| repo.fetch(remote_name, refspecs, **options) } end report_transfer(results, remote) rescue Rugged::SshError, Rugged::NetworkError => e raise R10K::Git::GitError.new(e.message, :git_dir => git_dir, :backtrace => e.backtrace) end def exist? @path.exist? end def head resolve('HEAD') end def alternates R10K::Git::Alternates.new(git_dir) end def origin with_repo { |repo| repo.config['remote.origin.url'] } end def dirty?(exclude_spec=true) with_repo do |repo| if exclude_spec diff = repo.diff_workdir('HEAD').select { |d| ! d.delta.old_file[:path].start_with?('spec/') } else diff = repo.diff_workdir('HEAD').to_a end diff.each do |p| logger.debug(_("Found local modifications in %{file_path}" % {file_path: File.join(@path, p.delta.old_file[:path])})) logger.debug1(p.to_s) end return diff.size > 0 end end private def with_repo if @_rugged_repo.nil? && git_dir.exist? setup_rugged_repo end super end def setup_rugged_repo @_rugged_repo = ::Rugged::Repository.new(@path.to_s, :alternates => alternates.to_a) end end r10k-4.0.2/lib/r10k/git/shellgit.rb000066400000000000000000000003301460033767200165440ustar00rootroot00000000000000module R10K module Git module ShellGit require 'r10k/git/shellgit/bare_repository' require 'r10k/git/shellgit/working_repository' require 'r10k/git/shellgit/thin_repository' end end end r10k-4.0.2/lib/r10k/git/shellgit/000077500000000000000000000000001460033767200162235ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/git/shellgit/bare_repository.rb000066400000000000000000000021141460033767200217560ustar00rootroot00000000000000require 'r10k/git/shellgit' require 'r10k/git/shellgit/base_repository' # Create and manage Git bare repositories. class R10K::Git::ShellGit::BareRepository < R10K::Git::ShellGit::BaseRepository # @param basedir [String] The base directory of the Git repository # @param dirname [String] The directory name of the Git repository def initialize(basedir, dirname) @path = Pathname.new(File.join(basedir, dirname)) end # @return [Pathname] The path to this Git repository def git_dir @path end # @return [Pathname] The path to the objects directory in this Git repository def objects_dir @path + "objects" end def clone(remote) proxy = R10K::Git.get_proxy_for_remote(remote) R10K::Git.with_proxy(proxy) do git ['clone', '--mirror', remote, git_dir.to_s] end end def fetch(remote_name='origin') remote = remotes[remote_name] proxy = R10K::Git.get_proxy_for_remote(remote) R10K::Git.with_proxy(proxy) do git ['fetch', remote_name, '--prune'], :git_dir => git_dir.to_s end end def exist? @path.exist? 
end end r10k-4.0.2/lib/r10k/git/shellgit/base_repository.rb000066400000000000000000000067511460033767200217720ustar00rootroot00000000000000require 'r10k/git/shellgit' require 'r10k/util/subprocess' require 'r10k/logging' class R10K::Git::ShellGit::BaseRepository # @abstract # @return [Pathname] The path to the Git directory def git_dir raise NotImplementedError end # Resolve the given Git ref to a commit # # @param pattern [String] The git ref to resolve # @return [String, nil] The commit SHA if the ref could be resolved, nil otherwise. def resolve(pattern) result = git ['rev-parse', "#{pattern}^{commit}"], :git_dir => git_dir.to_s, :raise_on_fail => false if result.success? result.stdout end end # For compatibility with R10K::Git::Ref # @todo remove alias alias rev_parse resolve # @return [Array] All local branches in this repository def branches for_each_ref('refs/heads') end def is_branch?(pattern) result = git ['rev-parse', '-q', '--verify', "refs/heads/#{pattern}"], :git_dir => git_dir.to_s, :raise_on_fail => false result.success? end # @return [Array] All tags in this repository def tags for_each_ref('refs/tags') end def is_tag?(pattern) result = git ['rev-parse', '-q', '--verify', "refs/tags/#{pattern}"], :git_dir => git_dir.to_s, :raise_on_fail => false result.success? end # @return [Symbol] The type of the given ref, one of :branch, :tag, :commit, or :unknown def ref_type(pattern) @_ref_type_cache ||= {} @_ref_type_cache[pattern] ||= begin # Try to match and resolve SHA refs as quickly as possible. if pattern =~ /^[0-9a-f]{5,40}$/i && resolve(pattern) :commit elsif is_tag? pattern :tag elsif is_branch? pattern :branch elsif resolve(pattern) :commit else :unknown end end end # @return [Hash] Collection of remotes for this repo, keys are the remote name and values are the remote URL. def remotes result = git ['config', '--local', '--get-regexp', '^remote\..*\.url$'], :git_dir => git_dir.to_s, :raise_on_fail => false if result.success? Hash[ result.stdout.split("\n").collect do |remote| matches = /^remote\.(.*)\.url (.*)$/.match(remote) [matches[1], matches[2]] end ] else {} end end include R10K::Logging private # @param pattern [String] def for_each_ref(pattern) matcher = %r[#{pattern}/(.*)$] output = git ['for-each-ref', pattern, '--format', '%(refname)'], :git_dir => git_dir.to_s output.stdout.scan(matcher).flatten end # Wrap git commands # # @param cmd [Array] cmd The arguments for the git prompt # @param opts [Hash] opts # # @option opts [String] :path # @option opts [String] :git_dir # @option opts [String] :work_tree # @option opts [String] :raise_on_fail # # @raise [R10K::ExecutionFailure] If the executed command exited with a # nonzero exit code. 
# # @return [String] The git command output def git(cmd, opts = {}) raise_on_fail = opts.fetch(:raise_on_fail, true) argv = %w{git} if opts[:path] argv << "--git-dir" << File.join(opts[:path], '.git') argv << "--work-tree" << opts[:path] else if opts[:git_dir] argv << "--git-dir" << opts[:git_dir] end if opts[:work_tree] argv << "--work-tree" << opts[:work_tree] end end argv.concat(cmd) subproc = R10K::Util::Subprocess.new(argv) subproc.raise_on_fail = raise_on_fail subproc.logger = self.logger subproc.execute end end r10k-4.0.2/lib/r10k/git/shellgit/cache.rb000066400000000000000000000003531460033767200176140ustar00rootroot00000000000000require 'r10k/git/shellgit' require 'r10k/git/cache' class R10K::Git::ShellGit::Cache < R10K::Git::Cache @instance_cache = R10K::InstanceCache.new(self) def self.bare_repository R10K::Git::ShellGit::BareRepository end end r10k-4.0.2/lib/r10k/git/shellgit/thin_repository.rb000066400000000000000000000040341460033767200220120ustar00rootroot00000000000000require 'r10k/git/shellgit' require 'r10k/git/shellgit/cache' require 'r10k/git/shellgit/working_repository' # Manage a Git working repository backed with cached bare repositories. Instead # of duplicating all objects for new clones and updates, this uses Git # alternate object databases to reuse objects from an existing repository, # making new clones very lightweight. class R10K::Git::ShellGit::ThinRepository < R10K::Git::ShellGit::WorkingRepository def initialize(basedir, dirname, cache_repo) @cache_repo = cache_repo super(basedir, dirname) end # Clone this git repository # # @param remote [String] The Git remote to clone # @param opts [Hash] # # @options opts [String] :ref The git ref to check out on clone # # @return [void] def clone(remote, opts = {}) # todo check if opts[:reference] is set @cache_repo.sync super(remote, opts.merge(:reference => @cache_repo.git_dir.to_s)) setup_cache_remote end # Fetch refs from the backing bare Git repository. def fetch(remote = 'cache') git ['fetch', remote], :path => @path.to_s end # @return [String] The origin remote URL def cache git(['config', '--get', 'remote.cache.url'], :path => @path.to_s, :raise_on_fail => false).stdout end def tracked_paths(ref="HEAD") git(['ls-tree', '-t', '-r', '--name-only', ref], :path => @path.to_s).stdout.split("\n") end def stage_files(files=['.']) git(['add', files].flatten, :path => @path.to_s) end private def setup_cache_remote git ["remote", "add", "cache", @cache_repo.git_dir.to_s], :path => @path.to_s fetch end def git(cmd, opts = {}) if !@_synced_alternates sync_alternates @_synced_alternates = true end super end def sync_alternates if git_dir.exist? 
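      # Alternates#add? only appends the cache's object database path when it is
      # not already listed, so repeated syncs leave the file unchanged.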
entry_added = alternates.add?(@cache_repo.objects_dir.to_s) if entry_added logger.debug2 { _("Updated repo %{path} to include alternate object db path %{objects_dir}") % {path: @path, objects_dir: @cache_repo.objects_dir} } end end end end r10k-4.0.2/lib/r10k/git/shellgit/working_repository.rb000066400000000000000000000055721460033767200225400ustar00rootroot00000000000000require 'r10k/git' require 'r10k/git/alternates' require 'r10k/git/shellgit/base_repository' # Manage a non-bare Git repository class R10K::Git::ShellGit::WorkingRepository < R10K::Git::ShellGit::BaseRepository # @attribute [r] path # @return [Pathname] attr_reader :path # @return [Pathname] The path to the Git directory inside of this repository def git_dir @path + '.git' end def initialize(basedir, dirname) @path = Pathname.new(File.join(basedir, dirname)) end # Clone this git repository # # @param remote [String] The Git remote to clone # @param opts [Hash] # # @options opts [String] :ref The git ref to check out on clone # @options opts [String] :reference A Git repository to use as an alternate object database # # @return [void] def clone(remote, opts = {}) argv = ['clone', remote, @path.to_s] if opts[:reference] argv += ['--reference', opts[:reference]] end proxy = R10K::Git.get_proxy_for_remote(remote) R10K::Git.with_proxy(proxy) do git argv end if opts[:ref] checkout(opts[:ref]) end end # Check out the given Git ref # # @param ref [String] The git reference to check out # @param opts [Hash] Optional hash of additional options. def checkout(ref, opts = {}) argv = ['checkout', ref] # :force defaults to true if !opts.has_key?(:force) || opts[:force] argv << '--force' end git argv, :path => @path.to_s end def fetch(remote_name='origin') remote = remotes[remote_name] proxy = R10K::Git.get_proxy_for_remote(remote) R10K::Git.with_proxy(proxy) do git ['fetch', remote_name, '--prune'], :path => @path.to_s end end def exist? @path.exist? end # @return [String] The currently checked out ref def head resolve('HEAD') end def alternates R10K::Git::Alternates.new(git_dir) end # @return [String] The origin remote URL def origin result = git(['config', '--get', 'remote.origin.url'], :path => @path.to_s, :raise_on_fail => false) if result.success? result.stdout end end # does the working tree have local modifications to tracked files? def dirty?(exclude_spec=true) result = git(['diff-index', '--exit-code', '--name-only', 'HEAD'], :path => @path.to_s, :raise_on_fail => false) if result.exit_code != 0 dirty_files = result.stdout.split("\n") dirty_files.delete_if { |f| f.start_with?('spec/') } if exclude_spec dirty_files.each do |file| logger.debug(_("Found local modifications in %{file_path}" % {file_path: File.join(@path, file)})) # Do this in a block so that the extra subprocess only gets invoked when needed. 
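        # (`git diff-index -p HEAD -- <file>` prints the patch for the modified
        # file; the output is only wanted at debug1 verbosity.)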
logger.debug1 { git(['diff-index', '-p', 'HEAD', '--', file], :path => @path.to_s, :raise_on_fail => false).stdout } end return dirty_files.size > 0 else return false end end end r10k-4.0.2/lib/r10k/git/stateful_repository.rb000066400000000000000000000062331460033767200210670ustar00rootroot00000000000000require 'r10k/git' require 'r10k/git/errors' require 'forwardable' require 'r10k/logging' # Manage how Git repositories are created and set to specific refs class R10K::Git::StatefulRepository include R10K::Logging # @!attribute [r] repo # @api private attr_reader :repo # @!attribute [r] cache # @api private attr_reader :cache extend Forwardable def_delegators :@repo, :head, :tracked_paths # Create a new shallow git working directory # # @param remote [String] The git remote to use for the repo # @param basedir [String] The path containing the Git repo # @param dirname [String] The directory name of the Git repo def initialize(remote, basedir, dirname) @remote = remote @cache = R10K::Git.cache.generate(@remote) @repo = R10K::Git.thin_repository.new(basedir, dirname, @cache) end def resolve(ref) @cache.sync if sync_cache?(ref) @cache.resolve(ref) end # Returns true if the sync actually updated the repo, false otherwise def sync(ref, force=true, exclude_spec=true) @cache.sync if sync_cache?(ref) sha = @cache.resolve(ref) if sha.nil? raise R10K::Git::UnresolvableRefError.new(_("Unable to sync repo to unresolvable ref '%{ref}'") % {ref: ref}, :git_dir => @repo.git_dir) end workdir_status = status(ref, exclude_spec) updated = true case workdir_status when :absent logger.debug(_("Cloning %{repo_path} and checking out %{ref}") % {repo_path: @repo.path, ref: ref }) @repo.clone(@remote, {:ref => sha}) when :mismatched logger.debug(_("Replacing %{repo_path} and checking out %{ref}") % {repo_path: @repo.path, ref: ref }) @repo.path.rmtree @repo.clone(@remote, {:ref => sha}) when :outdated logger.debug(_("Updating %{repo_path} to %{ref}") % {repo_path: @repo.path, ref: ref }) @repo.checkout(sha, {:force => force}) when :dirty if force logger.warn(_("Overwriting local modifications to %{repo_path}") % {repo_path: @repo.path}) logger.debug(_("Updating %{repo_path} to %{ref}") % {repo_path: @repo.path, ref: ref }) @repo.checkout(sha, {:force => force}) else logger.warn(_("Skipping %{repo_path} due to local modifications") % {repo_path: @repo.path}) updated = false end else logger.debug(_("%{repo_path} is already at Git ref %{ref}") % {repo_path: @repo.path, ref: ref }) updated = false end updated end def status(ref, exclude_spec=true) if !@repo.exist? :absent elsif !@cache.exist? :mismatched elsif !@repo.git_dir.exist? :mismatched elsif !@repo.git_dir.directory? :mismatched elsif !(@repo.origin == @remote) :mismatched elsif @repo.head.nil? :mismatched elsif @repo.dirty?(exclude_spec) :dirty elsif !(@repo.head == @cache.resolve(ref)) :outdated elsif @cache.ref_type(ref) == :branch && !@cache.synced? :outdated else :insync end end # @api private def sync_cache?(ref) return true if !@cache.exist? return true if ref == 'HEAD' return true if !([:commit, :tag].include? 
@cache.ref_type(ref)) return false end end r10k-4.0.2/lib/r10k/initializers.rb000066400000000000000000000065261460033767200166710ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/git' require 'r10k/git/cache' require 'r10k/forge/module_release' require 'r10k/tarball' module R10K module Initializers class BaseInitializer include R10K::Logging def initialize(settings) @settings = settings end private def with_setting(key) if !@settings[key].nil? yield @settings[key] end end end class GlobalInitializer < BaseInitializer def call with_setting(:purgedirs) do |_| logger.warn(_("the purgedirs key in r10k.yaml is deprecated. it is currently ignored.")) end with_setting(:logging) { |value| LoggingInitializer.new(value).call } with_setting(:deploy) { |value| DeployInitializer.new(value).call } with_setting(:cachedir) { |value| R10K::Git::Cache.settings[:cache_root] = value } with_setting(:cachedir) { |value| R10K::Forge::ModuleRelease.settings[:cache_root] = value } with_setting(:cachedir) { |value| R10K::Tarball.settings[:cache_root] = value } with_setting(:pool_size) { |value| R10K::Puppetfile.settings[:pool_size] = value } with_setting(:proxy) { |value| R10K::Tarball.settings[:proxy] = value } with_setting(:git) { |value| GitInitializer.new(value).call } with_setting(:forge) { |value| ForgeInitializer.new(value).call } with_setting(:tarball) { |value| TarballInitializer.new(value).call } end end class LoggingInitializer < BaseInitializer def call with_setting(:level) { |value| R10K::Logging.level = value } with_setting(:disable_default_stderr) { |value| R10K::Logging.disable_default_stderr = value } with_setting(:outputs) { |value| R10K::Logging.add_outputters(value) } end end class DeployInitializer < BaseInitializer def call with_setting(:puppet_path) { |value| R10K::Settings.puppet_path = value } with_setting(:puppet_conf) { |value| R10K::Settings.puppet_conf = value } end end class GitInitializer < BaseInitializer def call with_setting(:provider) { |value| R10K::Git.provider = value } with_setting(:username) { |value| R10K::Git.settings[:username] = value } with_setting(:private_key) { |value| R10K::Git.settings[:private_key] = value } with_setting(:proxy) { |value| R10K::Git.settings[:proxy] = value } with_setting(:repositories) { |value| R10K::Git.settings[:repositories] = value } with_setting(:oauth_token) { |value| R10K::Git.settings[:oauth_token] = value } with_setting(:github_app_id) { |value| R10K::Git.settings[:github_app_id] = value } with_setting(:github_app_key) { |value| R10K::Git.settings[:github_app_key] = value } with_setting(:github_app_ttl) { |value| R10K::Git.settings[:github_app_ttl] = value } end end class ForgeInitializer < BaseInitializer def call with_setting(:baseurl) { |value| PuppetForge.host = value } with_setting(:proxy) { |value| PuppetForge::Connection.proxy = value } with_setting(:authorization_token) { |value| PuppetForge::Connection.authorization = value } end end class TarballInitializer < BaseInitializer def call with_setting(:proxy) { |value| R10K::Tarball.settings[:proxy] = value } end end end end r10k-4.0.2/lib/r10k/instance_cache.rb000066400000000000000000000016331460033767200171040ustar00rootroot00000000000000module R10K # This class implements a generic object memoization container. It caches # new objects and returns cached objects based on the instantiation arguments. 
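  #
  # @example Memoizing instances by constructor arguments (illustrative only;
  #   `SomeClass` stands in for whatever class is registered with the cache)
  #   cache = R10K::InstanceCache.new(SomeClass)
  #   cache.generate('x').equal?(cache.generate('x')) #=> true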
class InstanceCache # Initialize a new registry with a given class # # @param klass [Class] The class to memoize # @param method [Symbol] The method name to use when creating objects. # Defaults to :new. def initialize(klass, method = :new) @klass = klass @method = method @instances = {} end # Create a new object, or return a memoized object. # # @param args [*Object] The arguments to pass to the initialize method # # @return [Object] A memoized instance of the registered class def generate(*args) @instances[args] ||= @klass.send(@method, *args) end # Clear all memoized objects def clear! @instances = {} end end end r10k-4.0.2/lib/r10k/keyed_factory.rb000066400000000000000000000020261460033767200170020ustar00rootroot00000000000000module R10K # This implements a factory by storing classes indexed with a given key and # creates objects based on that key. class KeyedFactory # @!attribute [r] implementations # @return [Hash] A hash of keys and the associated # implementations that this factory can generate. attr_reader :implementations def initialize @implementations = {} end def register(key, klass) if @implementations.has_key?(key) raise DuplicateImplementationError, _("Class already registered for %{key}") % {key: key} else @implementations[key] = klass end end def retrieve(key) @implementations[key] end def generate(key, *args) if (impl = @implementations[key]) impl.new(*args) else raise UnknownImplementationError, _("No class registered for %{key}") % {key: key} end end class DuplicateImplementationError < StandardError; end class UnknownImplementationError < StandardError; end end end r10k-4.0.2/lib/r10k/logging.rb000066400000000000000000000125411460033767200156030ustar00rootroot00000000000000require 'r10k' require 'forwardable' require 'log4r' require 'log4r/configurator' require 'r10k/logging/terminaloutputter' module R10K::Logging LOG_LEVELS = %w{DEBUG2 DEBUG1 DEBUG INFO NOTICE WARN ERROR FATAL} SYSLOG_LEVELS_MAP = { 'DEBUG2' => 'DEBUG', 'DEBUG1' => 'DEBUG', 'DEBUG' => 'DEBUG', 'INFO' => 'INFO', 'NOTICE' => 'INFO', 'WARN' => 'WARN', 'ERROR' => 'ERROR', 'FATAL' => 'FATAL', }.freeze def logger_name self.class.to_s end def logger if @logger.nil? name = logger_name if Log4r::Logger[name] @logger = Log4r::Logger[name] else @logger = Log4r::Logger.new(name) @logger.add(R10K::Logging.outputter) R10K::Logging.outputters.each do |output| @logger.add(output) end end end @logger end class << self # Convert the input to a valid Log4r log level # # @param input [String, TrueClass] The level to parse. If TrueClass then # Log4r::INFO will be returned (indicating a generic raised verbosity), # if a string it will be parsed either as a numeric value or a textual # log level. # @api private # @return [Integer, NilClass] The numeric log level, or nil if the log # level is invalid. def parse_level(input) case input when TrueClass Log4r::INFO when /\A\d+\Z/ Integer(input) when String const_name = input.upcase if LOG_LEVELS.include?(const_name) begin Log4r.const_get(const_name.to_sym) rescue NameError end end end end def level=(val) level = parse_level(val) if level.nil? raise ArgumentError, _("Invalid log level '%{val}'. Valid levels are %{log_levels}") % {val: val, log_levels: LOG_LEVELS.map(&:downcase).inspect} end outputter.level = level unless @disable_default_stderr @level = level if level < Log4r::INFO outputter.formatter = debug_formatter else outputter.formatter = default_formatter end end def disable_default_stderr=(val) @disable_default_stderr = val outputter.level = val ? 
Log4r::OFF : @level end def add_outputters(outputs) outputs.each do |output| type = output.fetch(:type) # Support specifying both short as well as full names type = type.to_s[0..-10] if type.to_s.downcase.end_with? 'outputter' name = output.fetch(:name, 'r10k') if output[:level] level = parse_level(output[:level]) if level.nil? raise ArgumentError, _("Invalid log level '%{val}'. Valid levels are %{log_levels}") % { val: output[:level], log_levels: LOG_LEVELS.map(&:downcase).inspect } end else level = self.level end only_at = output[:only_at] only_at&.map! do |val| lv = parse_level(val) if lv.nil? raise ArgumentError, _("Invalid log level '%{val}'. Valid levels are %{log_levels}") % { val: val, log_levels: LOG_LEVELS.map(&:downcase).inspect } end lv end parameters = output.fetch(:parameters, {}).merge({ level: level }) begin # Try to load the outputter file if possible require "log4r/outputter/#{type.to_s.downcase}outputter" rescue LoadError false end outputtertype = Log4r.constants .select { |klass| klass.to_s.end_with? 'Outputter' } .find { |klass| klass.to_s.downcase == "#{type.to_s.downcase}outputter" } raise ArgumentError, "Unable to find a #{output[:type]} outputter." unless outputtertype outputter = Log4r.const_get(outputtertype).new(name, parameters) outputter.only_at(*only_at) if only_at # Handle log4r's syslog mapping correctly outputter.map_levels_by_name_to_syslog(SYSLOG_LEVELS_MAP) if outputter.respond_to? :map_levels_by_name_to_syslog @outputters << outputter Log4r::Logger.global.add outputter end end extend Forwardable def_delegators :@outputter, :use_color, :use_color= # @!attribute [r] level # @return [Integer] The current log level. Lower numbers correspond # to more verbose log levels. attr_reader :level # @!attribute [r] formatter # @api private # @return [Log4r::Formatter] attr_reader :formatter # @!attribute [r] outputter # @api private # @return [Log4r::Outputter] attr_reader :outputter # @!attribute [r] outputters # @api private # @return [Array[Log4r::Outputter]] attr_reader :outputters # @!attribute [r] disable_default_stderr # @api private # @return [Boolean] attr_reader :disable_default_stderr def default_formatter Log4r::PatternFormatter.new(:pattern => '%l\t -> %m') end def debug_formatter Log4r::PatternFormatter.new(:pattern => '[%d - %l] %m') end def default_outputter R10K::Logging::TerminalOutputter.new('terminal', $stderr, :level => self.level, :formatter => formatter) end end Log4r::Configurator.custom_levels(*LOG_LEVELS) Log4r::Logger.global.level = Log4r::ALL @level = Log4r::WARN @formatter = default_formatter @outputter = default_outputter @outputters = [] @disable_default_stderr = false end r10k-4.0.2/lib/r10k/logging/000077500000000000000000000000001460033767200152535ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/logging/terminaloutputter.rb000066400000000000000000000011201460033767200214010ustar00rootroot00000000000000require 'colored2' require 'r10k/logging' require 'log4r/outputter/iooutputter' module R10K module Logging class TerminalOutputter < Log4r::IOOutputter COLORS = [ nil, :cyan, :cyan, :green, nil, nil, :yellow, :red, :red, ] attr_accessor :use_color private def format(logevent) string = super if @use_color color = COLORS[logevent.level] color ? 
string.send(color) : string else string end end end end end r10k-4.0.2/lib/r10k/module.rb000066400000000000000000000041141460033767200154370ustar00rootroot00000000000000require 'r10k' module R10K::Module # Register an module implementation for later generation def self.register(klass) @klasses ||= [] @klasses << klass end # Look up the implementing class and instantiate an object # # This method takes the arguments for normal object generation and checks all # inheriting classes to see if they implement the behavior needed to create # the requested object. It selects the first class that can implement an object # with `name, args`, and generates an object of that class. # # @param [String] name The unique name of the module # @param [String] basedir The root to install the module in # @param [Hash] args An arbitary Hash that specifies the implementation # @param [R10K::Environment] environment Optional environment that this module is a part of # # @return [Object < R10K::Module] A member of the implementing subclass def self.new(name, basedir, args, environment=nil) with_implementation(name, args) do |implementation| implementation.new(name, basedir, args, environment) end end # Takes the same signature as Module.new but returns an metadata module def self.from_metadata(name, basedir, args, environment=nil) with_implementation(name, args) do |implementation| R10K::Module::Definition.new(name, dirname: basedir, args: args, implementation: implementation, environment: environment) end end def self.with_implementation(name, args, &block) if implementation = @klasses.find { |klass| klass.implement?(name, args) } block.call(implementation) else raise _("Module %{name} with args %{args} doesn't have an implementation. (Are you using the right arguments?)") % {name: name, args: args.inspect} end end require 'r10k/module/base' require 'r10k/module/git' require 'r10k/module/svn' require 'r10k/module/local' require 'r10k/module/forge' require 'r10k/module/definition' require 'r10k/module/tarball' end r10k-4.0.2/lib/r10k/module/000077500000000000000000000000001460033767200151125ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/module/base.rb000066400000000000000000000127011460033767200163520ustar00rootroot00000000000000require 'r10k/module' require 'r10k/logging' require 'puppet_forge' # This class defines a common interface for module implementations. class R10K::Module::Base include R10K::Logging # @!attribute [r] title # @return [String] The forward slash separated owner and name of the module attr_reader :title # @!attribute [r] name # @return [String] The name of the module attr_reader :name # @param [r] dirname # @return [String] The name of the directory containing this module attr_reader :dirname # @deprecated alias :basedir :dirname # @!attribute [r] owner # @return [String, nil] The owner of the module if one is specified attr_reader :owner # @!attribute [r] path # @return [Pathname] The full path of the module attr_reader :path # @!attribute [r] environment # @return [R10K::Environment, nil] The parent environment of the module attr_reader :environment # @!attribute [rw] origin # @return [String] Where the module was sourced from. E.g., "Puppetfile" attr_accessor :origin # @!attribute [rw] spec_deletable # @return [Boolean] set this to true if the spec dir can be safely removed, ie in the moduledir attr_accessor :spec_deletable # There's been some churn over `author` vs `owner` and `full_name` over # `title`, so in the short run it's easier to support both and deprecate one # later. 
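  #
  # Illustrative example of the aliased accessors (the module name below is a
  # placeholder, and the plain base class is shown even though it is normally
  # subclassed):
  #
  # @example
  #   mod = R10K::Module::Base.new('acme/mymodule', '/tmp/modules', {})
  #   mod.owner     #=> 'acme'
  #   mod.author    #=> 'acme'
  #   mod.title     #=> 'acme-mymodule'
  #   mod.full_name #=> 'acme-mymodule'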
alias :author :owner alias :full_name :title # @param title [String] # @param dirname [String] # @param args [Hash] def initialize(title, dirname, args, environment=nil) @title = PuppetForge::V3.normalize_name(title) @dirname = dirname @args = args @owner, @name = parse_title(@title) @path = Pathname.new(File.join(@dirname, @name)) @environment = environment @overrides = args.delete(:overrides) || {} @spec_deletable = true @exclude_spec = true @exclude_spec = @overrides.dig(:modules, :exclude_spec) unless @overrides.dig(:modules, :exclude_spec).nil? if args.has_key?(:exclude_spec) logger.debug2 _("Overriding :exclude_spec setting with per module setting for #{@title}") @exclude_spec = args.delete(:exclude_spec) end @origin = 'external' # Expect Puppetfile or R10k::Environment to set this to a specific value @requested_modules = @overrides.dig(:modules, :requested_modules) || [] @should_sync = (@requested_modules.empty? || @requested_modules.include?(@name)) end # @deprecated # @return [String] The full filesystem path to the module. def full_path path.to_s end # Delete the spec dir if @exclude_spec is true and @spec_deletable is also true def maybe_delete_spec_dir if @exclude_spec if @spec_deletable delete_spec_dir else logger.info _("Spec dir for #{@title} will not be deleted because it is not in the moduledir") end end end # Actually remove the spec dir def delete_spec_dir spec_path = @path + 'spec' if spec_path.symlink? spec_path = spec_path.realpath end if spec_path.directory? logger.debug2 _("Deleting spec data at #{spec_path}") # Use the secure flag for the #rm_rf method to avoid security issues # involving TOCTTOU(time of check to time of use); more details here: # https://ruby-doc.org/stdlib-2.7.0/libdoc/fileutils/rdoc/FileUtils.html#method-c-rm_rf # Additionally, #rm_rf also has problems in windows with with symlink targets # also being deleted; this should be revisted if Windows becomes higher priority. FileUtils.rm_rf(spec_path, secure: true) else logger.debug2 _("No spec dir detected at #{spec_path}, skipping deletion") end end # Synchronize this module with the indicated state. # @param [Hash] opts Deprecated # @return [Boolean] true if the module was updated, false otherwise def sync(opts={}) raise NotImplementedError end def should_sync? if @should_sync logger.info _("Deploying module to %{path}") % {path: path} true else logger.debug1(_("Only updating modules %{modules}, skipping module %{name}") % {modules: @requested_modules.inspect, name: name}) false end end # Return the desired version of this module # @abstract def version raise NotImplementedError end # Return the status of the currently installed module. # # This can return the following values: # # * :absent - there is no module installed # * :mismatched - there is a module installed but it must be removed and reinstalled # * :outdated - the correct module is installed but it needs to be updated # * :insync - the correct module is installed and up to date, or the module is actually a boy band. # # @return [Symbol] # @abstract def status raise NotImplementedError end # Deprecated def accept(visitor) visitor.visit(:module, self) end # Return the properties of the module # # @return [Hash] # @abstract def properties raise NotImplementedError end # Return the module's cachedir. Subclasses that implement a cache # will override this to return a real directory location. 
# # @return [String, :none] def cachedir :none end private def parse_title(title) if (match = title.match(/\A(\w+)\Z/)) [nil, match[1]] elsif (match = title.match(/\A(\w+)[-\/](\w+)\Z/)) [match[1], match[2]] else raise ArgumentError, _("Module name (%{title}) must match either 'modulename' or 'owner/modulename'") % {title: title} end end end r10k-4.0.2/lib/r10k/module/definition.rb000066400000000000000000000032701460033767200175710ustar00rootroot00000000000000require 'r10k/module' class R10K::Module::Definition < R10K::Module::Base attr_reader :version def initialize(name, dirname:, args:, implementation:, environment: nil) @original_name = name @original_args = args.dup @implementation = implementation @version = implementation.statically_defined_version(name, args) super(name, dirname, args, environment) end def to_implementation mod = @implementation.new(@title, @dirname, @original_args, @environment) mod.origin = origin mod.spec_deletable = spec_deletable mod end # syncing is a noop for module definitions # Returns false to inidicate the module was not updated def sync(args = {}) logger.debug1(_("Not updating module %{name}, assuming content unchanged") % {name: name}) false end def status :insync end def properties type = nil if @args[:type] type = @args[:type] elsif @args[:ref] || @args[:commit] || @args[:branch] || @args[:tag] type = 'git' elsif @args[:svn] # This logic is clear and included for completeness sake, though at # this time module definitions do not support SVN versions. type = 'svn' else type = 'forge' end { expected: version, # We can't get the value for `actual` here because that requires the # implementation (and potentially expensive operations by the # implementation). Some consumers will check this value, if it exists # and if not, fall back to the expected version. That is the correct # behavior when assuming modules are unchanged, and why `actual` is set # to `nil` here. actual: nil, type: type } end end r10k-4.0.2/lib/r10k/module/forge.rb000066400000000000000000000131761460033767200165510ustar00rootroot00000000000000require 'r10k/module' require 'r10k/errors' require 'r10k/module/metadata_file' require 'r10k/forge/module_release' require 'pathname' require 'fileutils' require 'puppet_forge/util' class R10K::Module::Forge < R10K::Module::Base R10K::Module.register(self) def self.implement?(name, args) args[:type].to_s == 'forge' end def self.statically_defined_version(name, args) args[:version] if args[:version].is_a?(String) end # @!attribute [r] metadata # @api private # @return [PuppetForge::Metadata] attr_reader :metadata # @!attribute [r] v3_module # @api private # @return [PuppetForge::V3::Module] The Puppet Forge module metadata attr_reader :v3_module include R10K::Util::Setopts def initialize(title, dirname, opts, environment=nil) super @metadata_file = R10K::Module::MetadataFile.new(path + 'metadata.json') @metadata = @metadata_file.read setopts(opts, { # Standard option interface :version => :expected_version, :source => ::R10K::Util::Setopts::Ignore, :type => ::R10K::Util::Setopts::Ignore, }, :raise_on_unhandled => false) # Validate version and raise on issue. Title is validated by base class. unless valid_version?(@expected_version) raise ArgumentError, _("Module version %{ver} is not a valid Forge module version") % {ver: @expected_version} end @expected_version ||= current_version || :latest @v3_module = PuppetForge::V3::Module.new(:slug => @title) end def valid_version?(version) version == :latest || version.nil? 
|| PuppetForge::Util.version_valid?(version) end # @param [Hash] opts Deprecated # @return [Boolean] true if the module was updated, false otherwise def sync(opts={}) updated = false if should_sync? case status when :absent install updated = true when :outdated upgrade updated = true when :mismatched reinstall updated = true end maybe_delete_spec_dir end updated end def properties { :expected => expected_version, :actual => current_version, :type => :forge, } end # @return [String] The expected version that the module def expected_version if @expected_version == :latest begin if @v3_module.current_release @expected_version = @v3_module.current_release.version else raise PuppetForge::ReleaseNotFound, _("The module %{title} does not appear to have any published releases, cannot determine latest version.") % { title: @title } end rescue Faraday::ResourceNotFound => e raise PuppetForge::ReleaseNotFound, _("The module %{title} does not exist on %{url}.") % {title: @title, url: PuppetForge::V3::Release.conn.url_prefix}, e.backtrace end end @expected_version end # @return [String] The version of the currently installed module def current_version if insync? (@metadata ||= @metadata_file.read).nil? ? nil : @metadata.version else nil end end alias version current_version def exist? path.exist? end def insync? status == :insync end def deprecated? begin @v3_module.fetch && @v3_module.has_attribute?('deprecated_at') && !@v3_module.deprecated_at.nil? rescue Faraday::ResourceNotFound => e raise PuppetForge::ReleaseNotFound, _("The module %{title} does not exist on %{url}.") % {title: @title, url: PuppetForge::V3::Release.conn.url_prefix}, e.backtrace end end # Determine the status of the forge module. # # @return [Symbol] :absent If the directory doesn't exist # @return [Symbol] :mismatched If the module is not a forge module, or # isn't the right forge module # @return [Symbol] :mismatched If the module was previously a git checkout # @return [Symbol] :outdated If the installed module is older than expected # @return [Symbol] :insync If the module is in the desired state def status if not self.exist? # The module is not installed return :absent elsif not File.exist?(@path + 'metadata.json') # The directory exists but doesn't have a metadata file; it probably # isn't a forge module. return :mismatched end if File.directory?(@path + '.git') return :mismatched end # The module is present and has a metadata file, read the metadata to # determine the state of the module. @metadata = @metadata_file.read(@path + 'metadata.json') if not @title.tr('/','-') == @metadata.full_module_name.tr('/','-') # This is a forge module but the installed module is a different author # than the expected author. return :mismatched end if expected_version && (expected_version != @metadata.version) return :outdated end return :insync end def install if deprecated? logger.warn "Puppet Forge module '#{@v3_module.slug}' has been deprecated, visit https://forge.puppet.com/#{@v3_module.slug.tr('-','/')} for more information." end parent_path = @path.parent if !parent_path.exist? 
parent_path.mkpath end module_release = R10K::Forge::ModuleRelease.new(@title, expected_version) module_release.install(@path) end alias upgrade install def uninstall FileUtils.rm_rf full_path end def reinstall uninstall install end private # Override the base #parse_title to ensure we have a fully qualified name def parse_title(title) if (match = title.match(/\A(\w+)[-\/](\w+)\Z/)) [match[1], match[2]] else raise ArgumentError, _("Forge module names must match 'owner/modulename', instead got #{title}") end end end r10k-4.0.2/lib/r10k/module/git.rb000066400000000000000000000113351460033767200162250ustar00rootroot00000000000000require 'r10k/module' require 'r10k/git' require 'r10k/git/stateful_repository' require 'forwardable' class R10K::Module::Git < R10K::Module::Base R10K::Module.register(self) def self.implement?(name, args) args.has_key?(:git) || args[:type].to_s == 'git' end # Will be called if self.implement? above returns true. Will return # the version info, if version is statically defined in the modules # declaration. def self.statically_defined_version(name, args) if !args[:type] && (args[:ref] || args[:tag] || args[:commit]) if args[:ref] && args[:ref].to_s.match(/[0-9a-f]{40}/) args[:ref] else args[:tag] || args[:commit] end elsif args[:type].to_s == 'git' && args[:version] && args[:version].to_s.match(/[0-9a-f]{40}/) args[:version] end end # @!attribute [r] repo # @api private # @return [R10K::Git::StatefulRepository] attr_reader :repo # @!attribute [r] desired_ref # @api private # @return [String] attr_reader :desired_ref # @!attribute [r] default_ref # @api private # @return [String] attr_reader :default_ref # @!attribute [r] default_override_ref # @api private # @return [String] attr_reader :default_override_ref include R10K::Util::Setopts def initialize(title, dirname, opts, environment=nil) super setopts(opts, { # Standard option interface :version => :desired_ref, :source => :remote, :type => ::R10K::Util::Setopts::Ignore, # Type-specific options :branch => :desired_ref, :tag => :desired_ref, :commit => :desired_ref, :ref => :desired_ref, :git => :remote, :default_branch => :default_branch, :default_branch_override => :default_override_ref, }, :raise_on_unhandled => false) @default_ref = @default_branch.nil? ? @overrides.dig(:modules, :default_ref) : @default_branch force = @overrides[:force] @force = force == false ? false : true if @desired_ref == :control_branch if @environment && @environment.respond_to?(:ref) @desired_ref = @environment.ref else logger.warn _("Cannot track control repo branch for content '%{name}' when not part of a git-backed environment, will use default if available." % {name: name}) end end @repo = R10K::Git::StatefulRepository.new(@remote, @dirname, @name) end def version validate_ref(@desired_ref, @default_ref, @default_override_ref) end def properties { :expected => version, :actual => (@repo.head || "(unresolvable)"), :type => :git, } end # @param [Hash] opts Deprecated # @return [Boolean] true if the module was updated, false otherwise def sync(opts={}) force = opts[:force] || @force if should_sync? updated = @repo.sync(version, force, @exclude_spec) else updated = false end maybe_delete_spec_dir updated end def status @repo.status(version) end def cachedir @repo.cache.sanitized_dirname end def validate_ref_defined if @desired_ref.nil? && @default_ref.nil? && @default_override_ref.nil? msg = "No ref defined for module #{@name}. 
Add a ref to the module definition " msg << "or set git:default_ref in the r10k.yaml config to configure a global default ref." raise ArgumentError, msg end end private def validate_ref(desired, default, default_override) if desired && desired != :control_branch && @repo.resolve(desired) return desired elsif default_override && @repo.resolve(default_override) return default_override elsif default && @repo.resolve(default) return default else msg = ["Unable to manage Puppetfile content '%{name}':"] vars = {name: @name} if desired == :control_branch msg << "Could not resolve control repo branch" elsif desired msg << "Could not resolve desired ref '%{desired}'" vars[:desired] = desired else msg << "Could not determine desired ref" end if default_override msg << "or resolve the default branch override '%{default_override}'," vars[:default_override] = default_override end if default msg << "or resolve default ref '%{default}'" vars[:default] = default else msg << "and no default provided. r10k no longer hardcodes 'master' as the default ref." msg << "Consider setting a ref per module in the Puppetfile or setting git:default_ref" msg << "in your r10k config." end raise ArgumentError, _(msg.join(' ')) % vars end end end r10k-4.0.2/lib/r10k/module/local.rb000066400000000000000000000017021460033767200165310ustar00rootroot00000000000000require 'r10k/module' # A dummy module type that can be used to "protect" Puppet modules that exist # inside of the Puppetfile "moduledir" location. Local modules will not be # modified, and will not be purged when r10k removes unmanaged modules. class R10K::Module::Local < R10K::Module::Base R10K::Module.register(self) def self.implement?(name, args) args[:local] || args[:type].to_s == 'local' end def self.statically_defined_version(*) "0.0.0" end def version self.class.statically_defined_version end def properties { :expected => "0.0.0 (local)", :actual => "0.0.0 (local)", :type => :forge, } end def status :insync end # @param [Hash] opts Deprecated # @return [Boolean] false, because local modules are always considered in-sync def sync(opts={}) logger.debug1 _("Module %{title} is a local module, always indicating synced.") % {title: title} false end end r10k-4.0.2/lib/r10k/module/metadata_file.rb000066400000000000000000000015501460033767200202170ustar00rootroot00000000000000require 'r10k/module' require 'r10k/errors' require 'puppet_forge' class R10K::Module::MetadataFile # @param metadata_path [Pathname] The file path to the metadata def initialize(metadata_file_path) @metadata_file_path = metadata_file_path end # Does the metadata file itself exist? def exist? @metadata_file_path.file? and @metadata_file_path.readable? end # @return [Puppet::ModuleTool::Metadata ] The metadata object created by the metadatafile def read(metadata_file_path = @metadata_file_path) if self.exist? 
metadata_file_path.open do |f| begin metadata = PuppetForge::Metadata.new metadata.update(JSON.load(f), false) rescue JSON::ParserError => e exception = R10K::Error.wrap(e, _("Could not read metadata.json")) raise exception end end end end end r10k-4.0.2/lib/r10k/module/svn.rb000066400000000000000000000054631460033767200162550ustar00rootroot00000000000000require 'r10k/module' require 'r10k/svn/working_dir' require 'r10k/util/setopts' class R10K::Module::SVN < R10K::Module::Base R10K::Module.register(self) def self.implement?(name, args) args.has_key?(:svn) || args[:type].to_s == 'svn' end def self.statically_defined_version(name, args) nil end # @!attribute [r] expected_revision # @return [String] The SVN revision that the repo should have checked out attr_reader :expected_revision alias expected_version expected_revision # @!attribute [r] full_path # @return [Pathname] The filesystem path to the SVN repo attr_reader :full_path # @!attribute [r] username # @return [String, nil] The SVN username to be passed to the underlying SVN commands # @api private attr_reader :username # @!attribute [r] password # @return [String, nil] The SVN password to be passed to the underlying SVN commands # @api private attr_reader :password # @!attribute [r] working_dir # @return [R10K::SVN::WorkingDir] # @api private attr_reader :working_dir include R10K::Util::Setopts def initialize(name, dirname, opts, environment=nil) super setopts(opts, { # Standard option interface :source => :url, :version => :expected_revision, :type => ::R10K::Util::Setopts::Ignore, # Type-specific options :svn => :url, :rev => :expected_revision, :revision => :expected_revision, :username => :self, :password => :self }, :raise_on_unhandled => false) @working_dir = R10K::SVN::WorkingDir.new(@path, :username => @username, :password => @password) end def status if not self.exist? :absent elsif not @working_dir.is_svn? :mismatched elsif not @url == @working_dir.url :mismatched elsif not @expected_revision == @working_dir.revision :outdated else :insync end end # @param [Hash] opts Deprecated # @return [Boolean] true if the module was updated, false otherwise def sync(opts={}) updated = false if should_sync? case status when :absent install updated = true when :mismatched reinstall updated = true when :outdated update updated = true end maybe_delete_spec_dir end updated end def exist? path.exist? end def properties { :expected => expected_revision, :actual => (@working_dir.revision rescue "(unresolvable)"), :type => :svn, } end private def install FileUtils.mkdir @dirname unless File.directory? 
@dirname @working_dir.checkout(@url, @expected_revision) end def uninstall path.rmtree end def reinstall uninstall install end def update @working_dir.update(@expected_revision) end end r10k-4.0.2/lib/r10k/module/tarball.rb000066400000000000000000000045051460033767200170640ustar00rootroot00000000000000require 'r10k/module' require 'r10k/util/setopts' require 'r10k/tarball' # This class defines a tarball source module implementation class R10K::Module::Tarball < R10K::Module::Base R10K::Module.register(self) def self.implement?(name, args) args.is_a?(Hash) && args[:type].to_s == 'tarball' rescue false end def self.statically_defined_version(name, args) args[:version] || args[:checksum] end # @!attribute [r] tarball # @api private # @return [R10K::Tarball] attr_reader :tarball include R10K::Util::Setopts def initialize(name, dirname, opts, environment=nil) super setopts(opts, { # Standard option interface :source => :self, :version => :checksum, :type => ::R10K::Util::Setopts::Ignore, :overrides => :self, # Type-specific options :checksum => :self, }) @tarball = R10K::Tarball.new(name, @source, checksum: @checksum) end # Return the status of the currently installed module. # # @return [Symbol] def status if not path.exist? :absent elsif not (tarball.cache_valid? && tarball.insync?(path.to_s)) :mismatched else :insync end end # Synchronize this module with the indicated state. # @param [Hash] opts Deprecated # @return [Boolean] true if the module was updated, false otherwise def sync(opts={}) tarball.get unless tarball.cache_valid? if should_sync? case status when :absent tarball.unpack(path.to_s) when :mismatched path.rmtree tarball.unpack(path.to_s) end maybe_delete_spec_dir true else false end end # Return the desired version of this module def version @checksum || '(present)' end # Return the properties of the module # # @return [Hash] # @abstract def properties { :expected => version, :actual => ((state = status) == :insync) ? version : state, :type => :tarball, } end # Tarball caches are files, not directories. An important purpose of this # method is to indicate where the cache "path" is, for locking/parallelism, # so for the Tarball module type, the relevant path location is returned. # # @return [String] The path this module will cache its tarball source to def cachedir tarball.cache_path end end r10k-4.0.2/lib/r10k/module_loader/000077500000000000000000000000001460033767200164405ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/module_loader/puppetfile.rb000066400000000000000000000241651460033767200211520ustar00rootroot00000000000000require 'r10k/errors' require 'r10k/logging' require 'r10k/module' require 'r10k/module_loader/puppetfile/dsl' require 'pathname' module R10K module ModuleLoader class Puppetfile include R10K::Logging DEFAULT_MODULEDIR = 'modules' DEFAULT_PUPPETFILE_NAME = 'Puppetfile' attr_accessor :default_branch_override, :environment attr_reader :modules, :moduledir, :puppetfile_path, :managed_directories, :desired_contents, :purge_exclusions, :environment_name # @param basedir [String] The path that contains the moduledir & # Puppetfile by default. May be an environment, project, or # simple directory. # @param puppetfile [String] The path to the Puppetfile, either an # absolute full path or a relative path with regards to the basedir. # @param moduledir [String] The path to the moduledir, either an # absolute full path or a relative path with regards to the basedir. 
# @param forge [String] The url (without protocol) to the Forge # @param overrides [Hash] Configuration for loaded modules' behavior # @param environment [R10K::Environment] When provided, the environment # in which loading takes place # @param module_exclude_regex [Regex] A regex to exclude modules from # installation. Helpful in CI environments. def initialize(basedir:, moduledir: DEFAULT_MODULEDIR, puppetfile: DEFAULT_PUPPETFILE_NAME, overrides: {}, environment: nil, module_exclude_regex: nil) @basedir = cleanpath(basedir) @moduledir = resolve_path(@basedir, moduledir) @puppetfile_path = resolve_path(@basedir, puppetfile) @overrides = overrides @environment = environment @module_exclude_regex = module_exclude_regex @environment_name = @environment&.name @default_branch_override = @overrides.dig(:environments, :default_branch_override) @allow_puppetfile_forge = @overrides.dig(:forge, :allow_puppetfile_override) @existing_module_metadata = [] @existing_module_versions_by_name = {} @modules = [] @managed_directories = [] @desired_contents = [] @purge_exclusions = [] end def load with_readable_puppetfile(@puppetfile_path) do self.load! end end def load! logger.info _("Using Puppetfile '%{puppetfile}'") % {puppetfile: @puppetfile_path} logger.debug _("Using moduledir '%{moduledir}'") % {moduledir: @moduledir} dsl = R10K::ModuleLoader::Puppetfile::DSL.new(self) dsl.instance_eval(puppetfile_content(@puppetfile_path), @puppetfile_path) validate_no_duplicate_names(@modules) @modules = filter_modules(@modules, @module_exclude_regex) if @module_exclude_regex managed_content = @modules.group_by(&:dirname) @managed_directories = determine_managed_directories(managed_content) @desired_contents = determine_desired_contents(managed_content) @purge_exclusions = determine_purge_exclusions(@managed_directories) { modules: @modules, managed_directories: @managed_directories, desired_contents: @desired_contents, purge_exclusions: @purge_exclusions } rescue SyntaxError, LoadError, ArgumentError, NameError => e raise R10K::Error.wrap(e, _("Failed to evaluate %{path}") % {path: @puppetfile_path}) end def load_metadata with_readable_puppetfile(@puppetfile_path) do self.load_metadata! end end def load_metadata! 
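        # A short illustrative note: a Puppetfile entry with a static version,
        # e.g. `mod 'acme/mymodule', '1.2.3'` (hypothetical module), ends up in
        # @existing_module_versions_by_name as { 'mymodule' => '1.2.3' }.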
dsl = R10K::ModuleLoader::Puppetfile::DSL.new(self, metadata_only: true) dsl.instance_eval(puppetfile_content(@puppetfile_path), @puppetfile_path) @existing_module_versions_by_name = @existing_module_metadata.map {|mod| [ mod.name, mod.version ] }.to_h empty_load_output.merge(modules: @existing_module_metadata) rescue ScriptError, StandardError => e logger.warn _("Unable to preload Puppetfile because of %{msg}" % { msg: e.message }) @existing_module_metadata = [] @existing_module_versions_by_name = {} end def add_module_metadata(name, info) install_path, metadata_info, _ = parse_module_definition(name, info) mod = R10K::Module.from_metadata(name, install_path, metadata_info, @environment) @existing_module_metadata << mod end ## ## set_forge, set_moduledir, and add_module are used directly by the DSL class ## # @param [String] forge def set_forge(forge) if @allow_puppetfile_forge logger.debug _("Using Forge from Puppetfile: %{forge}") % { forge: forge } PuppetForge.host = forge else logger.debug _("Ignoring Forge declaration in Puppetfile, using value from settings: %{forge}.") % { forge: PuppetForge.host } end end # @param [String] moduledir def set_moduledir(moduledir) @moduledir = resolve_path(@basedir, moduledir) end # @param [String] name # @param [Hash, String, Symbol, nil] info Calling with # anything but a Hash is deprecated. The DSL will now convert # String and Symbol versions to Hashes of the shape # { version: } # # String inputs should be valid module versions, the Symbol # `:latest` is allowed, as well as `nil`. # # Non-Hash inputs are only ever used by Forge modules. In # future versions this method will require the caller (the # DSL class, not the Puppetfile author) to do this conversion # itself. # def add_module(name, info) install_path, metadata_info, spec_deletable = parse_module_definition(name, info) mod = R10K::Module.from_metadata(name, install_path, metadata_info, @environment) mod.origin = :puppetfile mod.spec_deletable = spec_deletable # Do not save modules if they would conflict with the attached # environment if @environment && @environment.module_conflicts?(mod) return @modules end # If this module's metadata has a static version, and that version # matches the existing module declaration, and it ostensibly # has already has been deployed to disk, use it. Otherwise create a # regular module to sync. unless mod.version && mod.version == @existing_module_versions_by_name[mod.name] && File.directory?(mod.path) mod = mod.to_implementation end @modules << mod end private def empty_load_output { modules: [], managed_directories: [], desired_contents: [], purge_exclusions: [] } end def with_readable_puppetfile(puppetfile_path, &block) if File.readable?(puppetfile_path) block.call else logger.debug _("Puppetfile %{path} missing or unreadable") % {path: puppetfile_path.inspect} empty_load_output end end def parse_module_definition(name, info) # The only valid (deprecated) way a module can be defined with a # non-hash info is if it is a Forge module. 
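        # For instance (illustrative, hypothetical module name), a caller that
        # still passes a bare version string:
        #
        #   parse_module_definition('acme/mymodule', '1.2.3')
        #
        # has its info converted here to { type: 'forge', version: '1.2.3' }.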
if !info.is_a?(Hash) info = { type: 'forge', version: info } end info[:overrides] = @overrides if @default_branch_override info[:default_branch_override] = @default_branch_override end spec_deletable = false if install_path = info.delete(:install_path) install_path = resolve_path(@basedir, install_path) validate_install_path(install_path, name) else install_path = @moduledir spec_deletable = true end return [ install_path, info, spec_deletable ] end def filter_modules(modules, exclude_regex) modules.reject { |mod| mod.name =~ /#{exclude_regex}/ } end # @param [Array] modules def validate_no_duplicate_names(modules) dupes = modules .group_by { |mod| mod.name } .select { |_, mods| mods.size > 1 } .map(&:first) unless dupes.empty? msg = _('Puppetfiles cannot contain duplicate module names.') msg += ' ' msg += _("Remove the duplicates of the following modules: %{dupes}" % { dupes: dupes.join(' ') }) raise R10K::Error.new(msg) end end def resolve_path(base, path) if Pathname.new(path).absolute? cleanpath(path) else cleanpath(File.join(base, path)) end end def validate_install_path(path, modname) unless /^#{Regexp.escape(@basedir)}.*/ =~ path raise R10K::Error.new("Puppetfile cannot manage content '#{modname}' outside of containing environment: #{path} is not within #{@basedir}") end true end def determine_managed_directories(managed_content) managed_content.keys.reject { |dir| dir == @basedir } end # Returns an array of the full paths to all the content being managed. # @return [Array] def determine_desired_contents(managed_content) managed_content.flat_map do |install_path, mods| mods.collect { |mod| File.join(install_path, mod.name) } end end def determine_purge_exclusions(managed_dirs) if environment && environment.respond_to?(:desired_contents) managed_dirs + environment.desired_contents else managed_dirs end end # .cleanpath is as close to a canonical path as we can do without touching # the filesystem. The .realpath methods will choke if some of the # intermediate paths are missing, even though in some cases we will create # them later as needed. 
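      #
      # @example (illustrative; the path does not need to exist on disk)
      #   cleanpath('/etc/puppetlabs/code/./environments/../environments/production')
      #     #=> "/etc/puppetlabs/code/environments/production"
      #   # Pathname#realpath would instead raise Errno::ENOENT for a missing path.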
def cleanpath(path) Pathname.new(path).cleanpath.to_s end # For testing purposes only def puppetfile_content(path) File.read(path) end end end end r10k-4.0.2/lib/r10k/module_loader/puppetfile/000077500000000000000000000000001460033767200206155ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/module_loader/puppetfile/dsl.rb000066400000000000000000000017461460033767200217340ustar00rootroot00000000000000module R10K module ModuleLoader class Puppetfile class DSL # A barebones implementation of the Puppetfile DSL # # @api private def initialize(librarian, metadata_only: false) @librarian = librarian @metadata_only = metadata_only end def mod(name, args = nil) if args.is_a?(Hash) opts = args else opts = { type: 'forge', version: args } end if @metadata_only @librarian.add_module_metadata(name, opts) else @librarian.add_module(name, opts) end end def forge(location) @librarian.set_forge(location) end def moduledir(location) @librarian.set_moduledir(location) end def method_missing(method, *args) raise NoMethodError, _("unrecognized declaration '%{method}'") % {method: method} end end end end end r10k-4.0.2/lib/r10k/puppetfile.rb000066400000000000000000000141631460033767200163340ustar00rootroot00000000000000require 'thread' require 'pathname' require 'r10k/module' require 'r10k/util/purgeable' require 'r10k/errors' require 'r10k/content_synchronizer' require 'r10k/module_loader/puppetfile/dsl' require 'r10k/module_loader/puppetfile' module R10K # Deprecated, use R10K::ModuleLoader::Puppetfile#load to load content, # provide the `:modules` key of the returned Hash to # R10K::ContentSynchronizer (either the `serial_sync` or `concurrent_sync`) # and the remaining keys (`:managed_directories`, `:desired_contents`, and # `:purge_exclusions`) to R10K::Util::Cleaner. class Puppetfile include R10K::Settings::Mixin def_setting_attr :pool_size, 4 include R10K::Logging # @!attribute [r] forge # @return [String] The URL to use for the Puppet Forge attr_reader :forge # @!attribute [r] basedir # @return [String] The base directory that contains the Puppetfile attr_reader :basedir # @!attribute [r] environment # @return [R10K::Environment] Optional R10K::Environment that this Puppetfile belongs to. attr_reader :environment # @!attribute [rw] force # @return [Boolean] Overwrite any locally made changes attr_accessor :force # @!attribute [r] overrides # @return [Hash] Various settings overridden from normal configs attr_reader :overrides # @!attribute [r] loader # @return [R10K::ModuleLoader::Puppetfile] The internal module loader attr_reader :loader # @param [String] basedir # @param [Hash, String, nil] options_or_moduledir The directory to install the modules or a Hash of options. # Usage as moduledir is deprecated. Only use as options, defaults to nil # @param [String, nil] puppetfile_path Deprecated - The path to the Puppetfile, defaults to nil # @param [String, nil] puppetfile_name Deprecated - The name of the Puppetfile, defaults to nil # @param [Boolean, nil] force Deprecated - Shall we overwrite locally made changes? def initialize(basedir, options_or_moduledir = nil, deprecated_path_arg = nil, deprecated_name_arg = nil, deprecated_force_arg = nil) @basedir = basedir if options_or_moduledir.is_a? 
Hash options = options_or_moduledir deprecated_moduledir_arg = nil else options = {} deprecated_moduledir_arg = options_or_moduledir end @force = deprecated_force_arg || options.delete(:force) || false @moduledir = deprecated_moduledir_arg || options.delete(:moduledir) || File.join(basedir, 'modules') puppetfile_name = deprecated_name_arg || options.delete(:puppetfile_name) || 'Puppetfile' puppetfile_path = deprecated_path_arg || options.delete(:puppetfile_path) @puppetfile = puppetfile_path || puppetfile_name @environment = options.delete(:environment) @overrides = options.delete(:overrides) || {} @default_branch_override = @overrides.dig(:environments, :default_branch_override) @forge = 'forgeapi.puppet.com' @loader = ::R10K::ModuleLoader::Puppetfile.new( basedir: @basedir, moduledir: @moduledir, puppetfile: @puppetfile, overrides: @overrides, environment: @environment ) @loaded_content = { modules: [], managed_directories: [], desired_contents: [], purge_exclusions: [] } @loaded = false end # @param [String] default_branch_override The default branch to use # instead of one specified in the module declaration, if applicable. # Deprecated, use R10K::ModuleLoader::Puppetfile directly and pass # the default_branch_override as an option on initialization. def load(default_branch_override = nil) if self.loaded? return @loaded_content else if !File.readable?(puppetfile_path) logger.debug _("Puppetfile %{path} missing or unreadable") % {path: puppetfile_path.inspect} else self.load!(default_branch_override) end end end # @param [String] default_branch_override The default branch to use # instead of one specified in the module declaration, if applicable. # Deprecated, use R10K::ModuleLoader::Puppetfile directly and pass # the default_branch_override as an option on initialization. def load!(default_branch_override = nil) if default_branch_override && (default_branch_override != @default_branch_override) logger.warn("Mismatch between passed and initialized default branch overrides, preferring passed value.") @loader.default_branch_override = default_branch_override end @loaded_content = @loader.load! @loaded = true @loaded_content end def loaded? @loaded end def modules @loaded_content[:modules] end # @see R10K::ModuleLoader::Puppetfile#add_module for upcoming signature changes def add_module(name, args) @loader.add_module(name, args) end def set_moduledir(dir) @loader.set_moduledir(dir) end def set_forge(forge) @loader.set_forge(forge) end def moduledir @loader.moduledir end def puppetfile_path @loader.puppetfile_path end def environment=(env) @loader.environment = env @environment = env end include R10K::Util::Purgeable def managed_directories self.load @loaded_content[:managed_directories] end # Returns an array of the full paths to all the content being managed. 
# @note This implements a required method for the Purgeable mixin # @return [Array] def desired_contents self.load @loaded_content[:desired_contents] end def purge_exclusions self.load @loaded_content[:purge_exclusions] end def accept(visitor) pool_size = self.settings[:pool_size] if pool_size > 1 R10K::ContentSynchronizer.concurrent_accept(modules, visitor, self, pool_size, logger) else R10K::ContentSynchronizer.serial_accept(modules, visitor, self) end end def sync pool_size = self.settings[:pool_size] if pool_size > 1 R10K::ContentSynchronizer.concurrent_sync(modules, pool_size, logger) else R10K::ContentSynchronizer.serial_sync(modules) end end private def real_basedir Pathname.new(basedir).cleanpath.to_s end DSL = R10K::ModuleLoader::Puppetfile::DSL end end r10k-4.0.2/lib/r10k/settings.rb000066400000000000000000000266501460033767200160230ustar00rootroot00000000000000require 'etc' module R10K module Settings require 'r10k/settings/container' require 'r10k/settings/mixin' require 'r10k/settings/collection' require 'r10k/settings/definition' require 'r10k/settings/list' class << self # Path to puppet executable attr_accessor :puppet_path # Path to puppet.conf attr_accessor :puppet_conf end def self.git_settings R10K::Settings::Collection.new(:git, [ Definition.new(:default_ref, { :desc => "User-defined default ref from which to deploy modules when not otherwise specified; nil unless configured via the r10k.yaml config.", :default => nil}), EnumDefinition.new(:provider, { :desc => "The Git provider to use. Valid values: 'shellgit', 'rugged'", :normalize => lambda { |input| input.to_sym }, :enum => [:shellgit, :rugged], }), Definition.new(:username, { :desc => "The username to use for Git SSH remotes that do not specify a user. Only used by the 'rugged' Git provider. Default: the current user", :default => lambda { Etc.getlogin }, }), Definition.new(:private_key, { :desc => "The path to the SSH private key for Git SSH remotes. Only used by the 'rugged' Git provider.", }), Definition.new(:oauth_token, { :desc => "The path to a token file for Git OAuth remotes. Only used by the 'rugged' Git provider." }), Definition.new(:github_app_id, { :desc => "The Github App id for Git SSL remotes. Only used by the 'rugged' Git provider." }), Definition.new(:github_app_key, { :desc => "The Github App private key for Git SSL remotes. Only used by the 'rugged' Git provider." }), Definition.new(:github_app_ttl, { :desc => "The ttl expiration for SSL tokens. Only used by the 'rugged' Git provider.", :default => "120", }), URIDefinition.new(:proxy, { :desc => "An optional proxy server to use when interacting with Git sources via HTTP(S).", :default => :inherit, }), List.new(:repositories, lambda { R10K::Settings::Collection.new(nil, [ Definition.new(:remote, { :desc => "Remote source that repository-specific settings should apply to.", }), Definition.new(:private_key, { :desc => "The path to the SSH private key for Git SSH remotes. Only used by the 'rugged' Git provider.", :default => :inherit, }), Definition.new(:oauth_token, { :desc => "The path to a token file for Git OAuth remotes. Only used by the 'rugged' Git provider.", :default => :inherit }), Definition.new(:github_app_id, { :desc => "The Github App id for Git SSL remotes. Only used by the 'rugged' Git provider.", :default => :inherit }), Definition.new(:github_app_key, { :desc => "The Github App private key for Git SSL remotes. 
Only used by the 'rugged' Git provider.", :default => :inherit }), Definition.new(:github_app_ttl, { :desc => "The ttl expiration for Git SSL tokens. Only used by the 'rugged' Git provider.", :default => :inherit }), URIDefinition.new(:proxy, { :desc => "An optional proxy server to use when interacting with Git sources via HTTP(S).", :default => :inherit, }), Definition.new(:ignore_branch_prefixes, { :desc => "Array of strings used to prefix branch names that will not be deployed as environments.", }), ]) }, { :desc => "Repository specific configuration.", :default => [], }), ]) end def self.forge_settings R10K::Settings::Collection.new(:forge, [ URIDefinition.new(:proxy, { :desc => "An optional proxy server to use when downloading modules from the forge.", :default => :inherit, }), URIDefinition.new(:baseurl, { :desc => "The URL to the Puppet Forge to use for downloading modules." }), Definition.new(:authorization_token, { :desc => "The token for Puppet Forge authorization. Leave blank for unauthorized or license-based connections." }), Definition.new(:allow_puppetfile_override, { :desc => "Whether to use `forge` declarations in the Puppetfile as an override of `baseurl`.", :default => false, :validate => lambda do |value| unless !!value == value raise ArgumentError, "`allow_puppetfile_override` can only be a boolean value, not '#{value}'" end end }) ]) end def self.deploy_settings R10K::Settings::Collection.new(:deploy, [ Definition.new(:write_lock, { :desc => "Whether r10k deploy actions should be locked out in case r10k is being managed by another application. The value should be a string containing the reason for the write lock.", :validate => lambda do |value| if value && !value.is_a?(String) raise ArgumentError, "The write_lock setting should be a string containing the reason for the write lock, not a #{value.class}" end end }), EnumDefinition.new(:purge_levels, { :desc => "Controls how aggressively r10k will purge unmanaged content from the target directory. Should be a list of values indicating at what levels unmanaged content should be purged. Options are 'deployment', 'environment', and 'puppetfile'. For backwards compatibility, the default is ['deployment', 'puppetfile'].", :multi => true, :enum => [:deployment, :environment, :puppetfile], :default => [:deployment, :puppetfile], :normalize => lambda do |input| if input.respond_to?(:collect) input.collect { |val| val.to_sym } else # Convert single values to a list of one symbolized value. [input.to_sym] end end, }), Definition.new(:purge_allowlist, { :desc => "A list of filename patterns to be excluded from any purge operations. Patterns are matched relative to the root of each deployed environment, if you want a pattern to match recursively you need to use the '**' glob in your pattern. Basic shell style globs are supported.", :default => [], }), Definition.new(:generate_types, { :desc => "Controls whether to generate puppet types after deploying an environment. Defaults to false.", :default => false, :normalize => lambda do |input| input.to_s == 'true' end, }), Definition.new(:puppet_path, { :desc => "Path to puppet executable. Defaults to /opt/puppetlabs/bin/puppet.", :default => '/opt/puppetlabs/bin/puppet', :validate => lambda do |value| unless File.executable? value raise ArgumentError, "The specified puppet executable #{value} is not executable" end end }), Definition.new(:puppet_conf, { :desc => "Path to puppet.conf. 
Defaults to /etc/puppetlabs/puppet/puppet.conf.", :default => '/etc/puppetlabs/puppet/puppet.conf', :validate => lambda do |value| unless File.readable? value raise ArgumentError, "The specified puppet.conf #{value} is not readable" end end }), Definition.new(:exclude_spec, { :desc => "Whether or not to deploy the spec dir of a module. Defaults to true.", :default => true, :validate => lambda do |value| unless !!value == value raise ArgumentError, "`exclude_spec` can only be a boolean value, not '#{value}'" end end })]) end def self.logging_settings R10K::Settings::Collection.new(:logging, [ Definition.new(:level, { desc: 'What logging level should R10k run on if not specified at runtime.', validate: lambda do |value| if R10K::Logging.parse_level(value).nil? raise ArgumentError, "`level` must be a valid log level. Valid levels are #{R10K::Logging::LOG_LEVELS.map(&:downcase).inspect}" end end }), Definition.new(:outputs, { desc: 'Additional log outputs to use.', validate: lambda do |value| unless value.is_a?(Array) raise ArgumentError, "The `outputs` setting should be an array of outputs, not a #{value.class}" end end }), Definition.new(:disable_default_stderr, { desc: 'Disable the default stderr logging output', default: false, validate: lambda do |value| unless !!value == value raise ArgumentError, "`disable_default_stderr` can only be a boolean value, not '#{value}'" end end }) ]) end def self.global_settings R10K::Settings::Collection.new(:global, [ Definition.new(:sources, { :desc => "Where r10k should retrieve sources when deploying environments. Only used for r10k environment deployment.", }), Definition.new(:purgedirs, { :desc => "The purgedirs setting was deprecated in r10k 1.0.0 and is no longer respected.", }), Definition.new(:cachedir, { :desc => "Where r10k should store cached Git repositories.", }), Definition.new(:postrun, { :desc => "The command r10k should run after deploying environments or modules.", :validate => lambda do |value| if !value.is_a?(Array) raise ArgumentError, "The postrun setting should be an array of strings, not a #{value.class}" end end }), Definition.new(:pool_size, { :desc => "The amount of threads used to concurrently install modules. The default value is 1: install one module at a time.", :default => 4, :validate => lambda do |value| if !value.is_a?(Integer) raise ArgumentError, "The pool_size setting should be an integer, not a #{value.class}" end if !(value > 0) raise ArgumentError, "The pool_size setting should be greater than zero." end end }), URIDefinition.new(:proxy, { :desc => "Proxy to use for all r10k operations which occur over HTTP(S).", :default => lambda { [ ENV['HTTPS_PROXY'], ENV['https_proxy'], ENV['HTTP_PROXY'], ENV['http_proxy'] ].find { |value| value } }, }), R10K::Settings.forge_settings, R10K::Settings.git_settings, R10K::Settings.deploy_settings, R10K::Settings.logging_settings ]) end end end r10k-4.0.2/lib/r10k/settings/000077500000000000000000000000001460033767200154655ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/settings/collection.rb000066400000000000000000000061261460033767200201520ustar00rootroot00000000000000require 'r10k/settings/helpers' require 'r10k/settings/definition' require 'r10k/util/setopts' require 'r10k/util/symbolize_keys' require 'r10k/errors' module R10K module Settings # Define a group of settings, which can be single definitions or nested # collections. 
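    #
    # A minimal sketch of how a collection is built and evaluated; the :example
    # group and :color setting below are hypothetical, not shipped r10k settings.
    #
    # @example
    #   group = R10K::Settings::Collection.new(:example, [
    #     R10K::Settings::Definition.new(:color, :default => 'auto')
    #   ])
    #   group.evaluate('color' => 'never') #=> {:color => "never"}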
class Collection include R10K::Settings::Helpers # @!attribute [r] name # @return [String] The name of this collection attr_reader :name # @param name [Symbol] The name of the collection # @param settings [Array] All settings in this collection def initialize(name, settings) @name = name @settings = {} # iterate through settings and adopt them settings.each do |s| s.parent = self @settings[s.name] = s end end # Assign new values, perform validation checks, and return the final # values for this collection def evaluate(newvalues) assign(newvalues) validate resolve end # Assign a hash of values to the settings in this collection. # # If the passed hash contains any invalid settings values, the names # of those settings are stored for use in the {#validate} method. # # @param newvalues [Hash] # @return [void] def assign(newvalues) return if newvalues.nil? R10K::Util::SymbolizeKeys.symbolize_keys!(newvalues) @settings.each_pair do |name, setting| if newvalues.key?(name) setting.assign(newvalues[name]) end end end # Validate all settings and return validation errors # # @return [nil, Hash] If all validation passed nil will be returned; if # validation failed then a hash of those errors will be returned. def validate errors = {} @settings.each_pair do |name, setting| begin setting.validate rescue => error errors[name] = error end end if !errors.empty? if @name msg = _("Validation failed for '%{name}' settings group") % {name: @name} else msg = _("Validation failed for settings group") end raise ValidationError.new(msg, :errors => errors) end end # Evaluate all settings and return a frozen hash of the final values. # @return [Hash] def resolve rv = {} @settings.each_pair do |name, setting| rv[name] = setting.resolve end rv.freeze end # Access individual settings via a Hash-like interface. def [](name) @settings[name] end class ValidationError < R10K::Error attr_reader :errors def initialize(mesg, options = {}) super @errors = options[:errors] end def format struct = [] struct << "#{message}:" @errors.each_pair do |name, nested| struct << indent(structure_exception(name, nested)) end struct.join("\n") end end end end end r10k-4.0.2/lib/r10k/settings/container.rb000066400000000000000000000045421460033767200200010ustar00rootroot00000000000000require 'set' # Defines a collection for application settings # # This implements a hierarchical interface to application settings. Containers # can define an optional parent container that will be used for default options # if those options aren't set on the given container. class R10K::Settings::Container # @!attribute [r] valid_keys # @return [Set] All valid keys defined on the container or parent container. attr_accessor :valid_keys # @param parent [R10K::Settings::Container] An optional parent container def initialize(parent = nil) @parent = parent @valid_keys = Set.new @settings = {} end # Look up a value in the container. The lookup checks the current container, # and then falls back to the parent container if it's given. # # @param key [Symbol] The lookup key # # @return [Object, nil] The retrieved value if present. # # @raise [R10K::Settings::Container::InvalidKey] If the looked up key isn't # a valid key. def [](key) validate_key! 
key if @settings[key] @settings[key] elsif @parent && (pkey = @parent[key]) begin @settings[key] = pkey.dup rescue TypeError @settings[key] = pkey end @settings[key] end end # Set a value on the container # # @param key [Symbol] The lookup key # @param value [Object] The value to store in the container # # @raise [R10K::Settings::Container::InvalidKey] If the looked up key isn't # a valid key. def []=(key, value) validate_key! key @settings[key] = value end # Define a valid container key # # @note This should only be used by {#R10K::Settings::ClassSettings} # # @param key [Symbol] # @return [void] def add_valid_key(key) @valid_keys.add(key) end # Determine if a key is a valid setting. # # @param key [Symbol] # # @return [true, false] def valid_key?(key) if @valid_keys.include?(key) true elsif @parent and @parent.valid_key?(key) @valid_keys.add(key) true end end # Clear all existing settings in this container. Valid settings are left alone. # @return [void] def reset! @settings = {} end private def validate_key!(key) unless valid_key?(key) raise InvalidKey, _("Key %{key} is not a valid key") % {key: key} end end # @api private class InvalidKey < StandardError; end end r10k-4.0.2/lib/r10k/settings/definition.rb000066400000000000000000000077241460033767200201540ustar00rootroot00000000000000require 'r10k/settings/helpers' require 'r10k/util/setopts' module R10K module Settings # Define a single setting and additional attributes like descriptions, # default values, and validation. class Definition require 'r10k/settings/uri_definition' require 'r10k/settings/enum_definition' include R10K::Settings::Helpers include R10K::Util::Setopts # @!attribute [r] name # @return [String] The name of this setting attr_reader :name # @!attribute [r] value # @return [Object] An explicitly set value. This should only be used if # an optional default value should not be used; otherwise use {#resolve}. attr_reader :value # @!attribute [r] desc # @return [String] An optional documentation string for this setting. attr_reader :desc # @param name [Symbol] The name of the setting for this definition. # @param opts [Hash] Additional options for this definition to control # validation, normalization, and the like. # # @option opts [Proc, Object] :default An optional proc or object for # this setting. If no value has been set and the default is a Proc then # it will be called and the result will be returned, otherwise if the # value is not set the default value itself is returned. # # @options opts [Proc] :validate An optional proc that can be used to # validate an assigned value. Default values are not assigned. # # @options opts [Proc] :normalize An optional proc that can be used to # normalize an explicitly assigned value. def initialize(name, opts = {}) @name = name setopts(opts, allowed_initialize_opts) end # Assign new values, perform validation checks, and return the final # values for this collection def evaluate(newvalue) assign(newvalue) validate resolve end # Store an explicit value for this definition # # If a :normalize hook has been given then it will be called with the # new value and the returned value will be stored. # # @param newvalue [Object] The value to store for this setting # @return [void] def assign(newvalue) if @normalize @value = @normalize.call(newvalue) else @value = newvalue end end # Call any validation hooks for this definition. # # The :validate hook will be called if the hook has been set and an # explicit value has been assigned to this definition. 
Validation # failures should be indicated by the :validate hook raising an exception. # # @raise [Exception] An exception class indicating that validation failed. # @return [nil] def validate if @value && @validate @validate.call(@value) end nil end # Compute the final value of this setting. If a value has not been # assigned the default value will be used. # # @return [Object] The final value of this definition. def resolve if !@value.nil? @value elsif !@default.nil? if @default == :inherit # walk all the way up to root, starting with grandparent ancestor = parent while ancestor = ancestor.parent return ancestor[@name].resolve if ancestor.respond_to?(:[]) && ancestor[@name] end elsif @default.is_a?(Proc) @default.call else @default end end end private # Subclasses may define additional params that are accepted at # initialization; they should override this method to add any # additional fields that should be respected. def allowed_initialize_opts { :desc => true, :default => true, :validate => true, :normalize => true, } end end end end r10k-4.0.2/lib/r10k/settings/enum_definition.rb000066400000000000000000000016121460033767200211660ustar00rootroot00000000000000require 'r10k/settings/definition' module R10K module Settings class EnumDefinition < R10K::Settings::Definition def validate if @value if @multi && @value.respond_to?(:select) invalid = @value.select { |val| !@enum.include?(val) } if invalid.size > 0 raise ArgumentError, _("Setting %{name} may only contain %{enums}; the disallowed values %{invalid} were present") % {name: @name, enums: @enum.inspect, invalid: invalid.inspect} end else if !@enum.include?(@value) raise ArgumentError, _("Setting %{name} should be one of %{enums}, not '%{value}'") % {name: @name, enums: @enum.inspect, value: @value} end end end end private def allowed_initialize_opts super.merge({:enum => true, :multi => true}) end end end end r10k-4.0.2/lib/r10k/settings/helpers.rb000066400000000000000000000020371460033767200174560ustar00rootroot00000000000000require 'r10k/errors' require 'r10k/settings/collection' module R10K module Settings module Helpers def self.included(klass) klass.send(:include, InstanceMethods) klass.send(:extend, ClassMethods) end module InstanceMethods # Assign a parent collection to this setting. Parent may only be # assigned once. # # @param new_parent [R10K::Settings::Collection] Parent collection def parent=(new_parent) unless @parent.nil? raise R10K::Error.new(_("%{class} instances cannot be reassigned to a new parent.") % {class: self.class} ) end unless new_parent.is_a?(R10K::Settings::Collection) || new_parent.is_a?(R10K::Settings::List) raise R10K::Error.new(_("%{class} instances may only belong to a settings collection or list.") % {class: self.class} ) end @parent = new_parent end def parent @parent end end module ClassMethods end end end end r10k-4.0.2/lib/r10k/settings/list.rb000066400000000000000000000061201460033767200167640ustar00rootroot00000000000000require 'r10k/settings/helpers' require 'r10k/settings/collection' require 'r10k/errors' require 'r10k/util/setopts' module R10K module Settings # A container for an arbitrarily long list of other settings. class List include R10K::Settings::Helpers include R10K::Util::Setopts # @!attribute [r] name # @return [String] The name of this collection attr_reader :name # @param name [Symbol] The name of the setting for this definition. 
# @param item_proc [#call] An object whose #call method will return a # new instance of another R10K::Settings class to hold each item # added to this list. # @param opts [Hash] Additional options for this definition to control # validation, normalization, and the like. # # @options opts [String] :desc Extended description of this setting. # @options opts [Array] :default Initial/default contents of the list. def initialize(name, item_proc, opts = {}) @name = name @item_proc = item_proc @items = [] setopts(opts, allowed_initialize_opts) end # Takes an array of key/value pairs and assigns each into a # new instance created by invoking @item_proc. # # @param items [Array] List of items to add to this list. def assign(items) return if items.nil? items.each do |values| new_item = @item_proc.call new_item.parent = self new_item.assign(values) @items << new_item end end # Validate all items in the list and return validation errors # # @return [nil, Hash] If all validation passed nil will be returned; if # validation failed then a hash of those errors will be returned. def validate errors = {} @items.each_with_index do |item, idx| begin item.validate rescue => error errors[idx+1] = error end end if !errors.empty? raise ValidationError.new(_("Validation failed for '%{name}' settings list") % {name: @name}, :errors => errors) end end # Evaluate all items in the list and return a frozen array of the final values. # @return [Array] def resolve @items.collect { |item| item.resolve }.freeze end class ValidationError < R10K::Error attr_reader :errors def initialize(mesg, options = {}) super @errors = options[:errors] end def format struct = [] struct << "#{message}:" @errors.each do |item, error| struct << indent(structure_exception("Item #{item}", error)) end struct.join("\n") end end private # Subclasses may define additional params that are accepted at # initialization; they should override this method to add any # additional fields that should be respected. def allowed_initialize_opts { :desc => true, :default => true, } end end end end r10k-4.0.2/lib/r10k/settings/loader.rb000066400000000000000000000060031460033767200172570ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/errors' require 'yaml' module R10K module Settings # Look for the r10k configuration file in standard locations. # # r10k.yaml is checked for in the following locations: # - $PWD/r10k.yaml # - /etc/puppetlabs/r10k/r10k.yaml # - /etc/r10k.yaml class Loader def self.search(override = nil) new.search(override) end include R10K::Logging attr_reader :loadpath CONFIG_FILE = 'r10k.yaml' DEFAULT_LOCATION = File.join('/etc/puppetlabs/r10k', CONFIG_FILE) OLD_DEFAULT_LOCATION = File.join('/etc', CONFIG_FILE) def initialize @loadpath = [] populate_loadpath end # Find the first valid config file. # # @param override [String, nil] An optional path that when is truthy # will be preferred over all other files, to make it easy to # optionally supply an explicit configuration file that will always # be used when set. # @return [String, nil] The path to the first valid configfile, or nil # if no file was found. def search(override = nil) return override if override # If both default files are present, issue a warning. if (File.file? DEFAULT_LOCATION) && (File.file? 
OLD_DEFAULT_LOCATION)
          logger.warn _("Both %{default_path} and %{old_default_path} configuration files exist.") % {default_path: DEFAULT_LOCATION, old_default_path: OLD_DEFAULT_LOCATION}
          logger.warn _("%{default_path} will be used.") % {default_path: DEFAULT_LOCATION}
        end

        path = @loadpath.find {|filename| File.file? filename}

        if path == OLD_DEFAULT_LOCATION
          logger.warn _("The r10k configuration file at %{old_default_path} is deprecated.") % {old_default_path: OLD_DEFAULT_LOCATION}
          logger.warn _("Please move your r10k configuration to %{default_path}.") % {default_path: DEFAULT_LOCATION}
        end

        path
      end

      def read(override = nil)
        path = search(override)

        if path.nil?
          raise ConfigError, _("No configuration file given, no config file found in current directory, and no global config present")
        end

        begin
          contents = ::YAML.load_file(path)
        rescue => e
          raise ConfigError, _("Couldn't load config file: %{error_msg}") % {error_msg: e.message}
        end

        if !contents
          raise ConfigError, _("File exists at %{path} but doesn't contain any YAML") % {path: path}
        end

        R10K::Util::SymbolizeKeys.symbolize_keys!(contents, true)

        contents
      end

      private

      def populate_loadpath
        # Add the current directory for r10k.yaml
        @loadpath << File.join(Dir.getwd, CONFIG_FILE)
        # Add the AIO location for r10k.yaml
        @loadpath << DEFAULT_LOCATION
        # Add the old default location last.
        @loadpath << OLD_DEFAULT_LOCATION
        @loadpath
      end

      class ConfigError < R10K::Error
      end
    end
  end
end
r10k-4.0.2/lib/r10k/settings/mixin.rb000066400000000000000000000026061460033767200171420ustar00rootroot00000000000000module R10K::Settings::Mixin

  def self.included(klass)
    klass.send(:include, InstanceMethods)
    klass.send(:extend, ClassMethods)
  end

  module InstanceMethods
    # @return [R10K::Settings::Container] A settings container for the given instance.
    def settings
      @settings ||= R10K::Settings::Container.new(self.class.settings)
    end
  end

  module ClassMethods

    # Define a setting and optional default on the extending class.
    #
    # @param key [Symbol]
    # @param default [Object]
    #
    # @return [void]
    def def_setting_attr(key, default = nil)
      defaults.add_valid_key(key)
      defaults[key] = default if default
    end

    # A singleton settings container for storing immutable default configuration
    # on the extending class.
    #
    # @return [R10K::Settings::Container]
    def defaults
      @defaults ||= R10K::Settings::Container.new
    end

    # A singleton settings container for storing manual setting configurations
    # on the extending class.
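    #
    # A rough usage sketch of this mixin as a whole; the `PoolSize` class and
    # its :pool_size setting below are hypothetical and only for illustration:
    #
    # @example Declaring a default and overriding it manually
    #   class PoolSize
    #     include R10K::Settings::Mixin
    #     def_setting_attr :pool_size, 4
    #   end
    #
    #   PoolSize.settings[:pool_size]      #=> 4, falls through to the defaults container
    #   PoolSize.settings[:pool_size] = 8  # manual configuration stored on the class
    #   PoolSize.new.settings[:pool_size]  #=> 8, instance containers chain up to the class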
# # @return [R10K::Settings::Container] def settings @settings ||= R10K::Settings::Container.new(defaults) end # Allow subclasses to use the settings of the parent class as default values # # @return [void] def inherited(subclass) subclass.instance_eval do @settings = R10K::Settings::Container.new(superclass.settings) end end end end r10k-4.0.2/lib/r10k/settings/uri_definition.rb000066400000000000000000000007101460033767200210170ustar00rootroot00000000000000require 'r10k/settings/definition' require 'uri' module R10K module Settings class URIDefinition < R10K::Settings::Definition def validate if @value begin URI.parse(@value) rescue URI::Error raise ArgumentError, _("Setting %{name} requires a URL but '%{value}' could not be parsed as a URL") % {name: @name, value: @value} end end super end end end end r10k-4.0.2/lib/r10k/source.rb000066400000000000000000000016471460033767200154620ustar00rootroot00000000000000require 'r10k' require 'r10k/keyed_factory' require 'r10k/util/symbolize_keys' module R10K module Source def self.factory @factory ||= R10K::KeyedFactory.new end def self.register(key, klass) factory.register(key, klass) end def self.retrieve(key) factory.retrieve(key) end def self.generate(type, basedir, name, options = {}) factory.generate(type, basedir, name, options) end def self.from_hash(name, hash) R10K::Util::SymbolizeKeys.symbolize_keys!(hash) basedir = hash.delete(:basedir) type = hash.delete(:type) type = type.is_a?(String) ? type.to_sym : type generate(type, name, basedir, hash) end require 'r10k/source/base' require 'r10k/source/hash' require 'r10k/source/git' require 'r10k/source/svn' require 'r10k/source/yaml' require 'r10k/source/yamldir' require 'r10k/source/exec' end end r10k-4.0.2/lib/r10k/source/000077500000000000000000000000001460033767200151255ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/source/base.rb000066400000000000000000000056761460033767200164020ustar00rootroot00000000000000require 'r10k/logging' # This class defines a common interface for source implementations. # # @since 1.3.0 class R10K::Source::Base include R10K::Logging # @!attribute [r] basedir # @return [String] The path this source will place environments in attr_reader :basedir # @!attribute [r] name # @return [String] The short name for this environment source attr_reader :name # @!attribute [r] prefix # @return [String, nil] The prefix for the environments basedir. # Defaults to nil. attr_reader :prefix # @!attribute [r] puppetfile_name # @return [String, nil] The Name of the puppetfile # Defaults to nil. attr_reader :puppetfile_name # Initialize the given source. # # @param name [String] The identifier for this source. # @param basedir [String] The base directory where the generated environments will be created. # @param options [Hash] An additional set of options for this source. The # semantics of this hash may depend on the source implementation. # # @option options [Boolean, String] :prefix If a String this becomes the prefix. # If true, will use the source name as the prefix. All sources should respect this option. # Defaults to false for no environment prefix. # @option options [String] :strip_component If a string, this value will be # removed from the beginning of each generated environment's name, if # present. If the string is contained within two "/" characters, it will # be treated as a regular expression. 
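  #
  # For illustration, a concrete subclass such as the Git source defined later
  # in this library would typically be constructed like this (the source name,
  # basedir, and option values here are examples, not defaults):
  #
  # @example Building a source with a prefix and a stripped component
  #   R10K::Source::Git.new('mysource', '/etc/puppetlabs/code/environments',
  #                         {remote: 'https://git.example.com/puppet/control-repo.git',
  #                          prefix: true,             # environments deploy as "mysource_<branch>"
  #                          strip_component: 'env/'}) # "env/production" deploys as "production"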
def initialize(name, basedir, options = {}) @name = name @basedir = Pathname.new(basedir).cleanpath.to_s @prefix = options.delete(:prefix) @strip_component = options.delete(:strip_component) @puppetfile_name = options.delete(:puppetfile_name) @options = options end # Perform any actions needed for loading environments that may have side # effects. # # Actions done during preloading may include things like updating caches or # performing network queries. If an environment has not been preloaded but # {#environments} is invoked, it should return the best known state of # environments or return an empty list. # # @api public # @abstract # @return [void] def preload! end # Perform actions to reload environments after the `preload!`. Similar # to preload!, and likely to include network queries and rerunning # environment generation. # # @api public # @abstract # @return [void] def reload! end # Enumerate the environments associated with this SVN source. # # @api public # @abstract # @return [Array] An array of environments created # from this source. def environments raise NotImplementedError, _("%{class} has not implemented method %{method}") % {class: self.class, method: __method__} end def accept(visitor) visitor.visit(:source, self) do environments.each do |env| env.accept(visitor) end end end end r10k-4.0.2/lib/r10k/source/exec.rb000066400000000000000000000033221460033767200163760ustar00rootroot00000000000000require 'r10k/util/subprocess' require 'json' require 'yaml' class R10K::Source::Exec < R10K::Source::Hash R10K::Source.register(:exec, self) def initialize(name, basedir, options = {}) unless @command = options[:command] raise ConfigError, _('Environment source %{name} missing required parameter: command') % {name: name} end # We haven't set the environments option yet. We will do that by # overloading the #environments method. super(name, basedir, options) end def environments_hash @environments_hash ||= set_environments_hash(run_environments_command) end private def run_environments_command subproc = R10K::Util::Subprocess.new([@command]) subproc.raise_on_fail = true subproc.logger = self.logger procresult = subproc.execute begin environments = JSON.parse(procresult.stdout) rescue JSON::ParserError => json_err begin environments = YAML.safe_load(procresult.stdout) rescue Psych::SyntaxError => yaml_err raise R10K::Error, _("Error parsing command output for exec source %{name}:\n" \ "Not valid JSON: %{j_msg}\n" \ "Not valid YAML: %{y_msg}\n" \ "Stdout:\n%{out}") % {name: name, j_msg: json_err.message, y_msg: yaml_err.message, out: procresult.stdout} end end unless R10K::Source::Hash.valid_environments_hash?(environments) raise R10K::Error, _("Environment source %{name} command %{cmd} did not return valid environment data.\n" \ 'Returned: %{data}') % {name: name, cmd: @command, data: environments} end # Return the resulting environments hash environments end end r10k-4.0.2/lib/r10k/source/git.rb000066400000000000000000000143071460033767200162420ustar00rootroot00000000000000require 'r10k/git' require 'r10k/environment' require 'r10k/environment/name' # This class implements a source for Git environments. # # A Git source generates environments by locally caching the given Git # repository and enumerating the branches for the Git repository. Branches # are mapped to environments without modification. 
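#
# For illustration, a source of this type is normally declared in r10k.yaml
# roughly as follows (the remote URL and basedir are examples only):
#
#   ---
#   sources:
#     puppet:
#       type: git
#       remote: 'https://git.example.com/puppet/control-repo.git'
#       basedir: '/etc/puppetlabs/code/environments'
#       prefix: false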
#
# @since 1.3.0
class R10K::Source::Git < R10K::Source::Base

  R10K::Source.register(:git, self)
  # Register git as the default source
  R10K::Source.register(nil, self)

  # @!attribute [r] remote
  #   @return [String] The URL to the remote git repository
  attr_reader :remote

  # @!attribute [r] cache
  #   @api private
  #   @return [R10K::Git::Cache] The git cache associated with this source
  attr_reader :cache

  # @!attribute [r] settings
  #   @return [Hash] Additional settings that configure how
  #     the source should behave.
  attr_reader :settings

  # @!attribute [r] invalid_branches
  #   @return [String] How Git branch names that cannot be cleanly mapped to
  #     Puppet environments will be handled.
  attr_reader :invalid_branches

  # @!attribute [r] ignore_branch_prefixes
  #   @return [Array] Array of strings used to filter out repository branches
  #     so that they are not deployed as environments.
  attr_reader :ignore_branch_prefixes

  # @!attribute [r] filter_command
  #   @return [String] Command to run to filter branches
  attr_reader :filter_command

  # Initialize the given source.
  #
  # @param name [String] The identifier for this source.
  # @param basedir [String] The base directory where the generated environments will be created.
  # @param options [Hash] An additional set of options for this source.
  #
  # @option options [Boolean, String] :prefix If a String this becomes the prefix.
  #   If true, will use the source name as the prefix.
  #   Defaults to false for no environment prefix.
  # @option options [String] :remote The URL to the remote git repository
  # @option options [String] :invalid_branches How Git branch names that cannot be
  #   cleanly mapped to Puppet environments will be handled. Defaults to 'correct_and_warn'.
  # @option options [Array] :ignore_branch_prefixes Branch name prefixes that should not
  #   be deployed as environments.
  # @option options [String] :filter_command Command to run to filter branches
  def initialize(name, basedir, options = {})
    super

    @environments = []

    @remote = options[:remote]
    @invalid_branches = (options[:invalid_branches] || 'correct_and_warn')
    @ignore_branch_prefixes = options[:ignore_branch_prefixes]
    @filter_command = options[:filter_command]

    @cache = R10K::Git.cache.generate(@remote)
  end

  # Update the git cache for this git source to get the latest list of environments.
  #
  # @return [void]
  def preload!
    logger.debug _("Fetching '%{remote}' to determine current branches.") % {remote: @remote}
    @cache.sync
  rescue => e
    raise R10K::Error.wrap(e, _("Unable to determine current branches for Git source '%{name}' (%{basedir})") % {name: @name, basedir: @basedir})
  end
  alias fetch_remote preload!

  # Load the git remote and create environments for each branch. If the cache
  # has not been fetched, this will return an empty list.
  #
  # @return [Array]
  def environments
    if not @cache.cached?
      []
    elsif @environments.empty?
      @environments = generate_environments()
    else
      @environments
    end
  end

  def reload!
    @cache.sync!
    @environments = generate_environments()
  end

  def generate_environments
    envs = []
    environment_names.each do |en|
      if en.valid?
        envs << R10K::Environment::Git.new(en.name,
                                           @basedir,
                                           en.dirname,
                                           {remote: remote,
                                            ref: en.original_name,
                                            puppetfile_name: puppetfile_name,
                                            overrides: @options[:overrides]})
      elsif en.correct?
        logger.warn _("Environment %{env_name} contained non-word characters, correcting name to %{corrected_env_name}") % {env_name: en.name.inspect, corrected_env_name: en.dirname}
        envs << R10K::Environment::Git.new(en.name,
                                           @basedir,
                                           en.dirname,
                                           {remote: remote,
                                            ref: en.original_name,
                                            puppetfile_name: puppetfile_name,
                                            overrides: @options[:overrides]})
      elsif en.validate?
logger.error _("Environment %{env_name} contained non-word characters, ignoring it.") % {env_name: en.name.inspect} end end envs end # List all environments that should exist in the basedir for this source # @note This is required by {R10K::Util::Basedir} # @return [Array] def desired_contents environments.map {|env| env.dirname } end def filter_branches_by_regexp(branches, ignore_prefixes) filter = Regexp.new("^#{Regexp.union(ignore_prefixes)}") branches = branches.reject do |branch| result = filter.match(branch) if result logger.warn _("Branch %{branch} filtered out by ignore_branch_prefixes %{ibp}") % {branch: branch, ibp: @ignore_branch_prefixes} end result end branches end def filter_branches_by_command(branches, command) branches.select do |branch| result = system({'GIT_DIR' => @cache.git_dir.to_s, 'R10K_BRANCH' => branch, 'R10K_NAME' => @name.to_s}, command) unless result logger.warn _("Branch `%{name}:%{branch}` filtered out by filter_command %{cmd}") % {name: @name, branch: branch, cmd: command} end result end end private def environment_names opts = {prefix: @prefix, invalid: @invalid_branches, source: @name, strip_component: @strip_component} branch_names = @cache.branches if @ignore_branch_prefixes && !@ignore_branch_prefixes.empty? branch_names = filter_branches_by_regexp(branch_names, @ignore_branch_prefixes) end if @filter_command && !@filter_command.empty? branch_names = filter_branches_by_command(branch_names, @filter_command) end branch_names.map do |branch_name| R10K::Environment::Name.new(branch_name, opts) end end end r10k-4.0.2/lib/r10k/source/hash.rb000066400000000000000000000154511460033767200164030ustar00rootroot00000000000000# This class implements an environment source based on recieving a hash of # environments # # @since 3.4.0 # # DESCRIPTION # # This class implements environments defined by a hash having the following # schema: # # --- # type: object # additionalProperties: # type: object # properties: # type: # type: string # basedir: # type: string # modules: # type: object # additionalProperties: # type: object # moduledir: # type: string # additionalProperties: # type: string # # The top-level keys in the hash are environment names. Keys in individual # environments should be the same as those which would be given to define a # single source in r10k.yaml. Additionally, the "modules" key (and moduledir) # can be used to designate module content for the environment, independent of # the base source parameters. # # Example: # # --- # production: # type: git # remote: 'https://github.com/reidmv/control-repo.git' # ref: '1.0.0' # modules: # geoffwilliams-r_profile: '1.1.0' # geoffwilliams-r_role: '2.0.0' # # development: # type: git # remote: 'https://github.com/reidmv/control-repo.git' # ref: 'master' # modules: # geoffwilliams-r_profile: '1.1.0' # geoffwilliams-r_role: '2.0.0' # # USAGE # # The following is an example implementation class showing how to use the # R10K::Source::Hash abstract base class. Assume an r10k.yaml file such as: # # --- # sources: # proof-of-concept: # type: demo # basedir: '/etc/puppetlabs/code/environments' # # Class implementation: # # class R10K::Source::Demo < R10K::Source::Hash # R10K::Source.register(:demo, self) # # def initialize(name, basedir, options = {}) # # This is just a demo class, so we hard-code an example :environments # # hash here. In a real class, we might do something here such as # # perform an API call to retrieve an :environments hash. 
# options[:environments] = { # 'production' => { # 'remote' => 'https://git.example.com/puppet/control-repo.git', # 'ref' => 'release-141', # 'modules' => { # 'puppetlabs-stdlib' => '6.1.0', # 'puppetlabs-ntp' => '8.1.0', # 'example-myapp1' => { # 'git' => 'https://git.example.com/puppet/example-myapp1.git', # 'ref' => 'v1.3.0', # }, # }, # }, # 'development' => { # 'remote' => 'https://git.example.com/puppet/control-repo.git', # 'ref' => 'master', # 'modules' => { # 'puppetlabs-stdlib' => '6.1.0', # 'puppetlabs-ntp' => '8.1.0', # 'example-myapp1' => { # 'git' => 'https://git.example.com/puppet/example-myapp1.git', # 'ref' => 'v1.3.1', # }, # }, # }, # } # # # All we need to do is supply options with the :environments hash. # # The R10K::Source::Hash parent class takes care of the rest. # super(name, basedir, options) # end # end # # Example output: # # [root@master:~] % r10k deploy environment production -pv # INFO -> Using Puppetfile '/etc/puppetlabs/code/environments/production/Puppetfile' # INFO -> Using Puppetfile '/etc/puppetlabs/code/environments/development/Puppetfile' # INFO -> Deploying environment /etc/puppetlabs/code/environments/production # INFO -> Environment production is now at 74ea2e05bba796918e4ff1803018c526337ef5f3 # INFO -> Deploying Environment content /etc/puppetlabs/code/environments/production/modules/stdlib # INFO -> Deploying Environment content /etc/puppetlabs/code/environments/production/modules/ntp # INFO -> Deploying Environment content /etc/puppetlabs/code/environments/production/modules/myapp1 # INFO -> Deploying Puppetfile content /etc/puppetlabs/code/environments/production/modules/ruby_task_helper # INFO -> Deploying Puppetfile content /etc/puppetlabs/code/environments/production/modules/bolt_shim # INFO -> Deploying Puppetfile content /etc/puppetlabs/code/environments/production/modules/apply_helpers # class R10K::Source::Hash < R10K::Source::Base # @param hash [Hash] A hash to validate. # @return [Boolean] False if the hash is obviously invalid. A true return # means _maybe_ it's valid. def self.valid_environments_hash?(hash) # TODO: more robust schema valiation hash.is_a?(Hash) end # @param name [String] The identifier for this source. # @param basedir [String] The base directory where the generated environments will be created. # @param options [Hash] An additional set of options for this source. The # semantics of this hash may depend on the source implementation. # # @option options [Boolean, String] :prefix If a String this becomes the prefix. # If true, will use the source name as the prefix. All sources should respect this option. # Defaults to false for no environment prefix. # @option options [Hash] :environments The hash definition of environments def initialize(name, basedir, options = {}) super(name, basedir, options) end # Set the environment hash for the source. The environment hash is what the # source uses to generate enviroments. # @param hash [Hash] The hash to sanitize and use as the source's environments. # Should be formatted for use with R10K::Environment#from_hash. def set_environments_hash(hash) @environments_hash = hash.reduce({}) do |memo,(name,opts)| R10K::Util::SymbolizeKeys.symbolize_keys!(opts) memo.merge({ name => opts.merge({ basedir: @basedir, dirname: R10K::Environment::Name.new(name, {prefix: @prefix, source: @name, strip_component: @strip_component}).dirname }) }) end end # Return the sanitized environments hash for this source. 
The environments # hash should contain objects formatted for use with R10K::Environment#from_hash. # If the hash does not exist it will be built based on @options. def environments_hash @environments_hash ||= set_environments_hash(@options.fetch(:environments, {})) end def environments @environments ||= environments_hash.map do |name, hash| R10K::Environment.from_hash(name, hash.merge({overrides: @options[:overrides]})) end end # List all environments that should exist in the basedir for this source # @note This is required by {R10K::Util::Basedir} # @return [Array] def desired_contents environments.map {|env| env.dirname } end end r10k-4.0.2/lib/r10k/source/svn.rb000066400000000000000000000113231460033767200162600ustar00rootroot00000000000000require 'r10k/svn' require 'r10k/environment' require 'r10k/environment/name' require 'r10k/util/setopts' # This class implements a source for SVN environments. # # An SVN source generates environments by enumerating the branches and trunk # for a given SVN remote. SVN repositories must conform to the conventional # SVN repository structure with the directories trunk/, branches/, and # optionally tags/ in the root of the repository. The trunk/ directory is # specifically mapped to the production environment, branches are created # as environments with the name of the given branch. # # @see http://svnbook.red-bean.com/en/1.7/svn.branchmerge.maint.html # @since 1.3.0 class R10K::Source::SVN < R10K::Source::Base R10K::Source.register(:svn, self) # @!attribute [r] remote # @return [String] The URL to the base directory of the SVN repository attr_reader :remote # @!attribute [r] svn_remote # @api private # @return [R10K::SVN::Remote] attr_reader :svn_remote # @!attribute [r] username # @return [String, nil] The SVN username to be passed to the underlying SVN commands # @api private attr_reader :username # @!attribute [r] password # @return [String, nil] The SVN password to be passed to the underlying SVN commands # @api private attr_reader :password # @!attribute [r] ignore_branch_prefixes # @return [Array] Array of strings used to remove repository branches # that will be deployed as environments. attr_reader :ignore_branch_prefixes include R10K::Util::Setopts # Initialize the given source. # # @param name [String] The identifier for this source. # @param basedir [String] The base directory where the generated environments will be created. # @param options [Hash] An additional set of options for this source. # # @option options [Boolean] :prefix Whether to prefix the source name to the # environment directory names. Defaults to false. # @option options [String] :remote The URL to the base directory of the SVN repository # @option options [String] :username The SVN username # @option options [String] :password The SVN password # @option options [String] :puppetfile_name The puppetfile name def initialize(name, basedir, options = {}) super setopts(options, {:remote => :self, :username => :self, :password => :self, :puppetfile_name => :self }) @environments = [] @svn_remote = R10K::SVN::Remote.new(@remote, :username => @username, :password => @password) @ignore_branch_prefixes = options[:ignore_branch_prefixes] end def reload! @environments = generate_environments() end # Enumerate the environments associated with this SVN source. # # @return [Array] An array of environments created # from this source. def environments if @environments.empty? 
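        # Branches are enumerated only once and then memoized; call #reload!
        # to pick up branches created or deleted after the first evaluation.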
@environments = generate_environments() end @environments end # Generate a list of currently available SVN environments # # @todo respect environment name corrections # # @api protected # @return [Array] An array of environments created # from this source. def generate_environments names_and_paths.map do |(branch, path)| options = { :remote => path, :username => @username, :password => @password, :puppetfile_name => puppetfile_name } R10K::Environment::SVN.new(branch.name, @basedir, branch.dirname, options) end end # List all environments that should exist in the basedir for this source # @note This is required by {R10K::Util::Basedir} # @return [Array] def desired_contents @environments.map {|env| env.dirname } end def filter_branches(branches, ignore_prefixes) filter = Regexp.new("^(#{ignore_prefixes.join('|')})") branches = branches.reject do |branch| result = filter.match(branch) if result logger.warn _("Branch %{branch} filtered out by ignore_branch_prefixes %{ibp}") % {branch: branch, ibp: @ignore_branch_prefixes} end result end branches end private def names_and_paths branches = [] opts = {prefix: @prefix, correct: false, validate: false, source: @name, strip_component: @strip_component} branches << [R10K::Environment::Name.new('production', opts), "#{@remote}/trunk"] additional_branch_names = @svn_remote.branches if @ignore_branch_prefixes && !@ignore_branch_prefixes.empty? additional_branch_names = filter_branches(additional_branch_names, @ignore_branch_prefixes) end additional_branch_names.each do |branch| branches << [R10K::Environment::Name.new(branch, opts), "#{@remote}/branches/#{branch}"] end branches end end r10k-4.0.2/lib/r10k/source/yaml.rb000066400000000000000000000012521460033767200164140ustar00rootroot00000000000000class R10K::Source::Yaml < R10K::Source::Hash R10K::Source.register(:yaml, self) def initialize(name, basedir, options = {}) config = options[:config] || '/etc/puppetlabs/r10k/environments.yaml' begin contents = ::YAML.load_file(config) rescue => e raise R10K::ConfigError, _("Couldn't open environments file %{file}: %{err}") % {file: config, err: e.message} end # Set the environments key for the parent class to consume options[:environments] = contents # All we need to do is supply options with the :environments hash. # The R10K::Source::Hash parent class takes care of the rest. super(name, basedir, options) end end r10k-4.0.2/lib/r10k/source/yamldir.rb000066400000000000000000000022541460033767200171160ustar00rootroot00000000000000class R10K::Source::Yamldir < R10K::Source::Hash R10K::Source.register(:yamldir, self) def initialize(name, basedir, options = {}) config = options[:config] || '/etc/puppetlabs/r10k/environments.d' unless File.directory?(config) raise R10K::Deployment::Config::ConfigError, _("Error opening %{dir}: config must be a directory") % {dir: config} end unless File.readable?(config) raise R10K::Deployment::Config::ConfigError, _("Error opening %{dir}: permission denied") % {dir: config} end environment_data = Dir.glob(File.join(config, '*.yaml')).reduce({}) do |memo,path| name = File.basename(path, '.yaml') begin contents = ::YAML.load_file(path) rescue => e raise R10K::Deployment::Config::ConfigError, _("Error loading %{path}: %{err}") % {path: path, err: e.message} end memo.merge({name => contents }) end # Set the environments key for the parent class to consume options[:environments] = environment_data # All we need to do is supply options with the :environments hash. # The R10K::Source::Hash parent class takes care of the rest. 
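    # For illustration, each *.yaml file in that directory defines one
    # environment named after the file's basename. A hypothetical
    # /etc/puppetlabs/r10k/environments.d/production.yaml might contain:
    #
    #   ---
    #   type: git
    #   remote: 'https://git.example.com/puppet/control-repo.git'
    #   ref: 'production'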
super(name, basedir, options) end end r10k-4.0.2/lib/r10k/svn.rb000066400000000000000000000001441460033767200147570ustar00rootroot00000000000000module R10K module SVN require 'r10k/svn/working_dir' require 'r10k/svn/remote' end end r10k-4.0.2/lib/r10k/svn/000077500000000000000000000000001460033767200144335ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/svn/remote.rb000066400000000000000000000026341460033767200162600ustar00rootroot00000000000000require 'r10k/util/subprocess' require 'r10k/util/setopts' # Inspect and interact with SVN remote repositories # # @api private # @since 1.3.0 class R10K::SVN::Remote include R10K::Util::Setopts def initialize(baseurl, opts = {}) @baseurl = baseurl setopts(opts, {:username => :self, :password => :self}) end # @todo validate that the path to trunk exists in the remote def trunk "#{@baseurl}/trunk" end # @todo gracefully handle cases where no branches exist def branches argv = ['ls', "#{@baseurl}/branches"] argv.concat(auth) text = svn(argv) text.lines.map do |line| line.chomp! line.gsub!(%r[/$], '') line end end private # Format authentication information for SVN command args, if applicable def auth auth = [] if @username auth << "--username" << @username auth << "--password" << @password end auth end include R10K::Logging # Wrap SVN commands # # @param argv [Array] # @param opts [Hash] # # @option opts [Pathname] :cwd The directory to run the command in # # @return [String] The stdout from the given command def svn(argv, opts = {}) argv.unshift('svn', '--non-interactive') subproc = R10K::Util::Subprocess.new(argv) subproc.raise_on_fail = true subproc.logger = self.logger subproc.cwd = opts[:cwd] result = subproc.execute result.stdout end end r10k-4.0.2/lib/r10k/svn/working_dir.rb000066400000000000000000000055731460033767200173100ustar00rootroot00000000000000require 'r10k/util/subprocess' require 'r10k/util/setopts' module R10K module SVN # Manage an SVN working copy. # # If SVN authentication is required, both username and password must be specified. # # @api private # @since 1.2.0 class WorkingDir include R10K::Util::Setopts # @attribute [r] path # @return [Pathname] The full path to the SVN working directory # @api private attr_reader :path # @!attribute [r] username # @return [String, nil] The SVN username, if provided # @api private attr_reader :username # @!attribute [r] password # @return [String, nil] The SVN password, if provided # @api private attr_reader :password # @param path [Pathname] # @param opts [Hash] # # @option opts [String] :username # @option opts [String] :password def initialize(path, opts = {}) @path = path setopts(opts, {:username => :self, :password => :self}) if !!(@username) ^ !!(@password) raise ArgumentError, _("Both username and password must be specified") end end # Is the directory at this path actually an SVN repository? def is_svn? dot_svn = @path + '.svn' dot_svn.exist? 
end def revision info.slice(/^Revision: (\d+)$/, 1) end def url info.slice(/^URL: (.*)$/, 1) end def root info.slice(/^Repository Root: (.*)$/, 1) end def update(revision = nil) argv = %w[update] argv << '-r' << revision if revision argv.concat(auth) svn(argv, :cwd => @path) end def checkout(url, revision = nil) argv = ['checkout', url] argv << '-r' << revision if revision argv << @path.basename.to_s argv.concat(auth) argv << '-q' svn(argv, :cwd => @path.parent) end private def info argv = %w[info] argv.concat(auth) svn(argv, :cwd => @path) end # Format authentication information for SVN command args, if applicable def auth auth = [] if @username auth << "--username" << @username auth << "--password" << @password end auth end include R10K::Logging # Wrap SVN commands # # @param argv [Array] # @param opts [Hash] # # @option opts [Pathname] :cwd The directory to run the command in # # @return [String] The stdout from the given command def svn(argv, opts = {}) argv.unshift('svn', '--non-interactive') subproc = R10K::Util::Subprocess.new(argv) subproc.raise_on_fail = true subproc.logger = self.logger subproc.cwd = opts[:cwd] result = subproc.execute result.stdout end end end end r10k-4.0.2/lib/r10k/tarball.rb000066400000000000000000000133641460033767200156020ustar00rootroot00000000000000require 'fileutils' require 'find' require 'minitar' require 'tempfile' require 'uri' require 'zlib' require 'r10k/settings' require 'r10k/settings/mixin' require 'r10k/util/platform' require 'r10k/util/cacheable' require 'r10k/util/downloader' module R10K class Tarball include R10K::Settings::Mixin include R10K::Util::Cacheable include R10K::Util::Downloader def_setting_attr :proxy # Defaults to global proxy setting def_setting_attr :cache_root, R10K::Util::Cacheable.default_cachedir # @!attribute [rw] name # @return [String] The tarball's name attr_accessor :name # @!attribute [rw] source # @return [String] The tarball's source attr_accessor :source # @!attribute [rw] checksum # @return [String] The tarball's expected sha256 digest attr_accessor :checksum # @param name [String] The name of the tarball content # @param source [String] The source for the tarball content # @param checksum [String] The sha256 digest of the tarball content def initialize(name, source, checksum: nil) @name = name @source = source @checksum = checksum # At this time, the only checksum type supported is sha256. In the future, # we may decide to support other algorithms if a use case arises. TBD. checksum_algorithm = :SHA256 end # @return [String] Directory. Where the cache_basename file will be created. def cache_dirname File.join(settings[:cache_root], 'tarball') end # The final cache_path should match one of the templates: # # - {cachedir}/{checksum}.tar.gz # - {cachedir}/{source}.tar.gz # # @return [String] File. The full file path the tarball will be cached to. def cache_path File.join(cache_dirname, cache_basename) end # @return [String] The basename of the tarball cache file. def cache_basename if checksum.nil? sanitized_dirname(source) + '.tar.gz' else checksum + '.tar.gz' end end # Extract the cached tarball to the target directory. 
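    #
    # A minimal end-to-end sketch; the name, URL, checksum, and target path
    # below are placeholders rather than real values:
    #
    # @example Fetching a tarball and unpacking it
    #   tarball = R10K::Tarball.new('mymodule',
    #                               'https://repo.example.com/mymodule.tar.gz',
    #                               checksum: '<expected sha256 hex digest>')
    #   tarball.get unless tarball.cache_valid?
    #   tarball.unpack('/etc/puppetlabs/code/environments/production/modules/mymodule')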
# # @param target_dir [String] Where to unpack the tarball def unpack(target_dir) file = File.open(cache_path, 'rb') reader = Zlib::GzipReader.new(file) begin Minitar.unpack(reader, target_dir) ensure reader.close end end # @param target_dir [String] The directory to check if is in sync with the # tarball content # @param ignore_untracked_files [Boolean] If true, consider the target # dir to be in sync as long as all tracked content matches. # # @return [Boolean] def insync?(target_dir, ignore_untracked_files: false) target_tree_entries = Find.find(target_dir).map(&:to_s) - [target_dir] each_tarball_entry do |entry| found = target_tree_entries.delete(File.join(target_dir, entry.full_name.chomp('/'))) return false if found.nil? next if entry.directory? return false unless file_digest(found) == reader_digest(entry) end if ignore_untracked_files # We wouldn't have gotten this far if there were discrepancies in # tracked content true else # If there are still files in target_tree_entries, then there is # untracked content present in the target tree. If not, we're in sync. target_tree_entries.empty? end end # Download the tarball from @source to @cache_path def get Tempfile.open(cache_basename) do |tempfile| tempfile.binmode src_uri = URI.parse(source) temp_digest = case src_uri.scheme when 'file', nil copy(src_uri.path, tempfile) when %r{^[a-z]$} # Windows drive letter copy(src_uri.to_s, tempfile) when %r{^https?$} download(src_uri, tempfile) else raise "Unexpected source scheme #{src_uri.scheme}" end # Verify the download unless (checksum == temp_digest) || checksum.nil? raise 'Downloaded file does not match checksum' end # Move the download to cache_path FileUtils::mkdir_p(cache_dirname) begin FileUtils.mv(tempfile.path, cache_path) rescue Errno::EACCES # It may be the case that permissions don't permit moving the file # into place, but do permit overwriting an existing in-place file. FileUtils.cp(tempfile.path, cache_path) end end end # Checks the cached tarball's digest against the expected checksum. Returns # false if no cached file is present. If the tarball has no expected # checksum, any cached file is assumed to be valid. # # @return [Boolean] def cache_valid? return false unless File.exist?(cache_path) return true if checksum.nil? checksum == file_digest(cache_path) end # List all of the files contained in the tarball and their paths. This is # useful for implementing R10K::Purgable # # @return [Array] A normalized list of file paths contained in the archive def paths names = Array.new each_tarball_entry { |entry| names << Pathname.new(entry).cleanpath.to_s } names - ['.'] end def cache_checksum raise R10K::Error, _("Cache not present at %{path}") % {path: cache_path} unless File.exist?(cache_path) file_digest(cache_path) end private def each_tarball_entry(&block) File.open(cache_path, 'rb') do |file| Zlib::GzipReader.wrap(file) do |reader| Archive::Tar::Minitar::Input.each_entry(reader) do |entry| yield entry end end end end end end r10k-4.0.2/lib/r10k/util/000077500000000000000000000000001460033767200146025ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/util/attempt.rb000066400000000000000000000040711460033767200166070ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/errors/formatting' require 'r10k/util/setopts' require 'colored2' module R10K module Util # Attempt a series of dependent nested tasks and cleanly handle errors. 
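    #
    # Roughly speaking, each #try block receives the previous block's return
    # value, an Array return fans out so later blocks run once per element, and
    # a nil/false return stops the chain. A sketch (the environment and module
    # sync steps named here are illustrative only):
    #
    #   attempt = R10K::Util::Attempt.new(environment)
    #   attempt.try { |env| env.sync; env.modules }  # returns an Array of modules
    #   attempt.try { |mod| mod.sync }               # runs once for each module
    #   attempt.run
    #   attempt.ok? #=> false if any step raised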
# # @api private class Attempt include R10K::Logging include R10K::Util::Setopts # @!attribute [r] status # @return [Symbol] The status of this task attr_reader :status def initialize(initial, opts = {}) @initial = initial @tries = [] @status = :notrun setopts(opts, {:trace => :self}) end # Run this attempt to completion. # # @todo determine the structure of the ret # @return [Object] The aggregate result of all work performed. def run @status = :running result = apply(@initial, @tries) @status = :ok if @status == :running result end # Add another action to take for this attempt # # @yieldparam [Object] The result of the previous action. # @yieldreturn [Object, Array, NilClass] The result of this action. # If the value is an object, it will be passed to the next attempt. If # the value is an Array then each element will be individually passed # to the next try. If the value is false or nil then no further action # will be taken. def try(&block) @tries << block self end def ok? @status == :ok end private def apply(input, tries) return input if tries.empty? case input when Array apply_all(input, tries) when NilClass, FalseClass input else apply_one(input, tries) end end def apply_all(values, tries) values.map { |v| apply_one(v, tries) } end def apply_one(value, tries) apply(tries.first.call(value), tries.drop(1)) rescue => e @status = :failed logger.error R10K::Errors::Formatting.format_exception(e, @trace) e end end end end r10k-4.0.2/lib/r10k/util/basedir.rb000066400000000000000000000037011460033767200165410ustar00rootroot00000000000000require 'r10k/deployment' require 'r10k/logging' require 'r10k/util/purgeable' module R10K module Util # Represents a directory that can purge unmanaged contents # # @todo pick a better name than basedir. Expect this class to be renamed. # # @api private class Basedir include R10K::Util::Purgeable include R10K::Logging # Create a new Basedir by selecting sources from a deployment that match # the specified path. # # @param path [String] # @param deployment [R10K::Deployment] # # @return [R10K::Util::Basedir] def self.from_deployment(path, deployment) sources = deployment.sources.select { |source| source.managed_directory == path } new(path, sources) end # @param path [String] The path to the directory to manage # @param sources [Array<#desired_contents>] A list of objects that may create filesystem entries def initialize(path, sources) if sources.is_a? R10K::Deployment raise ArgumentError, _("Expected Array<#desired_contents>, got R10K::Deployment") end @path = path @sources = sources end # Return the path of the basedir # @note This implements a required method for the Purgeable mixin # @return [Array] def managed_directories [@path] end # List all environments that should exist in this basedir # @note This implements a required method for the Purgeable mixin # @return [Array] def desired_contents @sources.flat_map do |src| src.desired_contents.collect { |env| File.join(@path, env) } end end def purge! 
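        # Log what each source says it manages, then let
        # R10K::Util::Purgeable#purge! (reached via super below) remove
        # anything in this basedir that no source lists as desired.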
@sources.each do |source| logger.debug1 _("Source %{source_name} in %{path} manages contents %{contents}") % {source_name: source.name, path: @path, contents: source.desired_contents.inspect} end super end end end end r10k-4.0.2/lib/r10k/util/cacheable.rb000066400000000000000000000020141460033767200170130ustar00rootroot00000000000000module R10K module Util # Utility mixin for classes that need to implement caches # # @abstract Classes using this mixin need to implement {#managed_directory} and # {#desired_contents} module Cacheable # Provide a default cachedir location. This is consumed by R10K::Settings # for appropriate global default values. # # @return [String] Path to the default cache directory def self.default_cachedir(basename = 'cache') if R10K::Util::Platform.windows? File.join(ENV['LOCALAPPDATA'], 'r10k', basename) else File.join(ENV['HOME'] || '/root', '.r10k', basename) end end # Reformat a string into something that can be used as a directory # # @param string [String] An identifier to create a sanitized dirname for # @return [String] A sanitized dirname for the given string def sanitized_dirname(string) string.gsub(/(\w+:\/\/)(.*)(@)/, '\1').gsub(/[^@\w\.-]/, '-') end end end end r10k-4.0.2/lib/r10k/util/cleaner.rb000066400000000000000000000007651460033767200165500ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/util/purgeable' module R10K module Util class Cleaner include R10K::Logging include R10K::Util::Purgeable attr_reader :managed_directories, :desired_contents, :purge_exclusions def initialize(managed_directories, desired_contents, purge_exclusions = []) @managed_directories = managed_directories @desired_contents = desired_contents @purge_exclusions = purge_exclusions end end end end r10k-4.0.2/lib/r10k/util/commands.rb000066400000000000000000000020051460033767200167250ustar00rootroot00000000000000module R10K module Util module Commands module_function # Find the full path of a shell command. # # On POSIX platforms, the PATHEXT environment variable will be unset, so # the first command named 'cmd' will be returned. # # On Windows platforms, the PATHEXT environment variable will contain a # semicolon delimited list of executable file extensions, so the first # command with a matching path extension will be returned. # # @param cmd [String] The name of the command to search for # @return [String, nil] The path to the file if found, nil otherwise def which(cmd) exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : [''] ENV['PATH'].split(File::PATH_SEPARATOR).each do |dir| exts.each do |ext| path = File.join(dir, "#{cmd}#{ext}") if File.executable?(path) && File.file?(path) return path end end end nil end end end end r10k-4.0.2/lib/r10k/util/downloader.rb000066400000000000000000000105511460033767200172670ustar00rootroot00000000000000require 'digest' require 'net/http' module R10K module Util # Utility mixin for classes that need to download files module Downloader # Downloader objects need to checksum downloaded or saved content. The # algorithm used to perform this checksumming (and therefore the kinds of # checksums returned by various methods) is reported by this method. # # @return [Symbol] The checksum algorithm the downloader uses def checksum_algorithm @checksum_algorithm ||= :SHA256 end private # Set the checksum algorithm the downloader should use. It should be a # symbol, and a valid Ruby 'digest' library algorithm. The default is # :SHA256. 
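      #
      # As an illustrative sketch only (r10k itself keeps the :SHA256 default),
      # an including class could opt into a different digest like so; the
      # `MyDownloader` class below is hypothetical:
      #
      #   class MyDownloader
      #     include R10K::Util::Downloader
      #
      #     def initialize
      #       self.checksum_algorithm = :SHA512  # any algorithm Digest() can resolve
      #     end
      #   end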
# # @param algorithm [Symbol] The checksum algorithm the downloader should use def checksum_algorithm=(algorithm) @checksum_algorithm = algorithm end CHUNK_SIZE = 64 * 1024 # 64 kb # @param src_uri [URI] The URI to download from # @param dst_file [String] The file or path to save to # @return [String] The downloaded file's hex digest def download(src_uri, dst_file) digest = Digest(checksum_algorithm).new http_get(src_uri) do |resp| File.open(dst_file, 'wb') do |output_stream| resp.read_body do |chunk| output_stream.write(chunk) digest.update(chunk) end end end digest.hexdigest end # @param src_file The file or path to copy from # @param dst_file The file or path to copy to # @return [String] The copied file's sha256 hex digest def copy(src_file, dst_file) digest = Digest(checksum_algorithm).new File.open(src_file, 'rb') do |input_stream| File.open(dst_file, 'wb') do |output_stream| until input_stream.eof? chunk = input_stream.read(CHUNK_SIZE) output_stream.write(chunk) digest.update(chunk) end end end digest.hexdigest end # Start a Net::HTTP::Get connection, then yield the Net::HTTPSuccess object # to the caller's block. Follow redirects if Net::HTTPRedirection responses # are encountered, and use a proxy if directed. # # @param uri [URI] The URI to download the file from # @param redirect_limit [Integer] How many redirects to permit before failing # @param proxy [URI, String] The URI to use as a proxy def http_get(uri, redirect_limit: 10, proxy: nil, &block) raise "HTTP redirect too deep" if redirect_limit.zero? session = Net::HTTP.new(uri.host, uri.port, *proxy_to_array(proxy)) session.use_ssl = true if uri.scheme == 'https' session.start begin session.request_get(uri) do |response| case response when Net::HTTPRedirection redirect = response['location'] session.finish return http_get(URI.parse(redirect), redirect_limit: redirect_limit - 1, proxy: proxy, &block) when Net::HTTPSuccess yield response else raise "Unexpected response code #{response.code}: #{response}" end end ensure session.finish if session.active? end end # Helper method to translate a proxy URI to array arguments for # Net::HTTP#new. A nil argument returns nil array elements. def proxy_to_array(proxy_uri) if proxy_uri px = proxy_uri.is_a?(URI) ? 
proxy_uri : URI.parse(proxy_uri) [px.host, px.port, px.user, px.password] else [nil, nil, nil, nil] end end # Return the sha256 digest of the file at the given path # # @param path [String] The path to the file # @return [String] The file's sha256 hex digest def file_digest(path) File.open(path) do |file| reader_digest(file) end end # Return the sha256 digest of the readable data # # @param reader [String] An object that responds to #read # @return [String] The read data's sha256 hex digest def reader_digest(reader) digest = Digest(checksum_algorithm).new while chunk = reader.read(CHUNK_SIZE) digest.update(chunk) end digest.hexdigest end end end end r10k-4.0.2/lib/r10k/util/exec_env.rb000066400000000000000000000015441460033767200167270ustar00rootroot00000000000000module R10K module Util # Utility methods for dealing with environment variables module ExecEnv module_function # Swap out all environment settings # # @param env [Hash] The new environment to use # @return [void] def reset(env) env.each_pair do |key, value| ENV[key] = value end (ENV.keys - env.keys).each do |key| ENV.delete(key) end end # Add the specified settings to the env for the supplied block # # @param env [Hash] The values to add to the environment # @param block [Proc] The code to call with the modified environnment # @return [void] def withenv(env, &block) original = ENV.to_hash reset(original.merge(env)) block.call ensure reset(original) end end end end r10k-4.0.2/lib/r10k/util/license.rb000066400000000000000000000012721460033767200165530ustar00rootroot00000000000000require 'r10k/errors' require 'r10k/features' module R10K module Util module License extend R10K::Logging def self.load if R10K::Features.available?(:pe_license) logger.debug2 _("pe_license feature is available, loading PE license key") begin return PELicense.load_license_key rescue PELicense::InvalidLicenseError => e raise R10K::Error.wrap(e, _("Invalid PE license detected: %{error_msg}") % {error_msg: e.message} ) end else logger.debug2 _("pe_license feature is not available, PE only Puppet modules will not be downloadable.") nil end end end end end r10k-4.0.2/lib/r10k/util/platform.rb000066400000000000000000000015111460033767200167510ustar00rootroot00000000000000require 'rbconfig' module R10K module Util module Platform FIPS_FILE = "/proc/sys/crypto/fips_enabled" def self.platform # Test JRuby first to handle JRuby on Windows as well. if self.jruby? :jruby elsif self.windows? :windows else :posix end end # We currently only suport FIPS mode on redhat 7, where it is # toggled via a file. def self.fips? if File.exist?(FIPS_FILE) File.read(FIPS_FILE).chomp == "1" else false end end def self.windows? RbConfig::CONFIG['host_os'] =~ /mswin|win32|dos|mingw|cygwin/i end def self.jruby? RUBY_PLATFORM == "java" end def self.posix? !windows? && !jruby? end end end end r10k-4.0.2/lib/r10k/util/purgeable.rb000066400000000000000000000135501460033767200171010ustar00rootroot00000000000000require 'r10k/logging' require 'fileutils' module R10K module Util # Mixin for purging stale directory contents. 
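    #
    # The simplest implementation of this mixin in the codebase is
    # R10K::Util::Cleaner, which just stores the required lists. A usage sketch
    # (the paths below are examples only):
    #
    #   cleaner = R10K::Util::Cleaner.new(['/etc/puppetlabs/code/environments'],
    #                                     ['/etc/puppetlabs/code/environments/production'])
    #   cleaner.purge!(recurse: true)  # removes everything else under the managed directory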
    #
    # @abstract Classes using this mixin need to implement {#managed_directories} and
    #   {#desired_contents}
    module Purgeable
      include R10K::Logging

      HIDDEN_FILE = /\.[^.]+/
      FN_MATCH_OPTS = File::FNM_PATHNAME | File::FNM_DOTMATCH

      # @deprecated
      #
      # @!method logger
      #   @abstract Including classes must provide a logger method
      #   @return [Log4r::Logger]

      # @!method desired_contents
      #   @abstract Including classes must implement this method to list the
      #     expected filenames of managed_directories
      #   @return [Array] The full paths to all the content this object is managing

      # @!method managed_directories
      #   @abstract Including classes must implement this method to return an array of
      #     paths that can be purged
      #   @return [Array] The paths to the directories to be purged

      # @return [Array] The present directory entries in `self.managed_directories`
      def current_contents(recurse)
        dirs = self.managed_directories
        dirs.flat_map do |dir|
          if recurse
            glob_exp = File.join(dir, '**', '{*,.[^.]*}')
          else
            glob_exp = File.join(dir, '*')
          end

          Dir.glob(glob_exp)
        end
      end

      # @deprecated Unused helper function
      #
      # @return [Array] Directory contents that are expected but not present
      def pending_contents(recurse)
        desired_contents - current_contents(recurse)
      end

      def matches?(test, path)
        if test == path
          true
        elsif File.fnmatch?(test, path, FN_MATCH_OPTS)
          true
        else
          false
        end
      end

      # A method to collect potentially purgeable content without searching into
      # ignored directories when recursively searching.
      #
      # @param dir [String, Pathname] The directory to search for purgeable content
      # @param exclusion_globs [Array] A list of file paths or File globs
      #   to exclude from recursion (these are generated by the classes that
      #   mix this module in and are typically programmatically generated)
      # @param allowed_globs [Array] A list of file paths or File globs to exclude
      #   from recursion (these are passed in by the caller of purge! and typically
      #   are user supplied configuration values)
      # @param desireds_not_to_recurse_into [Array] A list of file paths not to
      #   recurse into. These are programmatically generated and exist to maintain
      #   backwards compatibility with previous implementations that used File globs
      #   for "recursion", i.e. "**/{*,.[^.]*}", which would not recurse into dot directories.
      # @param recurse [Boolean] Whether or not to recurse into child directories that do
      #   not match other filters.
      #
      # @return [Array] Contents which may be purged.
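      #
      # As a rough illustration of the matching rules below, a hypothetical
      # exclusion glob such as '/etc/puppetlabs/code/environments/*/.resource_types'
      # causes each environment's .resource_types directory to be skipped
      # entirely (neither purged nor recursed into), since #matches? consults
      # File.fnmatch? with FNM_PATHNAME | FNM_DOTMATCH.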
def potentially_purgeable(dir, exclusion_globs, allowed_globs, desireds_not_to_recurse_into, recurse) children = Pathname.new(dir).children.reject do |path| path = path.to_s if exclusion_match = exclusion_globs.find { |exclusion| matches?(exclusion, path) } logger.debug2 _("Not purging %{path} due to internal exclusion match: %{exclusion_match}") % {path: path, exclusion_match: exclusion_match} elsif allowlist_match = allowed_globs.find { |allowed| matches?(allowed, path) } logger.debug _("Not purging %{path} due to whitelist match: %{allowlist_match}") % {path: path, allowlist_match: allowlist_match} else desired_match = desireds_not_to_recurse_into.grep(path).first end !!exclusion_match || !!allowlist_match || !!desired_match end children.flat_map do |child| if File.directory?(child) && !File.symlink?(child) && recurse potentially_purgeable(child, exclusion_globs, allowed_globs, desireds_not_to_recurse_into, recurse) << child.to_s else child.to_s end end end # @return [Array] Directory contents that are present but not expected def stale_contents(recurse, exclusions, whitelist) dirs = self.managed_directories desireds = self.desired_contents hidden_desireds, regular_desireds = desireds.partition do |desired| HIDDEN_FILE.match(File.basename(desired)) end initial_purgelist = dirs.flat_map do |dir| potentially_purgeable(dir, exclusions, whitelist, hidden_desireds, recurse) end initial_purgelist.reject do |path| regular_desireds.any? { |desired| matches?(desired, path) } end end # Forcibly remove all unmanaged content in `self.managed_directories` def purge!(opts={}) recurse = opts[:recurse] || false whitelist = opts[:whitelist] || [] exclusions = self.respond_to?(:purge_exclusions) ? purge_exclusions : [] stale = stale_contents(recurse, exclusions, whitelist) if stale.empty? logger.debug1 _("No unmanaged contents in %{managed_dirs}, nothing to purge") % {managed_dirs: managed_directories.join(', ')} else stale.each do |fpath| begin FileUtils.rm_r(fpath, :secure => true) logger.info _("Removing unmanaged path %{path}") % {path: fpath} rescue Errno::ENOENT # Don't log on ENOENT since we may encounter that from recursively deleting # this item's parent earlier in the purge. rescue logger.debug1 _("Unable to remove unmanaged path: %{path}") % {path: fpath} end end end end end end end r10k-4.0.2/lib/r10k/util/setopts.rb000066400000000000000000000046031460033767200166330ustar00rootroot00000000000000require 'r10k/logging' module R10K module Util # Allow for easy setting of instance options based on a hash # # This emulates the behavior of Ruby 2.0 named arguments, but since r10k # supports Ruby 1.8.7+ we cannot use that functionality. module Setopts class Ignore; end include R10K::Logging private # @param opts [Hash] # @param allowed [Hash] # # @example # opts = {:one => "one value"} # allowed => {:one => :self} # setopts(opts, allowed) # @one # => "one value" # # @example # opts = {:uno => "one value"} # allowed => {:one => :one, :uno => :one} # setopts(opts, allowed) # @one # => "one value" # # @example # # opts = {:trace => "something"} # allowed = {:trace => nil} # setopts(opts, allowed) # @trace # => nil # def setopts(opts, allowed, raise_on_unhandled: true) processed_vars = {} opts.each_pair do |key, value| if allowed.key?(key) # Ignore nil options and explicit ignore param next unless rhs = allowed[key] next if rhs == ::R10K::Util::Setopts::Ignore var = case rhs when :self, TrueClass # tr here is because instance variables cannot have hyphens in their names. 
"@#{key}".tr('-','_').to_sym else # tr here same as previous "@#{rhs}".tr('-','_').to_sym end if processed_vars.include?(var) # This should be a raise, but that would be a behavior change and # should happen on a SemVer boundry. logger.warn _("%{class_name} parameters '%{a}' and '%{b}' conflict. Specify one or the other, but not both" \ % {class_name: self.class.name, a: processed_vars[var], b: key}) end instance_variable_set(var, value) processed_vars[var] = key else err_str = _("%{class_name} cannot handle option '%{key}'") % {class_name: self.class.name, key: key} if raise_on_unhandled raise ArgumentError, err_str else logger.warn(err_str) end end end end end end end r10k-4.0.2/lib/r10k/util/subprocess.rb000066400000000000000000000050331460033767200173200ustar00rootroot00000000000000require 'r10k/logging' require 'r10k/util/platform' module R10K module Util # The subprocess namespace implements an interface similar to childprocess. # The interface has been simplified to make it easier to use and does not # depend on native code. # # @api private class Subprocess require 'r10k/util/subprocess/runner' require 'r10k/util/subprocess/result' require 'r10k/util/subprocess/subprocess_error' # @return [Class < R10K::Util::Subprocess::Runner] def self.runner if R10K::Util::Platform.windows? R10K::Util::Subprocess::Runner::Windows elsif R10K::Util::Platform.jruby? R10K::Util::Subprocess::Runner::JRuby else R10K::Util::Subprocess::Runner::POSIX end end include R10K::Logging # @!attribute [r] argv # @return [Array] The command to be executed attr_reader :argv # @!attribute [rw] raise_on_fail # Determine whether #execute raises an error when the command exits # with a non-zero exit status. # @return [true, false] attr_accessor :raise_on_fail # @!attribute [rw] cwd # @return [String] The directory to be used as the cwd when executing # the command. attr_accessor :cwd # @!attribute [w] logger # Allow calling processes to take ownership of execution logs by passing # their own logger to the command being executed. attr_writer :logger # Prepare the subprocess invocation. # # @param argv [Array] The argument vector to execute def initialize(argv) @argv = argv @raise_on_fail = false end # Execute the given command and return the result of evaluation. # # @api public # @raise [R10K::Util::Subprocess::SubprocessError] if raise_on_fail is # true and the command exited with a non-zero status. # @return [R10K::Util::Subprocess::Result] def execute subprocess = self.class.runner.new(@argv) subprocess.cwd = @cwd if @cwd logmsg = _("Starting process: %{args}") % {args: @argv.inspect} logmsg << "(cwd: #{@cwd})" if @cwd logger.debug2(logmsg) result = subprocess.run logger.debug2(_("Finished process:\n%{result}") % {result: result.format}) if @raise_on_fail && result.failed? 
raise SubprocessError.new(_("Command exited with non-zero exit code"), :result => result) end result end end end end r10k-4.0.2/lib/r10k/util/subprocess/000077500000000000000000000000001460033767200167725ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/util/subprocess/result.rb000066400000000000000000000017501460033767200206400ustar00rootroot00000000000000# @api private class R10K::Util::Subprocess::Result # @!attribute [r] argv # @return [Array] attr_reader :argv # @!attribute [r] cmd # @return [String] attr_reader :cmd # @!attribute [r] stdout # @return [String] attr_reader :stdout # @!attribute [r] stderr # @return [String] attr_reader :stderr # @!attribute [r] exit_code # @return [Integer] attr_reader :exit_code def initialize(argv, stdout, stderr, exit_code) @argv = argv @cmd = argv.join(' ') @stdout = stdout.chomp @stderr = stderr.chomp @exit_code = exit_code end def format(with_cmd = true) msg = [] if with_cmd msg << "Command: #{@cmd}" end if !@stdout.empty? msg << "Stdout:" msg << @stdout end if !@stderr.empty? msg << "Stderr:" msg << @stderr end msg << "Exit code: #{@exit_code}" msg.join("\n") end def failed? exit_code != 0 end def success? exit_code == 0 end end r10k-4.0.2/lib/r10k/util/subprocess/runner.rb000066400000000000000000000011361460033767200206310ustar00rootroot00000000000000# Define an abstract interface for external command runners. # # @api private class R10K::Util::Subprocess::Runner require 'r10k/util/subprocess/runner/windows' require 'r10k/util/subprocess/runner/posix' require 'r10k/util/subprocess/runner/jruby' # @!attribute [rw] cwd # @return [String] The directory to be used as the cwd when executing # the command. attr_accessor :cwd # @!attribute [r] result # @return [R10K::Util::Subprocess::Result] attr_reader :result def initialize(argv) raise NotImplementedError end def run raise NotImplementedError end end r10k-4.0.2/lib/r10k/util/subprocess/runner/000077500000000000000000000000001460033767200203035ustar00rootroot00000000000000r10k-4.0.2/lib/r10k/util/subprocess/runner/jruby.rb000066400000000000000000000014651460033767200217710ustar00rootroot00000000000000require 'open3' require 'r10k/util/subprocess/runner' # Run processes under JRuby. # # This implementation relies on Open3.capture3 to run commands and capture # results. In contrast to the POSIX runner this cannot be used in an # asynchronous manner as-is; implementing that will probably mean launching a # thread and invoking #capture3 in that thread. class R10K::Util::Subprocess::Runner::JRuby < R10K::Util::Subprocess::Runner def initialize(argv) @argv = argv end def run spawn_opts = @cwd ? {:chdir => @cwd} : {} stdout, stderr, status = Open3.capture3(*@argv, spawn_opts) @result = R10K::Util::Subprocess::Result.new(@argv, stdout, stderr, status.exitstatus) rescue Errno::ENOENT, Errno::EACCES => e @result = R10K::Util::Subprocess::Result.new(@argv, '', e.message, 255) end end r10k-4.0.2/lib/r10k/util/subprocess/runner/posix.rb000066400000000000000000000047651460033767200220060ustar00rootroot00000000000000require 'r10k/util/subprocess/runner' require 'r10k/util/subprocess/runner/pump' require 'fcntl' # Implement a POSIX command runner by using fork/exec. # # This implementation is optimized to run commands in the background, and has # a few noteworthy implementation details. # # First off, when the child process is forked, it calls setsid() to detach from # the controlling TTY. 
This has two main ramifications: sending signals will # never be send to the forked process, and the forked process does not have # access to stdin. # # @api private class R10K::Util::Subprocess::Runner::POSIX < R10K::Util::Subprocess::Runner def initialize(argv) @argv = argv mkpipes end def run # Create a pipe so that the parent can verify that the child process # successfully executed. The pipe will be closed on a successful exec(), # and will contain an error message on failure. exec_r, exec_w = pipe @stdout_pump = R10K::Util::Subprocess::Runner::Pump.new(@stdout_r) @stderr_pump = R10K::Util::Subprocess::Runner::Pump.new(@stderr_r) pid = fork do exec_r.close execute_child(exec_w) end exec_w.close @stdout_pump.start @stderr_pump.start execute_parent(exec_r, pid) @result end private def execute_child(exec_w) if @cwd Dir.chdir @cwd end # Create a new session for the forked child. This prevents children from # ever being the foreground process on a TTY, which is almost always what # we want in r10k. Process.setsid # Reopen file descriptors STDOUT.reopen(@stdout_w) STDERR.reopen(@stderr_w) executable = @argv.shift exec([executable, executable], *@argv) rescue SystemCallError => e exec_w.write("#{e.class}: #{e.message}") exit(254) end def execute_parent(exec_r, pid) @stdout_w.close @stderr_w.close stdout = '' stderr = '' if !exec_r.eof? stderr = exec_r.read || "exec() failed" _, @status = Process.waitpid2(pid) else _, @status = Process.waitpid2(pid) @stdout_pump.wait @stderr_pump.wait stdout = @stdout_pump.string stderr = @stderr_pump.string end exec_r.close @stdout_r.close @stderr_r.close @result = R10K::Util::Subprocess::Result.new(@argv, stdout, stderr, @status.exitstatus) end def mkpipes @stdout_r, @stdout_w = pipe @stderr_r, @stderr_w = pipe end def pipe ::IO.pipe.tap do |pair| pair.each { |p| p.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC) } end end end r10k-4.0.2/lib/r10k/util/subprocess/runner/pump.rb000066400000000000000000000023421460033767200216120ustar00rootroot00000000000000require 'r10k/util/subprocess/runner' # Perform nonblocking reads on a streaming IO instance. # # @api private class R10K::Util::Subprocess::Runner::Pump # !@attribute [r] string # @return [String] The output collected from the IO device attr_reader :string # @!attribute [r] min_delay # @return [Float] The minimum time to wait while polling the IO device attr_accessor :min_delay # @!attribute [r] max_delay # @return [Float] The maximum time to wait while polling the IO device attr_accessor :max_delay def initialize(io) @io = io @thread = nil @string = '' @run = true @min_delay = 0.05 @max_delay = 1.0 end def start @thread = Thread.new { pump } end def halt! @run = false @thread.join end # Block until the pumping thread reaches EOF on the IO object. def wait @thread.join end private def pump backoff = @min_delay while @run begin @string << @io.read_nonblock(4096) backoff /= 2 if backoff > @min_delay rescue Errno::EWOULDBLOCK, Errno::EAGAIN backoff *= 2 if backoff < @max_delay IO.select([@io], [], [], backoff) rescue EOFError @run = false end end end end r10k-4.0.2/lib/r10k/util/subprocess/runner/windows.rb000066400000000000000000000014661460033767200223310ustar00rootroot00000000000000require 'open3' require 'r10k/util/subprocess/runner' # Run processes on Windows. # # This implementation relies on Open3.capture3 to run commands and capture # results. 
In contrast to the POSIX runner this cannot be used in an # asynchronous manner as-is; implementing that will probably mean launching a # thread and invoking #capture3 in that thread. class R10K::Util::Subprocess::Runner::Windows < R10K::Util::Subprocess::Runner def initialize(argv) @argv = argv end def run spawn_opts = @cwd ? {:chdir => @cwd} : {} stdout, stderr, status = Open3.capture3(*@argv, spawn_opts) @result = R10K::Util::Subprocess::Result.new(@argv, stdout, stderr, status.exitstatus) rescue Errno::ENOENT, Errno::EACCES => e @result = R10K::Util::Subprocess::Result.new(@argv, '', e.message, 255) end end r10k-4.0.2/lib/r10k/util/subprocess/subprocess_error.rb000066400000000000000000000007261460033767200227250ustar00rootroot00000000000000require 'r10k/errors' require 'r10k/util/subprocess' require 'r10k/util/setopts' class R10K::Util::Subprocess::SubprocessError < R10K::Error # !@attribute [r] result # @return [R10K::Util::Subprocess::Result] attr_reader :result include R10K::Util::Setopts def initialize(mesg, options = {}) super setopts(options, {:result => true}) end def message msg = [] msg << "#{super}:" msg << @result.format msg.join("\n") end end r10k-4.0.2/lib/r10k/util/symbolize_keys.rb000066400000000000000000000021061460033767200201760ustar00rootroot00000000000000module R10K module Util module SymbolizeKeys module_function # Convert all String keys to Symbol keys # # @param hash [Hash] The data structure to convert # @param recurse [Boolean] Whether to recursively symbolize keys in nested # hash values. Defaults to false. # @raise [TypeError] If a String key collides with an existing Symbol key # @return [void] def symbolize_keys!(hash, recurse = false) hash.keys.each do |key| if key.is_a?(String) if hash.key?(key.to_sym) raise TypeError, _("An existing interned key for %{key} exists, cannot overwrite") % {key: key} end hash[key.to_sym] = hash.delete(key) key = key.to_sym end value = hash[key] if recurse if value.is_a?(Hash) symbolize_keys!(value, true) elsif value.is_a?(Array) value.map { |item| symbolize_keys!(item, true) if item.is_a?(Hash) } end end end end end end end r10k-4.0.2/lib/r10k/version.rb000066400000000000000000000004031460033767200156340ustar00rootroot00000000000000module R10K # When updating to a new major (X) or minor (Y) version, include `#major` or # `#minor` (respectively) in your commit message to trigger the appropriate # release. Otherwise, a new patch (Z) version will be released. VERSION = '4.0.2' end r10k-4.0.2/locales/000077500000000000000000000000001460033767200137245ustar00rootroot00000000000000r10k-4.0.2/locales/config.yaml000066400000000000000000000014721460033767200160610ustar00rootroot00000000000000--- # This is the project-specific configuration file for setting up # fast_gettext for your project. gettext: # This is used for the name of the .pot and .po files; they will be # called .pot? project_name: 'r10k' # This is used in comments in the .pot and .po files to indicate what # project the files belong to and should bea little more desctiptive than # package_name: r10k # The locale that the default messages in the .pot file are in default_locale: en # The email used for sending bug reports. bugs_address: docs@puppetlabs.com # The holder of the copyright. copyright_holder: Puppet, Inc. 
# Patterns for +Dir.glob+ used to find all files that might contain # translatable content, relative to the project root directory source_files: - 'lib/**/*.rb' r10k-4.0.2/locales/r10k.pot000066400000000000000000000452041460033767200152320ustar00rootroot00000000000000# SOME DESCRIPTIVE TITLE. # Copyright (C) 2023 Puppet, Inc. # This file is distributed under the same license as the r10k package. # FIRST AUTHOR , 2023. # #, fuzzy msgid "" msgstr "" "Project-Id-Version: r10k 3.9.3-373-g117587fb\n" "\n" "Report-Msgid-Bugs-To: docs@puppetlabs.com\n" "POT-Creation-Date: 2023-07-26 22:34+0000\n" "PO-Revision-Date: 2023-07-26 22:34+0000\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n" #: ../lib/r10k/action/deploy/deploy_helpers.rb:16 ../lib/r10k/settings/loader.rb:63 msgid "No configuration file given, no config file found in current directory, and no global config present" msgstr "" #: ../lib/r10k/action/deploy/deploy_helpers.rb:30 msgid "Making changes to deployed environments has been administratively disabled." msgstr "" #: ../lib/r10k/action/deploy/deploy_helpers.rb:31 msgid "Reason: %{write_lock}" msgstr "" #: ../lib/r10k/action/deploy/environment.rb:99 msgid "Environment(s) \\'%{environments}\\' cannot be found in any source and will not be deployed." msgstr "" #: ../lib/r10k/action/deploy/environment.rb:119 msgid "Executing postrun command." msgstr "" #: ../lib/r10k/action/deploy/environment.rb:133 msgid "Environment %{env_dir} does not match environment name filter, skipping" msgstr "" #: ../lib/r10k/action/deploy/environment.rb:141 msgid "Deploying environment %{env_path}" msgstr "" #: ../lib/r10k/action/deploy/environment.rb:144 msgid "Environment %{env_dir} is now at %{env_signature}" msgstr "" #: ../lib/r10k/action/deploy/environment.rb:148 msgid "Environment %{env_dir} is new, updating all modules" msgstr "" #: ../lib/r10k/action/deploy/module.rb:82 msgid "Running postrun command for environments: %{envs_to_run}." msgstr "" #: ../lib/r10k/action/deploy/module.rb:92 msgid "No environments were modified, not executing postrun command." msgstr "" #: ../lib/r10k/action/deploy/module.rb:104 msgid "Only updating modules in environment(s) %{opt_env} skipping environment %{env_path}" msgstr "" #: ../lib/r10k/action/deploy/module.rb:106 msgid "Updating modules %{modules} in environment %{env_path}" msgstr "" #: ../lib/r10k/action/puppetfile/check.rb:25 msgid "Syntax OK" msgstr "" #: ../lib/r10k/action/runner.rb:63 ../lib/r10k/deployment/config.rb:42 msgid "Overriding config file setting '%{key}': '%{old_val}' -> '%{new_val}'" msgstr "" #: ../lib/r10k/action/runner.rb:105 msgid "Reading configuration from %{config_path}" msgstr "" #: ../lib/r10k/action/runner.rb:108 msgid "No config file explicitly given and no default config file could be found, default settings will be used." 
msgstr "" #: ../lib/r10k/content_synchronizer.rb:33 msgid "Updating modules with %{pool_size} threads" msgstr "" #: ../lib/r10k/content_synchronizer.rb:46 msgid "Error during concurrent deploy of a module: %{message}" msgstr "" #: ../lib/r10k/content_synchronizer.rb:86 msgid "Module %{mod_name} failed to synchronize due to %{message}" msgstr "" #: ../lib/r10k/content_synchronizer.rb:92 msgid "Module thread %{id} exiting: %{message}" msgstr "" #: ../lib/r10k/deployment.rb:90 msgid "Environment collision at %{env_path} between %{source}:%{env_name} and %{osource}:%{oenv_name}" msgstr "" #: ../lib/r10k/deployment.rb:118 msgid "Unable to load sources; the supplied configuration does not define the 'sources' key" msgstr "" #: ../lib/r10k/environment/base.rb:89 ../lib/r10k/environment/base.rb:105 ../lib/r10k/environment/base.rb:114 ../lib/r10k/source/base.rb:83 msgid "%{class} has not implemented method %{method}" msgstr "" #: ../lib/r10k/environment/name.rb:83 msgid "Improper configuration value given for strip_component setting in %{src} source. Value must be a string, a /regex/, false, or omitted. Got \"%{val}\" (%{type})" msgstr "" #: ../lib/r10k/environment/with_modules.rb:60 msgid "Environment and %{src} both define the \"%{name}\" module" msgstr "" #: ../lib/r10k/environment/with_modules.rb:71 msgid "Unexpected value for `module_conflicts` setting in %{env} environment: %{val}" msgstr "" #: ../lib/r10k/feature.rb:27 msgid "Testing to see if feature %{name} is available." msgstr "" #: ../lib/r10k/feature.rb:30 msgid "Feature %{name} %{message} available." msgstr "" #: ../lib/r10k/feature.rb:37 msgid "Attempting to load library '%{lib}' for feature %{name}" msgstr "" #: ../lib/r10k/feature.rb:41 msgid "Error while loading library %{lib} for feature %{name}: %{error_msg}" msgstr "" #: ../lib/r10k/feature.rb:47 msgid "Evaluating proc %{block} to test for feature %{name}" msgstr "" #: ../lib/r10k/feature.rb:49 msgid "Proc %{block} for feature %{name} returned %{output}" msgstr "" #: ../lib/r10k/forge/module_release.rb:197 msgid "Unpacking %{tarball_cache_path} to %{target_dir} (with tmpdir %{tmp_path})" msgstr "" #: ../lib/r10k/forge/module_release.rb:199 msgid "Valid files unpacked: %{valid_files}" msgstr "" #: ../lib/r10k/forge/module_release.rb:201 msgid "These files existed in the module's tar file, but are invalid filetypes and were not unpacked: %{invalid_files}" msgstr "" #: ../lib/r10k/forge/module_release.rb:204 msgid "Symlinks are unsupported and were not unpacked from the module tarball. %{release_slug} contained these ignored symlinks: %{symlinks}" msgstr "" #: ../lib/r10k/git.rb:32 msgid "Rugged has been compiled without support for %{transport}; Git repositories will not be reachable via %{transport}." msgstr "" #: ../lib/r10k/git.rb:68 msgid "No Git providers are functional." msgstr "" #: ../lib/r10k/git.rb:85 msgid "No Git provider named '%{name}'." msgstr "" #: ../lib/r10k/git.rb:89 msgid "Git provider '%{name}' is not functional." msgstr "" #: ../lib/r10k/git.rb:96 msgid "Setting Git provider to %{provider}" msgstr "" #: ../lib/r10k/git.rb:104 msgid "No Git provider set." 
msgstr "" #: ../lib/r10k/git.rb:107 msgid "Setting Git provider to default provider %{name}" msgstr "" #: ../lib/r10k/git/alternates.rb:46 msgid "Cannot write %{file}; parent directory does not exist" msgstr "" #: ../lib/r10k/git/cache.rb:57 msgid "%{class}#path is deprecated; use #git_dir" msgstr "" #: ../lib/r10k/git/cache.rb:86 msgid "Creating new git cache for %{remote}" msgstr "" #: ../lib/r10k/git/rugged/bare_repository.rb:34 ../lib/r10k/git/rugged/working_repository.rb:28 msgid "Cloning '%{remote}' into %{path}" msgstr "" #: ../lib/r10k/git/rugged/bare_repository.rb:52 msgid "Fetching remote '%{remote_name}' at %{path}" msgstr "" #: ../lib/r10k/git/rugged/bare_repository.rb:56 msgid "Rugged versions prior to 0.24.0 do not support pruning stale branches during fetch, please upgrade your \\'rugged\\' gem. (Current version is: %{version})" msgstr "" #: ../lib/r10k/git/rugged/base_repository.rb:24 msgid "Unable to resolve %{pattern}: %{e} " msgstr "" #: ../lib/r10k/git/rugged/base_repository.rb:69 msgid "Remote URL is different from cache, updating %{orig} to %{update}" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:28 msgid "Authentication failed for Git remote %{url}." msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:52 msgid "Using per-repository private key %{key} for URL %{url}" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:55 msgid "URL %{url} has no per-repository private key using '%{key}'." msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:57 msgid "Git remote %{url} uses the SSH protocol but no private key was given" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:61 msgid "Unable to use SSH key auth for %{url}: private key %{private_key} is missing or unreadable" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:102 msgid "Using OAuth token from stdin for URL %{url}" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:105 msgid "Using OAuth token from %{token_path} for URL %{url}" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:107 msgid "%{path} is missing or unreadable, cannot load OAuth token" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:111 msgid "Supplied OAuth token contains invalid characters." msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:135 msgid "URL %{url} includes the username %{username}, using that user for authentication." msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:138 msgid "URL %{url} did not specify a user, using %{user} from configuration" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:141 msgid "URL %{url} did not specify a user, using current user %{user}" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:148 msgid "Github App id contains invalid characters." msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:149 msgid "Github App token ttl contains invalid characters." msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:150 msgid "Github App key is missing or unreadable" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:155 msgid "Github App key is not a valid SSL private key" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:158 msgid "Github App key is not a valid SSL key" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:161 msgid "Using Github App id %{app_id} with SSL key from %{key_path}" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:179 msgid "Error using private key to get Github App access token from url" msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:200 msgid "Github App token contains invalid characters." 
msgstr "" #: ../lib/r10k/git/rugged/credentials.rb:202 msgid "Github App token generated, expires at: %{expire}" msgstr "" #: ../lib/r10k/git/rugged/thin_repository.rb:92 ../lib/r10k/git/shellgit/thin_repository.rb:69 msgid "Updated repo %{path} to include alternate object db path %{objects_dir}" msgstr "" #: ../lib/r10k/git/rugged/working_repository.rb:67 msgid "Checking out ref '%{ref}' (resolved to SHA '%{sha}') in repository %{path}" msgstr "" #: ../lib/r10k/git/rugged/working_repository.rb:87 msgid "Fetching remote '%{remote}' at %{path}" msgstr "" #: ../lib/r10k/git/rugged/working_repository.rb:131 ../lib/r10k/git/shellgit/working_repository.rb:101 msgid "Found local modifications in %{file_path}" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:45 msgid "Unable to sync repo to unresolvable ref '%{ref}'" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:53 msgid "Cloning %{repo_path} and checking out %{ref}" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:56 msgid "Replacing %{repo_path} and checking out %{ref}" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:60 ../lib/r10k/git/stateful_repository.rb:65 msgid "Updating %{repo_path} to %{ref}" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:64 msgid "Overwriting local modifications to %{repo_path}" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:68 msgid "Skipping %{repo_path} due to local modifications" msgstr "" #: ../lib/r10k/git/stateful_repository.rb:72 msgid "%{repo_path} is already at Git ref %{ref}" msgstr "" #: ../lib/r10k/initializers.rb:31 msgid "the purgedirs key in r10k.yaml is deprecated. it is currently ignored." msgstr "" #: ../lib/r10k/keyed_factory.rb:18 msgid "Class already registered for %{key}" msgstr "" #: ../lib/r10k/keyed_factory.rb:32 msgid "No class registered for %{key}" msgstr "" #: ../lib/r10k/logging.rb:73 ../lib/r10k/logging.rb:100 ../lib/r10k/logging.rb:109 msgid "Invalid log level '%{val}'. Valid levels are %{log_levels}" msgstr "" #: ../lib/r10k/module.rb:45 msgid "Module %{name} with args %{args} doesn't have an implementation. (Are you using the right arguments?)" msgstr "" #: ../lib/r10k/module/base.rb:120 msgid "Deploying module to %{path}" msgstr "" #: ../lib/r10k/module/base.rb:123 msgid "Only updating modules %{modules}, skipping module %{name}" msgstr "" #: ../lib/r10k/module/base.rb:179 msgid "Module name (%{title}) must match either 'modulename' or 'owner/modulename'" msgstr "" #: ../lib/r10k/module/definition.rb:28 msgid "Not updating module %{name}, assuming content unchanged" msgstr "" #: ../lib/r10k/module/forge.rb:50 msgid "Module version %{ver} is not a valid Forge module version" msgstr "" #: ../lib/r10k/module/forge.rb:98 msgid "The module %{title} does not appear to have any published releases, cannot determine latest version." msgstr "" #: ../lib/r10k/module/forge.rb:101 ../lib/r10k/module/forge.rb:130 msgid "The module %{title} does not exist on %{url}." msgstr "" #: ../lib/r10k/module/git.rb:78 msgid "Cannot track control repo branch for content '%{name}' when not part of a git-backed environment, will use default if available." msgstr "" #: ../lib/r10k/module/local.rb:37 msgid "Module %{title} is a local module, always indicating synced." 
msgstr "" #: ../lib/r10k/module/metadata_file.rb:25 msgid "Could not read metadata.json" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:68 msgid "Using Puppetfile '%{puppetfile}'" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:69 msgid "Using moduledir '%{moduledir}'" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:91 msgid "Failed to evaluate %{path}" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:108 msgid "Unable to preload Puppetfile because of %{msg}" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:126 msgid "Using Forge from Puppetfile: %{forge}" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:129 msgid "Ignoring Forge declaration in Puppetfile, using value from settings: %{forge}." msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:193 ../lib/r10k/puppetfile.rb:104 msgid "Puppetfile %{path} missing or unreadable" msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:235 msgid "Puppetfiles cannot contain duplicate module names." msgstr "" #: ../lib/r10k/module_loader/puppetfile.rb:237 msgid "Remove the duplicates of the following modules: %{dupes}" msgstr "" #: ../lib/r10k/module_loader/puppetfile/dsl.rb:37 msgid "unrecognized declaration '%{method}'" msgstr "" #: ../lib/r10k/settings/collection.rb:77 msgid "Validation failed for '%{name}' settings group" msgstr "" #: ../lib/r10k/settings/collection.rb:79 msgid "Validation failed for settings group" msgstr "" #: ../lib/r10k/settings/container.rb:92 msgid "Key %{key} is not a valid key" msgstr "" #: ../lib/r10k/settings/enum_definition.rb:13 msgid "Setting %{name} may only contain %{enums}; the disallowed values %{invalid} were present" msgstr "" #: ../lib/r10k/settings/enum_definition.rb:17 msgid "Setting %{name} should be one of %{enums}, not '%{value}'" msgstr "" #: ../lib/r10k/settings/helpers.rb:19 msgid "%{class} instances cannot be reassigned to a new parent." msgstr "" #: ../lib/r10k/settings/helpers.rb:23 msgid "%{class} instances may only belong to a settings collection or list." msgstr "" #: ../lib/r10k/settings/list.rb:66 msgid "Validation failed for '%{name}' settings list" msgstr "" #: ../lib/r10k/settings/loader.rb:45 msgid "Both %{default_path} and %{old_default_path} configuration files exist." msgstr "" #: ../lib/r10k/settings/loader.rb:46 msgid "%{default_path} will be used." msgstr "" #: ../lib/r10k/settings/loader.rb:52 msgid "The r10k configuration file at %{old_default_path} is deprecated." msgstr "" #: ../lib/r10k/settings/loader.rb:53 msgid "Please move your r10k configuration to %{default_path}." msgstr "" #: ../lib/r10k/settings/loader.rb:69 msgid "Couldn't load config file: %{error_msg}" msgstr "" #: ../lib/r10k/settings/uri_definition.rb:12 msgid "Setting %{name} requires a URL but '%{value}' could not be parsed as a URL" msgstr "" #: ../lib/r10k/source/exec.rb:10 msgid "Environment source %{name} missing required parameter: command" msgstr "" #: ../lib/r10k/source/exec.rb:36 msgid "" "Error parsing command output for exec source %{name}:\n" "Not valid JSON: %{j_msg}\n" "Not valid YAML: %{y_msg}\n" "Stdout:\n" "%{out}" msgstr "" #: ../lib/r10k/source/exec.rb:44 msgid "" "Environment source %{name} command %{cmd} did not return valid environment data.\n" "Returned: %{data}" msgstr "" #: ../lib/r10k/source/git.rb:75 msgid "Fetching '%{remote}' to determine current branches." 
msgstr "" #: ../lib/r10k/source/git.rb:78 msgid "Unable to determine current branches for Git source '%{name}' (%{basedir})" msgstr "" #: ../lib/r10k/source/git.rb:113 msgid "Environment %{env_name} contained non-word characters, correcting name to %{corrected_env_name}" msgstr "" #: ../lib/r10k/source/git.rb:122 msgid "Environment %{env_name} contained non-word characters, ignoring it." msgstr "" #: ../lib/r10k/source/git.rb:141 ../lib/r10k/source/svn.rb:115 msgid "Branch %{branch} filtered out by ignore_branch_prefixes %{ibp}" msgstr "" #: ../lib/r10k/source/git.rb:152 msgid "Branch `%{name}:%{branch}` filtered out by filter_command %{cmd}" msgstr "" #: ../lib/r10k/source/yaml.rb:10 msgid "Couldn't open environments file %{file}: %{err}" msgstr "" #: ../lib/r10k/source/yamldir.rb:8 msgid "Error opening %{dir}: config must be a directory" msgstr "" #: ../lib/r10k/source/yamldir.rb:12 msgid "Error opening %{dir}: permission denied" msgstr "" #: ../lib/r10k/source/yamldir.rb:20 msgid "Error loading %{path}: %{err}" msgstr "" #: ../lib/r10k/svn/working_dir.rb:43 msgid "Both username and password must be specified" msgstr "" #: ../lib/r10k/tarball.rb:167 msgid "Cache not present at %{path}" msgstr "" #: ../lib/r10k/util/basedir.rb:34 msgid "Expected Array<#desired_contents>, got R10K::Deployment" msgstr "" #: ../lib/r10k/util/basedir.rb:58 msgid "Source %{source_name} in %{path} manages contents %{contents}" msgstr "" #: ../lib/r10k/util/license.rb:11 msgid "pe_license feature is available, loading PE license key" msgstr "" #: ../lib/r10k/util/license.rb:15 msgid "Invalid PE license detected: %{error_msg}" msgstr "" #: ../lib/r10k/util/license.rb:18 msgid "pe_license feature is not available, PE only Puppet modules will not be downloadable." msgstr "" #: ../lib/r10k/util/purgeable.rb:91 msgid "Not purging %{path} due to internal exclusion match: %{exclusion_match}" msgstr "" #: ../lib/r10k/util/purgeable.rb:93 msgid "Not purging %{path} due to whitelist match: %{allowlist_match}" msgstr "" #: ../lib/r10k/util/purgeable.rb:137 msgid "No unmanaged contents in %{managed_dirs}, nothing to purge" msgstr "" #: ../lib/r10k/util/purgeable.rb:142 msgid "Removing unmanaged path %{path}" msgstr "" #: ../lib/r10k/util/purgeable.rb:147 msgid "Unable to remove unmanaged path: %{path}" msgstr "" #: ../lib/r10k/util/setopts.rb:60 msgid "%{class_name} parameters '%{a}' and '%{b}' conflict. Specify one or the other, but not both" msgstr "" #: ../lib/r10k/util/setopts.rb:67 msgid "%{class_name} cannot handle option '%{key}'" msgstr "" #: ../lib/r10k/util/subprocess.rb:70 msgid "Starting process: %{args}" msgstr "" #: ../lib/r10k/util/subprocess.rb:75 msgid "" "Finished process:\n" "%{result}" msgstr "" #: ../lib/r10k/util/subprocess.rb:78 msgid "Command exited with non-zero exit code" msgstr "" #: ../lib/r10k/util/symbolize_keys.rb:17 msgid "An existing interned key for %{key} exists, cannot overwrite" msgstr "" r10k-4.0.2/r10k.gemspec000066400000000000000000000026071460033767200144310ustar00rootroot00000000000000lib = File.expand_path('../lib/', __FILE__) $:.unshift lib unless $:.include?(lib) require 'r10k/version' Gem::Specification.new do |s| s.name = "r10k" s.version = R10K::VERSION s.platform = Gem::Platform::RUBY s.authors = "Adrien Thebo" s.email = "adrien@somethingsinistral.net" s.homepage = "https://github.com/puppetlabs/r10k" s.summary = "Puppet environment and module deployment" s.description = <<-DESCRIPTION R10K provides a general purpose toolset for deploying Puppet environments and modules. 
It implements the Puppetfile format and provides a native implementation of Puppet dynamic environments. DESCRIPTION s.required_ruby_version = '>= 2.6.0' s.license = 'Apache-2.0' s.add_dependency 'colored2', '3.1.2' s.add_dependency 'cri', '>= 2.15.10' s.add_dependency 'log4r', '1.1.10' s.add_dependency 'multi_json', '~> 1.10' s.add_dependency 'puppet_forge', '>= 4.1', '< 6' s.add_dependency 'gettext-setup', '>=0.24', '<2.0' s.add_dependency 'jwt', '>= 2.2.3', '< 2.8.0' s.add_dependency 'minitar', '~> 0.9' s.add_development_dependency 'rspec', '~> 3.1' s.add_development_dependency 'rake' s.add_development_dependency 'yard', '~> 0.9.11' s.files = %x[git ls-files].split($/).reject { |f| f.match(%r{^spec}) } s.require_path = 'lib' s.bindir = 'bin' s.executables = 'r10k' end r10k-4.0.2/r10k.yaml.example000066400000000000000000000137561460033767200154110ustar00rootroot00000000000000--- # This example configuration details the most commonly used configuration # options for the `r10k deploy` command. # # The 'cachedir' setting controls where cached content, such as mirrored Git # repositories, are stored on the local machine. This location should be # persistent, as environments and modules may rely on these files in order to # be updated. # # The default value is "~/.r10k" #cachedir: '/var/cache/r10k' # The 'proxy' setting configures an HTTP proxy to use for all HTTP/HTTPS # operations performed by r10k. This includes requests to the Puppet Forge # as well as any Git operations performed against an HTTP/HTTPS remote. # You can also configure specific proxies for only Git or Forge operations # using settings below. Authenticated proxies can be configured by providing # credentials in the URL, e.g. 'https://username:password@proxy.example.com:8888' #proxy: 'https://proxy.example.com:8888' # The 'sources' setting determines what repositories r10k will use when creating # Puppet environments. sources: # Each source should have a unique name, and preferrably should only use # alphanumeric characters and underscores. #operations: # The 'remote' setting determines the location of the VCS repository used # for creating environments. #remote: 'git@github.com:my-org/org-operations-modules' # The 'basedir' setting specifies where environments will be created for # this source. This setting should match the Puppet 'environmentpath' # setting in puppet.conf. # # See http://docs.puppetlabs.com/references/latest/configuration.html#environmentpath # for more information about the Puppet 'environmentpath' setting. #basedir: '/etc/puppetlabs/puppet/environments' # The Puppetfile filename in the repo, defaults to 'Puppetfile'. This # setting can be used, to allow custom Puppetfile names to allow hybrid # use of librarian-puppet (transitive dependency resolution) and r10k # (code management on the server). #puppetfile_name: 'Puppetfile.r10k' # One or more sources can be specified; each source is simple another entry # in the sources map. #qa: #remote: 'git@github.com:my-org/org-qa-modules' # Multiple sources can have the same 'basedir' setting, in case there is a # reason to push different sets of environments to the same Puppet master. # If two different sources have the same basedir care should be exercised # to make sure that two sources don't try create multiple environments with # the same name. See the 'prefix' setting for more information. #basedir: '/etc/puppetlabs/puppet/environments' # Sources can use the 'prefix' setting to avoid collisions between multiple # sources being created in the same directory. 
Setting prefix to true # prepends the source name and an underscore to each environment, so for # the qa source the "production" branch will create the "qa_production" # branch. # # The default value is 'false'. #prefix: true # Additional configuration can be supplied to configure how r10k uses Git # and what version of Git it uses. git: # As of 1.5.0 r10k can interact with Git repositories in two ways - by # shelling out to the 'git' executable, and by using libgit2 through the # 'rugged' library. #provider: 'shellgit' # Either 'shellgit' or 'rugged', defaults to 'shellgit' # The 'private_key' setting sets the the SSH private key to use for remote # Git repositories accessed over SSH. # # The 'private_key' setting is only used by the 'rugged' Git provider. #private_key: '/root/.ssh/id_rsa' # The 'username' setting sets the SSH username for remote Git repositories # when a username is not provided in the URL. The username defaults to the # current user. # # The 'username' setting is only used by the 'rugged' Git provider. #username: 'git' # The 'proxy' setting configures an HTTP proxy to use for all Git sources # that have an HTTP/HTTPS remote URL. This can be overridden on a # per-repository basis using the 'repositories' setting below. #proxy: 'https://proxy.example.com:8888' # The 'repositories' setting allows other git related settings to be # configured for each remote repository individually. The value of # this setting should be a list of repositories with a 'remote' key # to identify the specific repository to be configured. #repositories: # - remote: "ssh://tessier-ashpool.freeside/protected-repo.git" # private_key: "/etc/puppetlabs/r10k/ssh/id_rsa-protected-repo-deploy-key" # proxy: 'https://proxy.example.com:8888' # Configuration options for downloading modules from the Puppet Forge forge: # The 'proxy' setting specifies an optional HTTP proxy to use when making # requests to the Puppet Forge. This will override the top level 'proxy' # option or any proxy related environment variables when configured. #proxy: 'https://proxy.example.com:8888' # The 'baseurl' setting indicates where Forge modules should be installed # from. This defaults to 'https://forgeapi.puppetlabs.com' #baseurl: 'https://forgemirror.example.com' # Configuration options on how R10k should log its actions logging: # The 'level' setting sets the default log level to run R10k actions at. # This value will be overridden by any value set through the command line. #level: warn # Specify additional log outputs here, any log4r outputter can be used. # If no log level is specified then the output will use the global level. #outputs: # - type: file # level: debug # parameters: # filename: /var/log/r10k.log # trunc: true # - type: syslog # - type: email # only_at: [fatal] # parameters: # from: r10k@example.com # to: sysadmins@example.com # server: smtp.example.com # subject: Fatal R10k error occurred # The 'disable_default_stderr' setting specifies if the default output on # stderr should be active or not, in case R10k is to be run entirely # through scripts or cronjobs where console output is unwelcome. 
#disable_default_stderr: false r10k-4.0.2/spec/000077500000000000000000000000001460033767200132345ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/000077500000000000000000000000001460033767200151055ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/empty/000077500000000000000000000000001460033767200162435ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/empty/.empty000066400000000000000000000000001460033767200173700ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/integration/000077500000000000000000000000001460033767200174305ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/integration/git/000077500000000000000000000000001460033767200202135ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/integration/git/puppet-boolean-bare.tar000066400000000000000000000740001460033767200245650ustar00rootroot00000000000000puppet-boolean.git/0000755000175000017510000000000012470664074014073 5ustar adrienadrienpuppet-boolean.git/config0000755000175000017510000000025612452553372015267 0ustar adrienadrien[core] repositoryformatversion = 0 filemode = true bare = true [remote "origin"] url = git://github.com/adrienthebo/puppet-boolean fetch = +refs/*:refs/* mirror = true puppet-boolean.git/HEAD0000644000175000017510000000002712452553372014514 0ustar adrienadrienref: refs/heads/master puppet-boolean.git/refs/0000755000175000017510000000000012452554255015031 5ustar adrienadrienpuppet-boolean.git/refs/tags/0000755000175000017510000000000012452554255015767 5ustar adrienadrienpuppet-boolean.git/refs/heads/0000755000175000017510000000000012452577216016117 5ustar adrienadrienpuppet-boolean.git/refs/heads/0.9.x0000644000175000017510000000005112452577216016612 0ustar adrienadrien3084373e8d181cf2fea5b4ade2690ba22872bd67 puppet-boolean.git/objects/0000755000175000017510000000000012452553372015522 5ustar adrienadrienpuppet-boolean.git/objects/pack/0000755000175000017510000000000012452553372016440 5ustar adrienadrienpuppet-boolean.git/objects/pack/pack-35c321cb8da4f6f78c6a1b611d79ac9f95c9ce25.idx0000644000175000017510000000650412452553372025703 0ustar adrienadrienÿtOc  !#$$$$$%&&&&&'((()**++,--....//////0000000222222234444455666667777889::::::::;;<<<<>>?@BCCCCCCCCCDDFFGGHHHHHHHHHIIIIIIJJLLMMMMNNOPPPPPPPPQQQQQQRRRRRRRSSSSSSSKlV]"2¬IŠÇ!?®×qÁxO¥mðwÒ¼p‹.œ¿s…¸0žÑ'Ë¥¤kënmû ƒ¾IñKIu¥ßD<:Ì݈€<§³œC*6Y0šü8­»´üࢰ‘V¸u™ E[‡<æGš#È{4¼ÍÆlà &îäRÏŠÝ@,..òßô~6<ÿ@ÝyÞ“¿’!gõj²'b‚Bÿ>i:PҌߥàýÁ=&áË´Zã¥÷d2 ~cñ¥J%¡ïlœp¤ê¢©ÙC5îHv²m7~@o’œsR{Iœ/Ȧ^zË‚=ÈÜÕ¯8”Ù2ôjå˜G^½ûó®”ù$ä¶KZ‡Œ‡ú–Ø5ÂÈùtŸ½…ÙÌ‘ß1?Ѥ«]øK M÷€þ¿Óx* y]öº ¥}çß"æä{T´Ô\Ý-xçä®ÿÖ#ãúæÚêyËçË" ;,ÎV3JV'‘… ¢IþÿÝqøSqh¶¼Jå.·&É(ø¼¬kÕ™+ãpÝtÑ0„7>òþ¥´­âi ¢(r½g1Ž9Õc½†š_PlI®WΆe8$Et±îP’—¦G+YD~̃=’RGÃE\YhôqçX²’,I@)¯hš†2zkÛ…ž¤¾¶‹dùN³G‹½i.µð©‚…[bÌ}8¾ÎN½LB£‚5*j!Kæ.nKmWqP6{1êx:ŒOîP²YÍ^WÁŸ¥cÜF€’›ýÛóm •Ð X½adãäŽZÈ Ÿ´0jó˜™"<6* H0Cƒvÿ6ÝŽk­«Ìwøè®Ãb¸ ÖZ0±%økp£@’ }júÎÝŒûÄõÇpÅU¤qø<Í¿EÆggÐßH Ž@]¬Gt{&ØÂN«VÁÜ}‹W½..R˜u–/ÓEÍXºMâo§0*»ja%wm$½™-Î/âåèÄ‚|6r4ÀÝy›iKB”?ÄÞ/(Ê{ yaÙÐ{ÈzèÀ» ™þPÅ.ÐyyÎóEû ß|Eúr†Tss$Žyˆˆf)ú€ôvÁ'‰¦«ÅïO‹³š¡N}+†;¥SEg´qSêK…ãAê¤ûÄÝôÉ`¦µ¹0ÀÝo{iÞžhóümâðYTŠ´\’(¬”¡Šµ Û*×N[&àúý_Õ|-וfh: íªÒŽÝu÷§Üˆ¡š47g¸ôÑ¡ä%a]!€ö<ª3œaŽ?~‚Rê·ñ81¦œÒL[r¡ï÷±QGŽÖ³Ã4H+± m2F¥²ïbáiÖ/ÙÕX¼©ÂóÜÊà§èœw¾Ù~áö£÷vr;ÍgÛ¨µ{;±SOø ”âiÒt™°c2Z ÑœÛn5øä.*Ü™Y@!²1«`}¬®:éf‹KTÏUG3ôì¶_àZV5j€{SËUß½•B-Þ>¶£’lõRKØþÞˆä ¢ûAR¸›»Û‚ZÏpÀHñšƒZ‚¹òß`ƒÇz¶ ßøkI~¥º6èg{Ø!°<;=Ø«å—ç ‡º£M4¸1‡bC5#lÉËÙÎÿ»,;[$v, ¼àûån†Û¨ÐÄ3X\EŒ…ZÜð¹v΃`LþâÆJÞôŒ¤£î£l…©¸Æ™º#ôàÆuh®Ì9S~ÚÝ"Ý ‡çÁ>ª&Ès"£¨,—¯ 7%WÀ«Ûr¡±ŒÊÅ»Õ2 Ÿ«ø×§Œ™T£íý—äY$œKƒ®€ó -•@—v~3–—ì¯y’Lìï<öÍP¹Ù‚QDÆ`NF’Ǧ/Y¢–$B Í d/n•M-„ÄX„ïK3Â)îDX ÖQþÑ;YB3ëʸﯽnD“’R ýë’Cdž^Cúe.õ¾š ЏƒU÷bx_›ç_ y|ʵ´;‘_±Õoakÿû3e bÕwªëÔW™—S„e…)§‰1©/±ÄY!†"¦ÄR’   
Ð+Œ+pìµË õ1®¢ 2þ %*» …\ä+Ö @àÿ&üãÒ–”Ô-T%WwjØ9ú É/&“Ñ»¿bÊ ûùÍy&Ë%À3¸$w*9Húm éhàèç2|¡, z[w,¯ á+¡5Ã!ˤö÷Œjay¬Ÿ•ÉÎ%¡rþëÓiRôðÐóõ.£>ç™%puppet-boolean.git/objects/pack/pack-35c321cb8da4f6f78c6a1b611d79ac9f95c9ce25.pack0000644000175000017510000003272412452553372026040 0ustar adrienadrienPACKS™xœ¥;NÅ0{Ÿb%j^;þIACGÇÖö:±”8Áq$¸=¯áÔ3š°¸)”˜½É!¥€¹hÔ%#MM–©ø)Ê(êÜXÌwœËÊ"O–¦b‚L˜T`² M0Ú”lÿüäVŠ4y•‚£¢¤vÊ—$ÅBMÑ'A×Xöo¹Wnð¹pÜáy®ãõÜ7KmóY[=G§õÖx¼joÔä,x”^J‘öm«cð¿"âƒûÌp\ë ¿.><(}ß Q[ëõ´ÑyŸñ^¿aüûí|¹fßš xœmŒK Â0@÷9Å\@™1I›‚ˆ+q!n<Á¤“¡…6•·· îÜ=¼W攀´vF ^š¶mHÔ’U!v½`M¬ÁEŒæÊsÊD=cÄPo°§¡²Ñ-HMmÅ'mQCð•á{é¦ΜáÔßa›9ûÇø\6ëvw@6xBO–`…Ñ,vìKI3\:~f8NÃÀY`{ë¾ô/ßÐ/7‡þå}ÖæÿaA=xœ¥;NÀ0ûœb%j`c¯BÐÐÑq;^'–'ØŽÄñqà è^1o¦WfÐd+2Ê’Ó’Ð — Æ™œ”ÆG23‘Æéò•K‡à’ˆI£•.Îf1Æ ]LÉê@ΰWˆ(þø*­¨# JsvFFÅiÁd­Ò“¿ûvVx5s¯Ã /kîoí<¸o¹¬-—ÜzõûSáþ ³´$¬ÔBÁ#ZÄi9#÷Îÿ’LŸ\W†ëÞw¨ü}sëð !Õó€âËžïç÷™¦ü3¸ëâ{°Ÿ>ŽäÖÆëÕ€l$ž xœ•ËA À;¯ØØ,°°ãÉ“ñÛJÒRÓÒÄçë¼N2}Ë<G”± ½% cÒ­eIĚȣzË–[‡1“ŠÇ`cÒ<1ËèÑaL¥?Rä,’£¿Ö Òà^87iÃ\ës‘:Óº\@Û`œÆh8!#ªŸ.µ÷ügS·úy•TÛê¾yÔý?Ûšxœ¥Ì;NÅ0…á>«‰úÂø‘I,!  c~Œ‹Ä¾8“†Õ“6@wôëÓ‘Î È&æl1¥É%v&dENkESÉ­¯LzÖÃÝw®¨’ÒS ~ô-Ä”rÀ0›ÀÖe…Êë~}ðÞ ÛdìÔ<‘¶ÆŒÚPŒNq jN.r΃?emÞR/\ácåÐày)òz´e-u9J-‡t¿=V–PfF4Á 'Ä!¶}/"ü¯“áûÂp?· :|¶Úàc«;¼§ò™ <¶ÿÑë-cÚ¯¾æjæÜJ«&xâšsvڜƠÿÕì­¶F~@¡Œïƒ PL%T Ðæc'Æ.Ž~ëØ°¬}OžÂêa¸Àã<æÔ{*7¸—@­ûÚ¨_Ùâðaì‘xœŽAN1 E÷9…/@'ž4#¡ ö]r'¶iD3ACz¦ÝדþÓ›»*$ĘH£RÖ…köëj…b]ñ7Õáðz¯çpør«äAatw¬ ü¶ýçþ% Y>Âÿ·À/5øŒŽžä½ÐŠÚFÝ*¯è ¤þÿ ñ!áaxœ¥ÎKNÃ0€á½O1 ²ãW,¡Š ¡²à±Œíqc)±+gzà lþå§ŸYŠ ³ ÉÄE#-²h‰sÖÈà½ó¤upÁ‹j ʤEÒh‹wFÏ2zrº(´g‹ŠŠK! ¼óÚœó¨Ôàk¥ØáñZùéè;ñZÛõ¨­Ì:˜…<³ÞæìCH—°Z.b˜Üb`™ ª7n¢&$‡L01‘lb¢è׈‘JJhÒªð#{z¦Q¹éçÎK×·­ÊãìË^ÛvÖVOøº6–»6.Úœø /TéÇQEø¯DÍDúU ·“Õ–ŠJ‚—xœ¥ÏQj!ÐO1HpÕ !$Ð|ö'ôîÌ쮵è$ço{†^àÁ“Î Úø9Í´NÎ9‹ÚVD¢"m1ùÅFtV}§ÎUÀû·PŒ†™• ÏÐãÑž¼±5‘J/Ù[‡õ̾v^œ·,×Ñ Ëžë6rÍCzz+Ë&ëM V›Úk­°•’Eø_ˆzpio†Q^ü‰¿‰ko÷ÛÇç]ý™RÛ‘xœ•ÏÛI1ÐÿT1 (yìærÑÄ&Éd7p“,É\ÁîÕ °‡'P¢6‚Þ’InÏMò‘vëKÁR‚‹˜cqâÂIÁH¿gÈgåU*ºîqÃLÚQkïtÌÖ |ð9&¼åY©ÃÇIqÀËQùuF|Ö~¬Úëâ‰÷çN|eœ^û`áI:)E­Ufú?b¤úCÄ;µñI°Úã€_ñ'± ÌÑ Žq'ìpÍqÑä/ñ -=X…’xœ¥ÎËmÃ0 л¦Ð5èD(‚v€ÜÚh‘ŒØR 3û§;t‡g]ÄãªiΙ7]BŽ#2°Žº$‚$ ¦¼ {R—j~‚…¨lÊ3¦‰€q ¬q# È‚¬Ž^¶·î¿¹©þg—­ùÏG±¯«b{©«ÔrY§c¨b7?Î!⸮Óä? 
¸Üγ˜É¿÷ûd2ñ÷ƯC´âõ¯Ã:€{uQa™xœ¥ÌËmÄ Ð;ULY †á#EQ,e{‰Ò†ñ­ žUÚOzHïÉ`ÖÑkoClÃâ´‹ © O^[ïãâCt«&TßipÈÖ ”-ýÒ‘RÉ+.Ñ»ÌÁ8´yežTzÊÖÌeTnðµñÒáõ^åýìËVÛý¬­ž2Ò~i,o  ‘Lм`@T¹Gá%j.>¯óÇíz9Òx”þÓ µ·^ž;¯ugõ 8ºQÍ• xœ•ÎAƒ À;¯ØÔ€‚bÒ˜öÒ¤w?°È*$ lÿßú~`2\‰ÀÊ]Oã¦úÉÎ8*ƒn3´ËAïÖ¹aFTš&ß;o¥ÂÝ×HÖ@®Àõˆ|k%‡˜sl\ñì2ñj0¦ŸG­ \¤•Rl%¥ÈLÿ#V[õCÄ3½NJ”Öú-f<…R|'>l:D¡ŸxœQJ1Dÿç¹€C’î$Ý â?¼@wÒYœ™%Äû»+ø¿øYª¢¨9Ì\•šT[‚(ž»h§V¤PeN ùÆ­Ár•aÇt!‚ ‰ÄÒCôQ¸1$3¤’5æ¥ØŸ=ûÈÒ3±P†X$kSJØÕ4“fì¼È÷ü<‡{·iÃ}l6ŽÓ=_ïj•uþê×Ë.Û×ZÏýÅàB¦XÜ“'ï—Ý·yÿ»by³q±‡'»í˜çã‡ü6®k+“xœI 1E÷9E]À&ó"Á…àº:©hÀHWßßÖ#¸|Þ܉ e¯"™(j§ ÇP•‰F¡O¹h›Ý´Ô±ˆ;Í ^‹c ¡h¯Èz´Õ%™UÖ‰Ðgå’3®/pç×ÒáFLîú¼ÀyýÒ€ÿøúœ°½‡¼L8¢ÖÙ(m‚“tRŠcå¿/Ä£7&ØçÆÀ´ñ&>ËÚJP•xœKjÄ0÷>….¡?rK‚r€d— ¨¥VÆ$¶ŒGsÿñ&û!Û¢ofŒKkj ©Zbm(‰%´Y²X(Ò´çöá+R(šçìÁ‹šŠIm Yͧ†€^}úó5gó•ý)` Bžy&–RZQŒ5kmÊ÷qí‡ûºö5ßÜDzý,›{}}ßïûnã7ëíRúúæ#@ Äâ^ L']—1ìëéÓŽo{:Ô-ÛèÏßð_si¤¤xœ31…‚Ò‚‚Ô ×’ï& L_æ®ÉâR×{¦ÙIA|Cë¯"ɶâb•>áJÑŽŽ46Í_+„§xœ31…‚Ä¢ÄÜÔ’Ô"†Ëœóyþ—ìñRvûWqæ+.›@Ô夕T2ôÞ›—ñùO!]¶Ì™Äª±éX¦xœ340031QHÊÏÏIMÌÓ+JbÈúvd{Å¡É )¦xœ340031QHÊÏÏIMÌÓ+JbPgŸØÊ³Èóßÿ»…?‚ 3¶íñzZû©ü§ xœ340031QpöpôswõñwgvÈŸ4§8¨ÚsŽþ‰eqU§›lOÜ1„(òñtvõ veØ­c­R¦Ã½GžãÁï§ym·W\˜U⛟Rš“š–™“Êp¬4cäËàº[w•î.h~Ðn•TU«£‹¯«^nbQvJ~yCIµÚC~¢«ÃÞ©íß+®§4ÃÄr2“„œþÛeZH^ê¹/¹´ÿÁ߃¶j+ôFåë€6xœÛθqÂbåÇ¿žÝzUyúùi%nksaÆ^aÂ4 ÷¤xœ31…‚Ò‚‚Ô† ÉFQœçÜÎ3ýñDOKàÎÌHE$ñ 5®¿Š$ÛBŠ‹Uú„+E;:ÒØ4Sø:§xœ31…‚Ä¢ÄÜÔ’Ô"C±>Ë«É{™ÚfÅäx® ?ׯ”jQS”_ZTRÉÐ{o^Æç?¹>D†tMØ27f«Æ©6¦xœ340031QHÊÏÏIMÌÓ+JbHuœùSAø‘Wïì/ÿ>Ù«3ÍÎQ¼ë‚!xœÛθqÂb‘)2 »¶*ÜÖºî­öà×ßø«5º×­e ž¤xœ31…‚Ò‚‚Ô†Y&æé;¾\\øD51V±á›Í*ã^$ñ K7É¿Oz˜yMÿæÕˆ=+É~¾sê‘ø³§xœ31…‚Ä¢ÄÜÔ’Ô"C±>Ë«É{™ÚfÅäx® ?ׯ”jQS”_ZTRɰ«ßìEºdõ Å 6Ö¶7V?þœ«Œ1î¦xœ340031QHÊÏÏIMÌÓ+JbX±µÚzc°ýî)2/É”Ìä˜[q¯xœ31…Ғ̆۵ŽvæÝ`QK ÞqÿÚå†&û[¸ß Ǧxœ340031QHÊÏÏIMÌÓ+Jb¨zq`·ÂÌGõ.TVžûìú›û~?¡Eî„fxœáÿ··…§èœw¾Ù~áö£÷vr;ÍgÛ‘™ïÐ{î…xœáÿ··….·&É(ø¼¬kÕ™+ãpÝtÑ‘™ÇÆR§ xœ340031QpöpôswõñwgvÈŸ4§8¨ÚsŽþ‰eqU§›lOÜ1„(òñtvõ veØ­c­R¦Ã½GžãÁï§ym·W\˜U⛟Rš“š–™“Êp¬4cäËàº[w•î.h~Ðn•TU«£‹¯«^nbQvJ~yƒóv5Ž“?ö¬É¾:SûqÁ„»%M €@!'3‰!üàü¥ÉwÜ&Íþ{ûs.ÏÔ ¼{hI|¡xœ31…‚Ò‚‚Ô†Òiú—]ÏFìòe}”¿Ü@kwV¢ˆ*Ä_ G£xœ31…‚¢ü‚Ô¢’J†¬³wßfajbŠ:_pÀãã,Éæ¨&Ûl ¶¦xœ340031QHÊÏÏIMÌÓ+JbØÿ *Ì4«¡:øtèý½StïÙÉþp¿ï>xœÛÎ8‰q¢ª©§Ú»'A绤ï:èèéM`ütÿKÙÄKùDµÉž³BÇrBT¹:ºøºêå&e§ä—ç1è1oWã8©ñcÏšì«3µL¸[rÑÄr2“ÂÎ_š|Ç­aÒì¿·?çòL½À±°Ž,z¯xœ340031QðÍO)ÍIMËÌIeàvn·yæ>KùDµÉž³BÇrBT¹:ºøºêå&e§ä—ç1ˆ™Õ7ŠU¾ª°êñ°)Rðlœ‰(äd&1$Ë =8b­zÖ²¢âBÎê©W¯®<µJ¢)ÑévxœËÏŸ-~pþÒä;n “fÿ½ý9—gêÞˆ½¦ 7¡xœ31…‚Ò‚‚Ô%¹gŒOªC¶\‰¹«[ñüIïºÿ×$Ëpæ£xœ31…‚¢ü‚Ô¢’J†m‹'å|• òf¾ñï^ÇžþE¿ƒñ¦xœ340031QHÊÏÏIMÌÓ+Jb`Ó2‹4˜õÇbíî-,Ú01lGéLööæ¯xœ340031QðÍO)ÍIMËÌIe(Xì0‰§6ë×¹»=¿|=^p4”o‰!DU«£‹¯«^nbQvJ~yC€˜Y}£Xµá« «ÿw›"ÏÆ™BNfC²œÐƒ#Öªg-+*.䬞zõêÊS bR-®xœ31…œÌ$†d9¡G¬UÏZVT\ÈY=õêÕ•§›: ªxœ340031QpöpôswõñwgvÈŸ4§8¨ÚsŽþ‰eqU§›lOÜ1„(òñtvõ veØ­c­R¦Ã½GžãÁï§ym·W\˜U⛟Rš“š–™“Êp¬4cäËàº[w•î.h~Ðn•TU«£‹¯«^nbQvJ~yCIµÚC~¢«ÃÞ©íß+®§4¦41bÜÏã Ÿ÷»fN6ªzxÜ~¿£Lß/ PÈÉLb˜"³°k«Âm­ë~Ñj~ý¿Z£{"[\šÌ }u½Å™›F_²žÎpÛûûóº)?u=eÇî€nxœáÿúúÇ#ãúæÚêyËçË" ;,ÎV3JV‘ÛÿS,©xœ340031Q(.HMŽÏHÍ)H-Ò+Jb°Ää~Ø5&2ãKáóˆMõ½“t°5¸L.ðg,N­ÅÙ:Ø{}€±c À-z¯ÉE°Am÷n`mjÂı?.9HÔËi€­¢­µ“ªý®‚ŽJA^£»^­¦iJô‘2±¾[ Ÿª°zÈÖ›¼Øœ ©Rf àéud/«´ŽZWB7è ÖCwž¤íÌ9yŽlº%‚mã¤=©†Cô\ñG@_Tâô»@"Ò‹´@V,p›Y±TÏYùûXâ9ÝíÒ¼Ì6¶;¬·ù]VfÛ\ªHóÜgùÝ$ñÈzw~f@ž££&QÑå­ý„ Žjn¹G¦uGèìy#FàÈï9ÌÇ ‚Ö¨÷u<Ö¿ì$J}LL¼ˆ¼x•±N1 †÷<…·,ô®°*¨Ì ÄŽ|©ÛåœÈq*OO®­Éùÿ~ûc Ú³¸“@¼RŠ„lÍ‘¤„Ä`×Ým·^‰»¶«ú$møñ4 ›%úPÒHêJàPT0vLzoM ޏP‹etžà¦[[cJG” ìÓR œdľ²¤L¢ì[ßkÍ™tÊT¬ÙQq²ÎËm6Ï/[³¥}`•Jð³ÿ/ PÀW ò¹‘ ÆBXã/”#ÆÚšà­Ó¾|wf.7%Uq'‡³F¯šË]ß‚ú:t.ý¢GgW}>r±Üîü$§ÍÏ?ÃßñkœmoxœÛüƒy‚‹¡žÞF­IŒ%˜\¹“xœµVMoÜ6½óWL’Ã:‹õî]@‚¢M ø´.z(id¦H…Þ¨Eÿ{IíZvARbá5´3oÞ›/jŠÓÄá²±V³4â݃#Äî•a’mˆRÓbE“träÀΓ4MÎNì‚bO½u4eH óÄ~/ij±“W^\.GˆÅbž´µžõL]Ô‘2€5ˆ•Ýé8¨v 1GyËd¬¥V+ss"#°TfqYÝ©.ñëTß«6ê°§ëAyj£v¼W°0‹£Ò´L§™Â Ã9  ÊšÃ"“–-S³N\åDzö_?ËqÒ,ÎÊ·Ûk$•žŽlBÆÝn+!Çñ§¨Ó¦¤ðpâuX€7Ŭ¤¯ªÔÞð1龨䑽ù-u¶˜á·,ô¢2P»£üu'½{OÁE~û¥áh×üyÒÖƒ™‡~ÑärŠûñ”¼ªú¹ËAi9û–^_™NµHCÉc‚¥{X&‚¶-{/ÝüzåÛK+ØÌÞÐ_#ïø± 
;:2¥ùoñgÓ­-Ù»¨`[ƒ‰©½V-wuãXvO«[Üž÷!©Ód¨“ÈÔè¯Ll)E’ŽfÈ)|FèUØx´[š מ7üØ7ˆv¶½åP£ÂWJúý¢mPªÌ²„£î)ɽÔ>iþ}e—[br =¯R à *ÓÄÖ7Ð6ÛXÜMÝ̵ÂíS …øx».E@Sn°,ššrè´Jè§TW'Ùâ¯$?Õyc¦d.v”×YÞJ%€i'¡²÷ ÙðQÙh®ZqDé[ŒG§´gþ‡Ê76 ç´$ÒÜ)ÄVþ5R±s×k.‹Oõ¥ƒ‹_iôò†ÏûóDäÚ<ÜÈ~S- ö=:HÌff¿ÙÑR§9Ý(øÐ§h&ÏJ¯´¯ÍØ=Zšn€ªn(š‘Ö+jÅ+AØ©ž„y¾Œ±»l¹’‡²û •go6X¯2±) ÿ ñ‹ÅMÕ†ûkŒh‹yˆ×`k;®haòÕáp£Â›}kǃìœB¯ ÜØÃôð-#¹+ïÑçÁIdÍ}#À!;{!®ú<|ƒ¼cäƒ}È5ÅDAt‹÷ˆkl e†GÛEb´E áîÆ»²ò¦  üû›c6P´K ¿âvd°~wdË«Ñý T­b4*è¾(Rvë?¤|¾ÒàáÏ,‡‘MÒ¸ŽÞ¤|ÍÎ?‘w°Áˆ<“E Ä a †ï[…lábÇ:+42\ÐSI¡Ê€9Ù#.ÚÂä’ѹ¿ì½Ô'rü„⹄xœuUÛnÛ8}÷W š%€W}7¶HÝn E‚6èK´8–¸¥H•«Þmÿ½3$mËq‘47Μ™9tø=*‡P q0¼œÐ…m5›5Zx÷I¾XÜÅbñÁZÂÀŸ'ºÀ, ˆ&D¡aU w1ëôVFpg‚Õ¶evºF­-ÿÁÐ)m½ºlÎÊ¿0 ë•A.†®Î⢼#q}á¿Á0v"€òl‰—pã³ ø¸ú› 6Y¸µÑ%è·Ù´Dc‡¡±¦Á!€]zÑ#•æ¬ÏFÑP8çw¾»|ÖäI€÷Cth˜>2'<=×üÍMTŒlM‹ç›‹‚ª:×Z™FGI˸)zHêòAë Ž{ó’CÒ'’3£/x÷|ÇtXKÉÑ…Sسê-ݨ¾-]f²S•1õÃèT ©®IÔ3_;º«½}¢æì †'xœ;)pƒ{B@@n~JiNª‚^I~N~z%—(+8æä(”%攦+”d$–($¥*$ççg¦¤¥¦(¨—•¦ª+$U*”¤–(d敤å%æOÞÌèkEHwP!íߘb$üò‹rs2«RœòósRó †M®a^Ï ²YGlÂf–@æÉר)ßIÏã€xœ»Áý–Ë57?¥4'U! ´  µ$‚KA*Z’™䥿¥p0ª:++¼••S~~NjbÞF_V2Æ­.µ xœUÍ¿ € €ñýžâ6G÷£©'h ÚÂôÁÌN…?é4ÿøø„÷ê…Q¥š—ùеø —m l¢ù»NFÌÊ…EØ`rÆá¢á¢þ®±ûc3@ôÑ†êø¡‰hlïŽNøÚ5Pîˆpxœ;)ðQ¡(µ°4³(UA½ ´  µ$¾B¿´$3G?)??'51OkB€\f^rNiJªBXE„•U(P…••DÉä,v³Éר‡WÑï‰Bxœ;)ð‚«>7?¥4'U! ´  µ$‚KA*Z’™cÍ•š—ÂÅ…ªÄÊ $ee唟Ÿ“š˜‡ÐáŸW’Ÿ“Ÿ^ÉRPPVpÌÉQ(KÌ)M-V(ÉH,QH,JUHÎÏ+ÎLI-JMQP/)*MUQHª„š¬™W’Z”—˜S“AC® “WG¢Ä/3'µç‡Qs ò7­NgeX• È׺b¡»Ì —"­jµyÈø“0ϧL`¬ÛÌ–f’±Œ@[÷ 3¹â`W»OØdЊâƒÃnâãŒ&Z£‚“Ƕ’1^RãÊÝn%˜êmã†j½Fuô\»ÝxúŸ×!$Yf Qóß°›ì¹nn—^a}ß∠äd¹`Ý{+kÓ¥x¼7òMÔ¶`Óc«Ó›Œ^àñÏl¡h# <(m(ÓxŸm Ö°Ãâú§*†Ç \/­­;nÊ+>ûÜÒ¥¬O¶g3Ò06ݶø K7á†?xœ5Ž1NA E% Š4HœàŸ€‚“P83^ÆÂkf<‰–†+p¤”9=»Ò¦ïÿ÷ûü÷ô3}¿Z9P¨CÝ;ë‚Ì“gˆ7#E,•;.ERA"ÃLŸ ó6“Ê—ØNîÊd‡3éXI±]!˨ÍÏ’¹ud™&ICãïE:Òèqôy#*·XÖ˺jQ(÷} qÛ&ÝU)1N{ãvM)Æš¸ ÷ëëÃíñ݈Tëà xœÛüƒy‚«¡žžÁF퉌)j 4xœÛüƒy‚úF퉌Ý »xœ+J-,Í,JUP/(-(H-ÉIL*Ž/.HMŽÏHÍ)H-Ò/JÌN/I,Î.Vwì³xœ+J-,Í,JUP/(-(H-ÉIL*Ž/.HMŽÏHÍ)H-ÒÏÍO)ÍIER3´,xœ‘ÑKÃ0ÆßûWó!Œn´0AÁ÷½Yz[«YR/ :¶ýï^Ò–‰K›¯ßå»ã—–ð-Ô„ \ƒjU¡nDF}µ Mƒ~ÒåºßOÖÖj”FdY‰NQ½FX¤HQ,ºLQ<¶!(m Ñð t6Â`gÕëPô1‚Ç\X¿ ò¼Ý.WJKçrƒïç~˜ßÃyVlä{ žŽ¡hEDl÷èØñ*à9G©*&‚£¤í‘›jWÙ KسE¸:ð^^›ø)ü ¤Ksí9âÕAæ)>äð(ïÌç)›rhʬ_—°‘ÚE¶NEÒHg,;cãk0%nÆ`jý_Ð4õ¯¤)|U$±j-€uÊfÊ:cáîúöæ7ÂäSx{‘ô ~4íïþI §œûIÖWHd hvhüStßÀùù.âÓÎ*xœm‘ÉŽ›@Dï|EYc7‹’( fñÂbƒáÆÒŒ¡hOÇ|}f’S§ÒS©TR‘ì†s D¨J¢"bµàU>¿ Wœ.3)-° ¯`– ‚ªY!+}öä¤mkÊÑ´p¾šÃOWâ b¨q g|+kús$-¦UÝ•cÝÕ#Òû¼ÃôàÅ¥¬ð«•Á T!ä¸#¾ãtÄÿ _>¥Öƾåƒ`c¹(<?œ;ãa¬I÷ ¬îá[ 'Í-ßÀ»0‡s^_ÖéÀêî+§ìÄýÈùVHÈ}/ ¢´],ʾ¤ŸdN†’ãꃑki¨D–vØD\ì"[C›†ómùn§dHU{WÅ- o;Ç|/»©è™«sC#Ú ²B¯§¦Øê8Ì`T¢6T ¥S¯„™²MlÓËÈ£}<ùòÄöcè„A³:Ýb.»yÌO¼,2×ÁDÚÀD½±gŒGIxTÖcS¬.«ËæœÎÔüÔ:Ð/Ø9xœõ¸3Μ»Ä¡ü暌=ì©1Þ6k¹â§bSWä—yìŦñ‡vÕS,lOƒ’Hy|Q{Û2÷õýRêáŒsã¬úJV³l;,ÓДÙ2޶À¶äŽÒ%)=êÓÈ{¢Yµ“ÝØÏýÎ X‚.ã$$—õ°¼•‚xÕßÎq¨M‹¿çƒä‹¡J‰z ‹VtQÐ-«>Înµ]ûe¤Ä›ïÜw–4Ñßë wý¿ãÿ¼ÖœÆ+xœm‘K“š@„ïüŠ9&e­Ž(¯­l*âˆDä¡Þž:°0ÂʯÏnrÉ!}믺ººª«¨ 1"œ_E… ’ë RE ¼*štMäˆ@B$åJ•k"ðgMA\Ýï98IkcøÒÄÓ/—Ò k“S¼ŒFø‘æüW[Ý)Ïr–¶9Ë[ÞÛ˜QþLg’4ç¢6/P…P\z£¤¥ÿ”¾|I7±e;àhay¾kþáB@›6¯Ø+ÀìáàÉŽÄØÁ"èÄ1OUðmAš>gßãs/eü3ç`¯ªn-xçõëd’Ö)ÿ"ãªI!?˜±ŽŽR„õÃúðÌ­¡jmêCR·2ýK8ê-då:*p˺½ŒüSë]¦)7ŸÂÇVâÇÛR#VéÈÅy8¹ÆÑÓø1iݶ~F½ÓÚûÚ# iý˜¨ud‘ã`۽؊ߔF“®Õ:ã”­ûý¾P¼ÜNsµñ‡õ9À,ª½2WO9ÝU«Å@CHvrÚØ®pNå\#U{ ÏìÔû[ö ¶{#š1*Äv2[f§÷Câ:’ɇv߉î–¹™"lÌÝpª‡îû6Ó5íüUçŠÊ—Áe9³î¡{†õÙ(OxÈ÷åú#™mVȨ²Þ}Ê¡°´4©õnç^ Zv/+Ñ~•÷¾~¸_Ñ×iB5‹¨|ÇÎPß3ÝZÄÅÜ”ñ›ðD…ÿ÷zÓ^üïøßâëÛŽÁ(xœm‘Yo›@„ß÷WðØÊJ¼\ÆTMU0s˜Ë†Þ`Y,Çÿú$m;O£‘f4ÒGò#F”a”炘a>Ki%ðÌ%¼âK6…Œ3—+$#@ç3ˆ\. 
4«ö>Ã/WáQŠ¡Á-s¨qN˜ŸUCä‚iÝ´ÕØ´ÍH‡ìüÜbú‹ay‰“×<ÇIÌ”  ÄgœøßàÓ—TÝ0]Æ7|fo®rˆBýOb<Œ i0F{õ fâ>;d¾n´tšözÿ@èHUU©C ¬­–¾¢KWC|§­¥A ·K¾xKû3MêÄ‘Kg¸Æ+h6Kåú²''‹^U/=zs-½×}4±Ó&çT-6ãÝ_$ÉtžC"¨2u»à`æÝ‰×|³v@RzbTLi¯¤î&Œ;6ìûÒ½5Žm#‡g-F<³JÊZ4|hHrÑP¬ðÂ8ÝA¥EŽRc–S#=ä®Ý>ÿÔ—3©­vË/½ «„hc¿Ûýޤå­L\ïÕ‹Í¢ê£\ÛÊódú†`Çs_9[÷2•oB¬FEŸ¾j±—­$ªÝ—öVx1…Ãqïl¸Ù¾múb6 PEœÎ×¼ËÉq—Mr²pwbäû™æq¿*æ¡ÑOµDDx£E§öù&› “4ì‚âIm}êŽd«­·¸Î®çd¾Ó¶:—ôÎKÔ ÙŒP}YÒÕà›×­'Z›¤Ò#?Æ_š–ûü?–Û·Ér5Ã!ˤö÷Œjay¬Ÿ•ÉÎ%puppet-boolean.git/info/0000755000175000017510000000000012452553372015024 5ustar adrienadrienpuppet-boolean.git/info/exclude0000644000175000017510000000036012452553372016377 0ustar adrienadrien# git ls-files --others --exclude-from=.git/info/exclude # Lines that start with '#' are comments. # For a project mostly in C, the following would be a good set of # exclude patterns (uncomment them if you want to use them): # *.[oa] # *~ puppet-boolean.git/description0000644000175000017510000000011112452553372016330 0ustar adrienadrienUnnamed repository; edit this file 'description' to name the repository. puppet-boolean.git/packed-refs0000644000175000017510000000152012452553372016176 0ustar adrienadrien# pack-refs with: peeled fully-peeled 157011a4eaa27f1202a9d94335ee4876b26d377e refs/heads/master baa30e4d34b83187624335236cc91ecb18d9ceff refs/pull/2/head f256d7962192bf2ec4fd7b2a768ff50661b64c77 refs/pull/2/merge ddf05a0b087206d4f863b42061973d5efc0f8856 refs/pull/3/head 409029af689a86327a6bdb85039ea4beb68b64f9 refs/pull/4/head 00a56df077d218bc1b708b2e059cbf7385b8309e refs/pull/4/merge c87322a3a82c97af20372557c0abdb7213a1b18c refs/pull/5/head 64e8f4d6c6ab54d5af458a0979f6ce7c2ddb3d59 refs/tags/0.9.0 ^3084373e8d181cf2fea5b4ade2690ba22872bd67 799b694b42943fc4de2f28ca7b207961d9d07bc8 refs/tags/0.9.0-rc1 ^204df780febfd30e782a0d795df6ba0aa57de7df 1f24e410b6184b5a04878c87fa96d835c2c805f9 refs/tags/1.0.0 ^14cbb45ae3a5f764320b7e63f1a54a25a1ef6c9c b231ab607dacae3ae9668b4b54cf55470233f4ec refs/tags/1.0.1 ^01d127cba5a4046beb6e6dfb0b83be49f1014b49 r10k-4.0.2/spec/fixtures/module/000077500000000000000000000000001460033767200163725ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/module/forge/000077500000000000000000000000001460033767200174745ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/module/forge/bad_module/000077500000000000000000000000001460033767200215675ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/module/forge/bad_module/metadata.json000066400000000000000000000000171460033767200242400ustar00rootroot00000000000000I am bad JSON! r10k-4.0.2/spec/fixtures/module/forge/eight_hundred/000077500000000000000000000000001460033767200223055ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/module/forge/eight_hundred/Modulefile000066400000000000000000000005011460033767200243110ustar00rootroot00000000000000name 'branan-eight_hundred' version '8.0.0' source 'https://github.com/branan/puppet-module-eight_hundred' author 'Branan Purvine-Riley' license 'Apache License 2.0' summary '800 modules! WOOOOOOO!' description '800 modules! WOOOOOOOOOOOOOOOOOO!' project_page 'https://github.com/branan/puppet-module-eight_hundred' r10k-4.0.2/spec/fixtures/module/forge/eight_hundred/metadata.json000066400000000000000000000007461460033767200247670ustar00rootroot00000000000000{ "checksums": { "Modulefile": "1e780d794bcd6629dc3006129fc02edf" }, "license": "Apache License 2.0", "types": [ ], "version": "8.0.0", "dependencies": [ ], "summary": "800 modules! WOOOOOOO!", "source": "https://github.com/branan/puppet-module-eight_hundred", "description": "800 modules! 
WOOOOOOOOOOOOOOOOOO!", "author": "Branan Purvine-Riley", "name": "branan-eight_hundred", "project_page": "https://github.com/branan/puppet-module-eight_hundred" }r10k-4.0.2/spec/fixtures/tarball/000077500000000000000000000000001460033767200165265ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/tarball/tarball.tar.gz000066400000000000000000000007001460033767200212730ustar00rootroot00000000000000‹ƒö“aí–ÉNÃ0†sîSXéï–zã¯`R—â$JœŠE¼;N“.€h¡’ƒPý]F²'žIþÌx`y!$9k+z‹ëíÀ 3&%Å’„±à2ÜjQÔ6VÕ.•Zgs³úÞϹ-ÎÞckÿ 0¹m«JÛE–k_1Ü÷ŒПNŒ9&Rb§?RDùJhŸ3× nÊôì~‚É_g˜mÕ\Yš²ðãXý»¦¿«&\ý3JH¨ÿ1xÊèxbãZÁ¥ûw*Ïã‹n'ÏR]4ëÍ«J¥K ®û@ ê]Vºn²²è\0D›Õ¦5FÕÏÝꇛ²­ÓõyKk«Y’è'eª\ô4Éà …÷/ýsݤuVÙ!ćÃTk—e½É<ž¼…æõ[`ÒT:õ;þpþëê_ ×(&\Ò0ÿLt±Êê²0º°®‹Cïx"Gç?ºÕŸ"ÔéÏ óß(LA×:ÁçŸ 4Ò3aèÿØgŒ£õ¿ßÿ©ìú¿`¡þGaПúŒqŠþýÇ`ПøŒqTüeþ õ?î²·îÒ÷} œïí°¦/r10k-4.0.2/spec/fixtures/unit/000077500000000000000000000000001460033767200160645ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/action/000077500000000000000000000000001460033767200173415ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/action/r10k.yaml000066400000000000000000000001731460033767200210030ustar00rootroot00000000000000--- cachedir: /config_cachedir deploy: generate_types: /config_generate_types puppet_path: /config_puppet_path r10k-4.0.2/spec/fixtures/unit/action/r10k_cachedir.yaml000066400000000000000000000000411460033767200226170ustar00rootroot00000000000000--- cachedir: /config_cachedir r10k-4.0.2/spec/fixtures/unit/action/r10k_creds.yaml000066400000000000000000000005031460033767200221600ustar00rootroot00000000000000--- git: private_key: '/global/config/private/key' oauth_token: '/global/config/oauth/token' repositories: - remote: 'git@myfakegitserver.com:user/repo.git' private_key: '/config/private/key' - remote: 'https://myfakegitserver.com/user/repo.git' oauth_token: '/config/oauth/token' r10k-4.0.2/spec/fixtures/unit/action/r10k_forge_auth.yaml000066400000000000000000000001241460033767200232020ustar00rootroot00000000000000--- forge: baseurl: 'http://private-forge.com' authorization_token: 'faketoken' r10k-4.0.2/spec/fixtures/unit/action/r10k_forge_auth_no_url.yaml000066400000000000000000000000561460033767200245640ustar00rootroot00000000000000--- forge: authorization_token: 'faketoken' r10k-4.0.2/spec/fixtures/unit/action/r10k_generate_types.yaml000066400000000000000000000000711460033767200240760ustar00rootroot00000000000000--- deploy: generate_types: /config_generate_types r10k-4.0.2/spec/fixtures/unit/action/r10k_logging.yaml000066400000000000000000000002521460033767200225070ustar00rootroot00000000000000--- logging: level: FATAL outputs: - type: file parameters: filename: r10k.log - type: syslog disable_default_stderr: true r10k-4.0.2/spec/fixtures/unit/action/r10k_puppet_path.yaml000066400000000000000000000000631460033767200234120ustar00rootroot00000000000000--- deploy: puppet_path: /config_puppet_path r10k-4.0.2/spec/fixtures/unit/puppetfile/000077500000000000000000000000001460033767200202415ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/argument-error/000077500000000000000000000000001460033767200232125ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/argument-error/Puppetfile000066400000000000000000000001511460033767200252470ustar00rootroot00000000000000mod 'branan/eight_hundred', '1.0.0', :git => 'https://github.com/branan/eight_hundred', :ref => 'master' 
r10k-4.0.2/spec/fixtures/unit/puppetfile/default-branch-override/000077500000000000000000000000001460033767200247355ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/default-branch-override/Puppetfile000066400000000000000000000002261460033767200267750ustar00rootroot00000000000000mod 'cd4pe', :git => 'test@randomurl.com:something/some_module.git', :ref => 'expected_ref', :default_branch => 'here_lies_the_default_branch' r10k-4.0.2/spec/fixtures/unit/puppetfile/duplicate-module-error/000077500000000000000000000000001460033767200246255ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/duplicate-module-error/Puppetfile000066400000000000000000000004301460033767200266620ustar00rootroot00000000000000forge "http://forge.puppetlabs.com" mod "puppetlabs/stdlib", '4.11.0' mod "puppetlabs/stdlib", '4.12.0' mod "puppetlabs/concat", '2.1.0' mod "otheruser/concat", '2.1.0' mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :branch => 'docs_experiment' r10k-4.0.2/spec/fixtures/unit/puppetfile/forge-override/000077500000000000000000000000001460033767200231605ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/forge-override/Puppetfile000066400000000000000000000003161460033767200252200ustar00rootroot00000000000000forge "my.custom.forge.com" mod "puppetlabs/stdlib", '4.12.0' mod "puppetlabs/concat", '2.1.0' mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :branch => 'docs_experiment' r10k-4.0.2/spec/fixtures/unit/puppetfile/invalid-syntax/000077500000000000000000000000001460033767200232135ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/invalid-syntax/Puppetfile000066400000000000000000000001151460033767200252500ustar00rootroot00000000000000mod 'branan/eight_hundred' :git => 'https://github.com/branan/eight_hundred' r10k-4.0.2/spec/fixtures/unit/puppetfile/load-error/000077500000000000000000000000001460033767200223075ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/load-error/Puppetfile000066400000000000000000000000271460033767200243460ustar00rootroot00000000000000require 'a-shrubbery!' 
r10k-4.0.2/spec/fixtures/unit/puppetfile/name-error/000077500000000000000000000000001460033767200223105ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/name-error/Puppetfile000066400000000000000000000001071460033767200243460ustar00rootroot00000000000000mod 'branan/eight_hundred', undefined_lookup_function('eight_hundred') r10k-4.0.2/spec/fixtures/unit/puppetfile/valid-forge-with-version/000077500000000000000000000000001460033767200250745ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/valid-forge-with-version/Puppetfile000066400000000000000000000000361460033767200271330ustar00rootroot00000000000000mod 'puppetlabs/apt', '2.1.1' r10k-4.0.2/spec/fixtures/unit/puppetfile/valid-forge-without-version/000077500000000000000000000000001460033767200256245ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/valid-forge-without-version/Puppetfile000066400000000000000000000000251460033767200276610ustar00rootroot00000000000000mod 'puppetlabs/apt' r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/000077500000000000000000000000001460033767200233775ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/Puppetfile000066400000000000000000000006511460033767200254410ustar00rootroot00000000000000mod 'puppetlabs/apt', '2.1.1' mod 'puppetlabs/stdlib', :latest mod 'puppetlabs/concat' mod 'puppetlabs/rpm', '2.1.1-pre1' mod 'foo', git: 'this/remote', branch: 'main' mod 'bar', git: 'this/remote', tag: 'v1.2.3' mod 'baz', git: 'this/remote', commit: '123abc456' mod 'fizz', git: 'this/remote', ref: '1234567890abcdef1234567890abcdef12345678' mod 'buzz', git: 'this/remote', ref: 'refs/heads/main' mod 'canary', local: true r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/Puppetfile.new000066400000000000000000000006511460033767200262310ustar00rootroot00000000000000mod 'puppetlabs/apt', '3.0.0' mod 'puppetlabs/stdlib', :latest mod 'puppetlabs/concat' mod 'puppetlabs/rpm', '2.1.1-pre1' mod 'foo', git: 'this/remote', branch: 'main' mod 'bar', git: 'this/remote', tag: 'v1.2.3' mod 'baz', git: 'this/remote', commit: '123abc456' mod 'fizz', git: 'this/remote', ref: '1234567890abcdef1234567890abcdef12345678' mod 'buzz', git: 'this/remote', ref: 'refs/heads/main' mod 'canary', local: true r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/000077500000000000000000000000001460033767200250475ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/apt/000077500000000000000000000000001460033767200256335ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/apt/.gitkeep000066400000000000000000000001171460033767200272630ustar00rootroot00000000000000This only exists so the directory can be committed to git for testing purposes.r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/baz/000077500000000000000000000000001460033767200256235ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/baz/.gitkeep000066400000000000000000000001171460033767200272530ustar00rootroot00000000000000This only exists so the directory can be committed to git for testing purposes.r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/buzz/000077500000000000000000000000001460033767200260415ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/buzz/.gitkeep000066400000000000000000000001171460033767200274710ustar00rootroot00000000000000This only exists so the directory can be 
committed to git for testing purposes.r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/canary/000077500000000000000000000000001460033767200263245ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/canary/.gitkeep000066400000000000000000000001171460033767200277540ustar00rootroot00000000000000This only exists so the directory can be committed to git for testing purposes.r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/fizz/000077500000000000000000000000001460033767200260315ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/fizz/.gitkeep000066400000000000000000000001171460033767200274610ustar00rootroot00000000000000This only exists so the directory can be committed to git for testing purposes.r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/rpm/000077500000000000000000000000001460033767200256455ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/puppetfile/various-modules/modules/rpm/.gitkeep000066400000000000000000000001171460033767200272750ustar00rootroot00000000000000This only exists so the directory can be committed to git for testing purposes.r10k-4.0.2/spec/fixtures/unit/util/000077500000000000000000000000001460033767200170415ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/000077500000000000000000000000001460033767200210075ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/000077500000000000000000000000001460033767200232445ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/expected_1000066400000000000000000000000001460033767200251760ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/000077500000000000000000000000001460033767200264305ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/managed_symlink_file000077700000000000000000000000001460033767200346662../expected_1ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/subdir_allowlisted_2/000077500000000000000000000000001460033767200325445ustar00rootroot00000000000000ignored_1000066400000000000000000000000001460033767200342450ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/subdir_allowlisted_2r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/subdir_expected_1000066400000000000000000000000001460033767200317320ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/subdir_unmanaged_1000066400000000000000000000000001460033767200320700ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/unmanaged_symlink_dir000077700000000000000000000000001460033767200355472../../managed_twoustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/managed_symlink_dir000077700000000000000000000000001460033767200322712managed_subdir_1ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/unmanaged_1000066400000000000000000000000001460033767200253340ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_one/unmanaged_symlink_file000077700000000000000000000000001460033767200316322expected_1ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_two/00007750000000000000000000000000146003376720023274
5ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_two/.hidden/000077500000000000000000000000001460033767200246055ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_two/.hidden/unmanaged_3000066400000000000000000000000001460033767200266770ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_two/expected_2000066400000000000000000000000001460033767200252270ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/purgeable/managed_two/unmanaged_2000066400000000000000000000000001460033767200253650ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/subprocess/000077500000000000000000000000001460033767200212315ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/subprocess/runner/000077500000000000000000000000001460033767200225425ustar00rootroot00000000000000r10k-4.0.2/spec/fixtures/unit/util/subprocess/runner/no-execute.sh000066400000000000000000000000761460033767200251550ustar00rootroot00000000000000#!/bin/bash echo "This was supposed to be non-executable..." r10k-4.0.2/spec/integration/000077500000000000000000000000001460033767200155575ustar00rootroot00000000000000r10k-4.0.2/spec/integration/git/000077500000000000000000000000001460033767200163425ustar00rootroot00000000000000r10k-4.0.2/spec/integration/git/rugged/000077500000000000000000000000001460033767200176175ustar00rootroot00000000000000r10k-4.0.2/spec/integration/git/rugged/bare_repository_spec.rb000066400000000000000000000005541460033767200243720ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/rugged/bare_repository' describe R10K::Git::Rugged::BareRepository, :if => R10K::Features.available?(:rugged) do include_context 'Git integration' let(:dirname) { 'bare-repo.git' } subject { described_class.new(basedir, dirname) } it_behaves_like 'a git repository' it_behaves_like 'a git bare repository' end r10k-4.0.2/spec/integration/git/rugged/cache_spec.rb000066400000000000000000000015711460033767200222250ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/rugged/cache' describe R10K::Git::Rugged::Cache, :if => R10K::Features.available?(:rugged) do include_context 'Git integration' let(:dirname) { 'working-repo' } let(:remote_name) { 'origin' } subject { described_class.new(remote) } context "syncing with the remote" do before(:each) do subject.reset! 
end describe "with the correct configuration" do it "is able to sync with the remote" do subject.sync expect(subject.synced?).to eq(true) end end describe "with a out of date cached remote" do it "updates the cached remote configuration" do subject.repo.update_remote('foo', remote_name) expect(subject.repo.remotes[remote_name]).to eq('foo') subject.sync expect(subject.repo.remotes[remote_name]).to eq(remote) end end end end r10k-4.0.2/spec/integration/git/rugged/thin_repository_spec.rb000066400000000000000000000006221460033767200244170ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/rugged/thin_repository' describe R10K::Git::Rugged::ThinRepository, :if => R10K::Features.available?(:rugged) do include_context 'Git integration' let(:dirname) { 'working-repo' } let(:cacherepo) { R10K::Git::Rugged::Cache.generate(remote) } subject { described_class.new(basedir, dirname, cacherepo) } it_behaves_like "a git thin repository" end r10k-4.0.2/spec/integration/git/rugged/working_repository_spec.rb000066400000000000000000000031101460033767200251300ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/rugged/working_repository' describe R10K::Git::Rugged::WorkingRepository, :if => R10K::Features.available?(:rugged) do include_context 'Git integration' let(:dirname) { 'working-repo' } subject { described_class.new(basedir, dirname) } it_behaves_like 'a git repository' it_behaves_like 'a git working repository' describe "checking out an unresolvable ref" do it "raises an error indicating that the ref was unresolvable" do expect(subject).to receive(:resolve).with("unresolvable") expect { subject.checkout("unresolvable") }.to raise_error(R10K::Git::GitError, /Unable to check out unresolvable ref 'unresolvable'/) end end context "checking out a specific SHA" do let(:_rugged_repo) { double("Repository") } before do subject.clone(remote) allow(subject).to receive(:with_repo).and_yield(_rugged_repo) allow(subject).to receive(:resolve).and_return("157011a4eaa27f1202a9d94335ee4876b26d377e") end describe "with force" do it "does not receive a checkout call" do expect(_rugged_repo).to_not receive(:checkout) expect(_rugged_repo).to receive(:reset) subject.checkout("157011a4eaa27f1202a9d94335ee4876b26d377e", {:force => true}) end end describe "without force" do it "does receive a checkout call" do expect(_rugged_repo).to receive(:checkout) expect(_rugged_repo).to_not receive(:reset) subject.checkout("157011a4eaa27f1202a9d94335ee4876b26d377e", {:force => false}) end end end end r10k-4.0.2/spec/integration/git/shellgit/000077500000000000000000000000001460033767200201555ustar00rootroot00000000000000r10k-4.0.2/spec/integration/git/shellgit/bare_repository_spec.rb000066400000000000000000000005051460033767200247240ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/shellgit/bare_repository' describe R10K::Git::ShellGit::BareRepository do include_context 'Git integration' let(:dirname) { 'bare-repo.git' } subject { described_class.new(basedir, dirname) } it_behaves_like 'a git repository' it_behaves_like 'a git bare repository' end r10k-4.0.2/spec/integration/git/shellgit/thin_repository_spec.rb000066400000000000000000000005551460033767200247620ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/shellgit/thin_repository' describe R10K::Git::ShellGit::ThinRepository do include_context 'Git integration' let(:dirname) { 'working-repo' } let(:cacherepo) { R10K::Git::ShellGit::Cache.generate(remote) } subject { described_class.new(basedir, dirname, cacherepo) 
} it_behaves_like "a git thin repository" end r10k-4.0.2/spec/integration/git/shellgit/working_repository_spec.rb000066400000000000000000000005151460033767200254740ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/shellgit/working_repository' describe R10K::Git::ShellGit::WorkingRepository do include_context 'Git integration' let(:dirname) { 'working-repo' } subject { described_class.new(basedir, dirname) } it_behaves_like 'a git repository' it_behaves_like 'a git working repository' end r10k-4.0.2/spec/integration/git/stateful_repository_spec.rb000066400000000000000000000120561460033767200240330ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git' require 'r10k/git/stateful_repository' describe R10K::Git::StatefulRepository do include_context 'Git integration' let(:dirname) { 'working-repo' } let(:cacherepo) { R10K::Git.cache.generate(remote) } let(:thinrepo) { R10K::Git.thin_repository.new(basedir, dirname, cacherepo) } let(:ref) { '0.9.x' } subject { described_class.new(remote, basedir, dirname) } describe 'status' do describe "when the directory does not exist" do it "is absent" do expect(subject.status(ref)).to eq :absent end end describe "when the directory is not a git repository" do it "is mismatched" do thinrepo.path.mkdir expect(subject.status(ref)).to eq :mismatched end end describe "when the directory has a .git file" do it "is mismatched" do thinrepo.path.mkdir File.open("#{thinrepo.path}/.git", "w") {} expect(subject.status(ref)).to eq :mismatched end end describe "when the repository doesn't match the desired remote" do it "is mismatched" do thinrepo.clone(remote, {:ref => '1.0.0'}) allow(subject.repo).to receive(:origin).and_return('http://some.site/repo.git') expect(subject.status(ref)).to eq :mismatched end end describe "when the wrong ref is checked out" do it "is outdated" do thinrepo.clone(remote, {:ref => '1.0.0'}) expect(subject.status(ref)).to eq :outdated end end describe "when the ref is a branch and the cache is not synced" do it "is outdated" do thinrepo.clone(remote, {:ref => ref}) cacherepo.reset! 
expect(subject.status(ref)).to eq :outdated end end describe "when the ref can't be resolved" do let(:ref) { '1.1.x' } it "is outdated" do thinrepo.clone(remote, {:ref => '0.9.x'}) expect(subject.status(ref)).to eq :outdated end end describe "when the workdir has local modifications" do it "is dirty when workdir is up to date" do thinrepo.clone(remote, {:ref => ref}) File.open(File.join(thinrepo.path, 'README.markdown'), 'a') { |f| f.write('local modifications!') } expect(subject.status(ref)).to eq :dirty end it "is dirty when workdir is not up to date" do thinrepo.clone(remote, {:ref => '1.0.0'}) File.open(File.join(thinrepo.path, 'README.markdown'), 'a') { |f| f.write('local modifications!') } expect(subject.status(ref)).to eq :dirty end end describe "when the workdir has spec dir modifications" do before(:each) do thinrepo.clone(remote, {:ref => ref}) FileUtils.mkdir_p(File.join(thinrepo.path, 'spec')) File.open(File.join(thinrepo.path, 'spec', 'file_spec.rb'), 'a') { |f| f.write('local modifications!') } thinrepo.stage_files(['spec/file_spec.rb']) end it "is dirty with exclude_spec false" do expect(subject.status(ref, false)).to eq :dirty end it "is insync with exclude_spec true" do expect(subject.status(ref, true)).to eq :insync end end describe "if the right ref is checked out" do it "is insync" do thinrepo.clone(remote, {:ref => ref}) expect(subject.status(ref)).to eq :insync end end end describe "syncing" do describe "when the ref is unresolvable" do let(:ref) { '1.1.x' } it "raises an error" do expect { subject.sync(ref) }.to raise_error(R10K::Git::UnresolvableRefError) end end describe "when the repo is absent" do it "creates the repo" do subject.sync(ref) expect(subject.status(ref)).to eq :insync end end describe "when the repo is mismatched" do it "removes and recreates the repo" do thinrepo.path.mkdir subject.sync(ref) expect(subject.status(ref)).to eq :insync end end describe "when the repo is out of date" do it "updates the repository" do thinrepo.clone(remote, {:ref => '1.0.0'}) subject.sync(ref) expect(subject.status(ref)).to eq :insync end end describe "when the workdir is dirty" do before(:each) do thinrepo.clone(remote, {:ref => ref}) File.open(File.join(thinrepo.path, 'README.markdown'), 'a') { |f| f.write('local modifications!') } end context "when force == true" do let(:force) { true } it "warns and overwrites local modifications" do expect(subject.logger).to receive(:warn).with(/overwriting local modifications/i) subject.sync(ref, force) expect(subject.status(ref)).to eq :insync end end context "when force != true" do let(:force) { false } it "warns and does not overwrite local modifications" do expect(subject.logger).to receive(:warn).with(/skipping.*due to local modifications/i) subject.sync(ref, force) expect(subject.status(ref)).to eq :dirty end end end end end r10k-4.0.2/spec/integration/util/000077500000000000000000000000001460033767200165345ustar00rootroot00000000000000r10k-4.0.2/spec/integration/util/purageable_spec.rb000066400000000000000000000034131460033767200222030ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/purgeable' require 'r10k/util/cleaner' require 'tmpdir' RSpec.describe R10K::Util::Purgeable do it 'purges only unmanaged files' do Dir.mktmpdir do |envdir| managed_directory = "#{envdir}/managed_one" desired_contents = [ "#{managed_directory}/expected_1", "#{managed_directory}/managed_subdir_1", "#{managed_directory}/managed_symlink_dir", "#{managed_directory}/managed_subdir_1/subdir_expected_1", 
"#{managed_directory}/managed_subdir_1/managed_symlink_file", ] FileUtils.cp_r('spec/fixtures/unit/util/purgeable/managed_one/', managed_directory) cleaner = R10K::Util::Cleaner.new([managed_directory], desired_contents) cleaner.purge!({ recurse: true, whitelist: ["**/subdir_allowlisted_2"] }) # Files present after purge expect(File.exist?("#{managed_directory}/expected_1")).to be true expect(File.exist?("#{managed_directory}/managed_subdir_1")).to be true expect(File.exist?("#{managed_directory}/managed_symlink_dir")).to be true expect(File.exist?("#{managed_directory}/managed_subdir_1/subdir_expected_1")).to be true expect(File.exist?("#{managed_directory}/managed_subdir_1/managed_symlink_file")).to be true expect(File.exist?("#{managed_directory}/managed_subdir_1/subdir_allowlisted_2")).to be true # Purged files expect(File.exist?("#{managed_directory}/unmanaged_1")).to be false expect(File.exist?("#{managed_directory}/managed_subdir_1/unmanaged_symlink_dir")).to be false expect(File.exist?("#{managed_directory}/unmanaged_symlink_file")).to be false expect(File.exist?("#{managed_directory}/managed_subdir_1/subdir_unmanaged_1")).to be false end end end r10k-4.0.2/spec/matchers/000077500000000000000000000000001460033767200150425ustar00rootroot00000000000000r10k-4.0.2/spec/matchers/exit_with.rb000066400000000000000000000011121460033767200173660ustar00rootroot00000000000000RSpec::Matchers.define :exit_with do |expected| supports_block_expectations match do |block| actual = nil begin block.call rescue SystemExit => e actual = e.status end actual and actual == expected end failure_message do |actual| "expected exit with code #{expected} but " + (actual.nil? ? " exit was not called" : "we exited with #{actual} instead") end failure_message_when_negated do |actual| "expected that exit would not be called with #{expected}" end description do "expect exit with #{expected}" end end r10k-4.0.2/spec/matchers/match_realpath.rb000066400000000000000000000006711460033767200203470ustar00rootroot00000000000000RSpec::Matchers.define :match_realpath do |expected| match do |actual| actual == expected || realpath(actual) == realpath(expected) end failure_message do |actual| "expected that #{actual} would have a real path of #{expected}" end failure_message_when_negated do |actual| "expected that #{actual} would not have a real path of #{expected}" end def realpath(path) Pathname.new(path).realpath.to_s end end r10k-4.0.2/spec/r10k-mocks.rb000066400000000000000000000001401460033767200154430ustar00rootroot00000000000000require 'r10k-mocks/mock_env' require 'r10k-mocks/mock_source' require 'r10k-mocks/mock_config' r10k-4.0.2/spec/r10k-mocks/000077500000000000000000000000001460033767200151235ustar00rootroot00000000000000r10k-4.0.2/spec/r10k-mocks/mock_config.rb000066400000000000000000000010261460033767200177250ustar00rootroot00000000000000require 'r10k/deployment/config' module R10K class Deployment class MockConfig attr_accessor :hash def initialize(hash) @hash = hash.merge(deploy: {}) end def configfile "/some/nonexistent/config_file" end # Perform a scan for key and check for both string and symbol keys def setting(key) @hash[key] end alias [] setting def settings @hash end def merge(other) hash.merge(other) end end end end r10k-4.0.2/spec/r10k-mocks/mock_env.rb000066400000000000000000000003671460033767200172570ustar00rootroot00000000000000require 'r10k/environment' require 'r10k/util/purgeable' class R10K::Environment::Mock < R10K::Environment::Base include R10K::Util::Purgeable def sync "synced" end def 
status :insync end def signature "mock" end end r10k-4.0.2/spec/r10k-mocks/mock_source.rb000066400000000000000000000010601460033767200177560ustar00rootroot00000000000000require 'r10k/source' require 'r10k-mocks/mock_env' class R10K::Source::Mock < R10K::Source::Base R10K::Source.register(:mock, self) def environments if @_environments.nil? corrected_environment_names = @options[:environments].map do |env| R10K::Environment::Name.new(env, :prefix => @prefix, :invalid => 'correct_and_warn') end @_environments = corrected_environment_names.map { |env| R10K::Environment::Mock.new(env.name, @basedir, env.dirname, { overrides: @options[:overrides] }) } end @_environments end end r10k-4.0.2/spec/shared-contexts/000077500000000000000000000000001460033767200163475ustar00rootroot00000000000000r10k-4.0.2/spec/shared-contexts/git-fixtures.rb000066400000000000000000000023371460033767200213330ustar00rootroot00000000000000require 'archive/tar/minitar' require 'tmpdir' shared_context "Git integration" do # Use tmpdir for cached git repositories before(:all) do @old_cache_root = R10K::Git::Cache.settings[:cache_root] R10K::Git::Cache.settings[:cache_root] = Dir.mktmpdir end after(:all) do FileUtils.remove_entry_secure(R10K::Git::Cache.settings[:cache_root]) R10K::Git::Cache.settings[:cache_root] = @old_cache_root end # Create a mutable path for remote repositories def fixture_path File.join(PROJECT_ROOT, 'spec', 'fixtures', 'integration', 'git') end def remote_path @remote_path end def populate_remote_path Archive::Tar::Minitar.unpack(File.join(fixture_path, 'puppet-boolean-bare.tar'), remote_path) end def clear_remote_path FileUtils.remove_entry_secure(remote_path) end before(:all) do @remote_path = Dir.mktmpdir populate_remote_path end after(:all) do clear_remote_path @remote_path = nil end let(:remote) { Pathname.new(File.join(remote_path, 'puppet-boolean.git')).realpath.to_s } # Create a temp path for the git basedir and clean it up when finished let(:basedir) { Dir.mktmpdir } after do FileUtils.remove_entry_secure(basedir) end end r10k-4.0.2/spec/shared-contexts/tarball.rb000066400000000000000000000020461460033767200203170ustar00rootroot00000000000000require 'tmpdir' require 'fileutils' shared_context "Tarball" do # Suggested subject: # # subject { described_class.new('fixture-tarball', fixture_tarball, checksum: fixture_checksum) } # let(:fixture_tarball) do File.expand_path('spec/fixtures/tarball/tarball.tar.gz', PROJECT_ROOT) end let(:fixture_checksum) { '292e692ad18faabd4f9b21037d51f0185e04b69f82c522a54af91fb5b88c2d3b' } # Use tmpdir for cached tarballs let(:tmpdir) { Dir.mktmpdir } # `moduledir` and `cache_root` are available for examples to use in creating # their subjects let(:moduledir) { File.join(tmpdir, 'modules').tap { |path| Dir.mkdir(path) } } let(:cache_root) { File.join(tmpdir, 'cache').tap { |path| Dir.mkdir(path) } } around(:each) do |example| if subject.is_a?(R10K::Tarball) subject.settings[:cache_root] = cache_root elsif subject.respond_to?(:tarball) && subject.tarball.is_a?(R10K::Tarball) subject.tarball.settings[:cache_root] = cache_root end example.run FileUtils.remove_entry_secure(tmpdir) end end r10k-4.0.2/spec/shared-examples/000077500000000000000000000000001460033767200163165ustar00rootroot00000000000000r10k-4.0.2/spec/shared-examples/deploy-actions.rb000066400000000000000000000032761460033767200216050ustar00rootroot00000000000000require 'spec_helper' require 'r10k/deployment' shared_examples_for "a deploy action that requires a config file" do let(:config) do 
R10K::Deployment::MockConfig.new( :sources => { :control => { :type => :mock, :basedir => '/some/nonexistent/path/control', :environments => %w[first second third], }, :hiera => { :type => :mock, :basedir => '/some/nonexistent/path/hiera', :environments => %w[fourth fifth sixth], } } ) end let(:deployment) { R10K::Deployment.new(config) } before do allow(R10K::Deployment).to receive(:new).and_return(deployment) end it "exits when no config file was provided or found" do subject.instance_variable_set(:@config, nil) expect { subject.call }.to exit_with(8) end end shared_examples_for "a deploy action that can be write locked" do let(:config) do R10K::Deployment::MockConfig.new( :sources => { :control => { :type => :mock, :basedir => '/some/nonexistent/path/control', :environments => %w[first second third], }, :hiera => { :type => :mock, :basedir => '/some/nonexistent/path/hiera', :environments => %w[fourth fifth sixth], } } ) end let(:deployment) { R10K::Deployment.new(config) } before do allow(R10K::Deployment).to receive(:new).and_return(deployment) subject.settings = {deploy: {write_lock: "Disabled, yo"}} end it "exits without running when the write lock is set" do expect(subject).to_not receive(:visit_deployment) expect { subject.call }.to exit_with(16) end end r10k-4.0.2/spec/shared-examples/git-repository.rb000066400000000000000000000017251460033767200216500ustar00rootroot00000000000000shared_examples_for "a git repository" do it "does not exist if the repo is not present" do expect(subject.exist?).to be_falsey end describe "listing tags" do before do subject.clone(remote) end it "lists all tags in alphabetical order" do expect(subject.tags).to eq(%w[0.9.0 0.9.0-rc1 1.0.0 1.0.1]) end end describe "resolving refs" do before do subject.clone(remote) end it "can resolve branches" do expect(subject.resolve('master')).to eq '157011a4eaa27f1202a9d94335ee4876b26d377e' end it "can resolve tags" do expect(subject.resolve('1.0.0')).to eq '14cbb45ae3a5f764320b7e63f1a54a25a1ef6c9c' end it "can resolve commits" do expect(subject.resolve('3084373e8d181cf2fea5b4ade2690ba22872bd67')).to eq '3084373e8d181cf2fea5b4ade2690ba22872bd67' end it "returns nil when the object cannot be resolved" do expect(subject.resolve('1.2.3')).to be_nil end end end r10k-4.0.2/spec/shared-examples/git/000077500000000000000000000000001460033767200171015ustar00rootroot00000000000000r10k-4.0.2/spec/shared-examples/git/bare_repository.rb000066400000000000000000000071461460033767200226460ustar00rootroot00000000000000RSpec.shared_examples "a git bare repository" do describe "checking for the presence of the repo" do it "exists if the repo is present" do subject.clone(remote) expect(subject.exist?).to be_truthy end it "doesn't exist if the repo is not present" do expect(subject.exist?).to be_falsey end end describe "cloning the repo" do it "creates the repo at the expected location" do subject.clone(remote) config = File.read(File.join(basedir, dirname, 'config')) expect(config).to match(remote) end context "without a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return(nil) end it 'does not change proxy ENV' do expect(ENV).to_not receive(:[]=) expect(ENV).to_not receive(:update) subject.clone(remote) end end context "with a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return('http://proxy.example.com:3128') end it "manages proxy-related ENV vars" do # Sets proxy settings. 
['HTTPS_PROXY', 'https_proxy', 'HTTP_PROXY', 'http_proxy'].each do |var| expect(ENV).to receive(:[]=).with(var, 'http://proxy.example.com:3128') end # Resets proxy settings when done. expect(ENV).to receive(:update).with(hash_including('HTTPS_PROXY' => nil)) subject.clone(remote) end end end describe "updating the repo" do let(:tag_090) { subject.git_dir + 'refs' + 'tags' + '0.9.0' } let(:packed_refs) { subject.git_dir + 'packed-refs' } before do subject.clone(remote) tag_090.delete if tag_090.exist? packed_refs.delete if packed_refs.exist? end it "fetches objects from the remote" do expect(subject.tags).to_not include('0.9.0') subject.fetch expect(subject.tags).to include('0.9.0') end context "without a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return(nil) end it 'does not change proxy ENV' do expect(ENV).to_not receive(:[]=) expect(ENV).to_not receive(:update) subject.fetch end end context "with a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return('http://proxy.example.com:3128') end it "manages proxy-related ENV vars" do # Sets proxy settings. ['HTTPS_PROXY', 'https_proxy', 'HTTP_PROXY', 'http_proxy'].each do |var| expect(ENV).to receive(:[]=).with(var, 'http://proxy.example.com:3128') end # Resets proxy settings when done. expect(ENV).to receive(:update).with(hash_including('HTTPS_PROXY' => nil)) subject.fetch end end end describe "listing branches" do before do subject.clone(remote) end it "lists all branches in alphabetical order" do expect(subject.branches).to eq(%w[0.9.x master]) end end describe "determining ref type" do before do subject.clone(remote) end it "can infer the type of a branch ref" do expect(subject.ref_type('master')).to eq :branch end it "can infer the type of a tag ref" do expect(subject.ref_type('1.0.0')).to eq :tag end it "can infer the type of a commit" do expect(subject.ref_type('3084373e8d181cf2fea5b4ade2690ba22872bd67')).to eq :commit end it "returns :unknown when the type cannot be inferred" do expect(subject.ref_type('1.2.3')).to eq :unknown end end end r10k-4.0.2/spec/shared-examples/git/thin_repository.rb000066400000000000000000000014231460033767200226670ustar00rootroot00000000000000RSpec.shared_examples "a git thin repository" do describe "cloning" do it "creates a working copy of the repo" do subject.clone(remote) expect(subject.exist?).to be_truthy end it "sets the remote origin url to the provided url" do subject.clone(remote) expect(subject.origin).to eq remote end it "sets the remote cache url to the path to the cache repo" do subject.clone(remote) expect(subject.cache).to eq cacherepo.git_dir.to_s end it "adds the cache repo to the alternates file" do subject.clone(remote) objectpath = cacherepo.git_dir + 'objects' alternates = subject.alternates.to_a expect(alternates.size).to eq 1 expect(alternates[0]).to match_realpath objectpath end end end r10k-4.0.2/spec/shared-examples/git/working_repository.rb000066400000000000000000000143661460033767200234170ustar00rootroot00000000000000RSpec.shared_examples "a git working repository" do describe "cloning" do it "creates a working copy of the repo" do subject.clone(remote) expect(subject.exist?).to be_truthy end describe "using the default branch" do describe "and the remote HEAD is 'master'" do it "checks out the default branch" do subject.clone(remote) expect(subject.head).to eq "157011a4eaa27f1202a9d94335ee4876b26d377e" end end describe "and the remote HEAD is '0.9.x'" do before do 
File.open(File.join(remote, 'HEAD'), 'w') do |fh| fh.write('ref: refs/heads/0.9.x') end end after do clear_remote_path populate_remote_path end it "checks out the default branch" do subject.clone(remote) expect(subject.head).to eq "3084373e8d181cf2fea5b4ade2690ba22872bd67" end end end describe "using an explicit ref" do it "can check out tags" do subject.clone(remote, {:ref => '1.0.0'}) expect(subject.head).to eq "14cbb45ae3a5f764320b7e63f1a54a25a1ef6c9c" end it "can check out remote branches" do subject.clone(remote, {:ref => 'origin/0.9.x'}) expect(subject.head).to eq "3084373e8d181cf2fea5b4ade2690ba22872bd67" end it "can check out commits" do subject.clone(remote, {:ref => '14cbb45ae3a5f764320b7e63f1a54a25a1ef6c9c'}) expect(subject.head).to eq "14cbb45ae3a5f764320b7e63f1a54a25a1ef6c9c" end end describe "with a reference repository" do it "adds the reference repository to the alternates directory" do subject.clone(remote, {:reference => remote}) alternates = subject.alternates.to_a expect(alternates.size).to eq 1 expect(alternates[0]).to match_realpath File.join(remote, 'objects') end end context "without a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return(nil) end it 'does not change proxy ENV' do expect(ENV).to_not receive(:[]=) expect(ENV).to_not receive(:update) subject.clone(remote) end end context "with a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return('http://proxy.example.com:3128') end it "manages proxy-related ENV vars" do # Sets proxy settings. ['HTTPS_PROXY', 'https_proxy', 'HTTP_PROXY', 'http_proxy'].each do |var| expect(ENV).to receive(:[]=).with(var, 'http://proxy.example.com:3128') end # Resets proxy settings when done. expect(ENV).to receive(:update).with(hash_including('HTTPS_PROXY' => nil)) subject.clone(remote) end end end describe "updating the repo" do let(:tag_090) { subject.git_dir + 'refs' + 'tags' + '0.9.0' } let(:packed_refs) { subject.git_dir + 'packed-refs' } before do subject.clone(remote) tag_090.delete if tag_090.exist? packed_refs.delete if packed_refs.exist? end it "fetches objects from the remote" do expect(subject.tags).to_not include('0.9.0') subject.fetch expect(subject.tags).to include('0.9.0') end context "without a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return(nil) end it 'does not change proxy ENV' do expect(ENV).to_not receive(:[]=) expect(ENV).to_not receive(:update) subject.fetch end end context "with a proxy" do before(:each) do allow(R10K::Git).to receive(:get_proxy_for_remote).with(remote).and_return('http://proxy.example.com:3128') end it "manages proxy-related ENV vars" do # Sets proxy settings. ['HTTPS_PROXY', 'https_proxy', 'HTTP_PROXY', 'http_proxy'].each do |var| expect(ENV).to receive(:[]=).with(var, 'http://proxy.example.com:3128') end # Resets proxy settings when done. 
expect(ENV).to receive(:update).with(hash_including('HTTPS_PROXY' => nil)) subject.fetch end end end describe "listing branches" do before do subject.clone(remote) end it "lists the local branches" do expect(subject.branches).to eq(%w[master]) end end describe "listing the origin" do it "is nil if the remote is not set" do expect(subject.origin).to be_nil end it "is the remote URL when set" do subject.clone(remote) expect(subject.origin).to eq remote end end describe "checking out ref" do before(:each) do subject.clone(remote) File.open(File.join(subject.path, 'README.markdown'), 'a') { |f| f.write('local modifications!') } end context "with force = true" do it "should revert changes in managed files" do subject.checkout(subject.head, {:force => true}) expect(File.read(File.join(subject.path, 'README.markdown')).include?('local modifications!')).to eq false end end context "with force = false" do it "should not revert changes in managed files" do subject.checkout(subject.head, {:force => false}) expect(File.read(File.join(subject.path, 'README.markdown')).include?('local modifications!')).to eq true end end end describe "checking if worktree is dirty" do before do subject.clone(remote) end context "with no local changes" do it "reports worktree as not dirty" do expect(subject.dirty?).to be false end end context "with local changes" do before(:each) do File.open(File.join(subject.path, 'README.markdown'), 'a') { |f| f.write('local modifications!') } File.open(File.join(subject.path, 'CHANGELOG'), 'a') { |f| f.write('local modifications to the changelog too') } end it "logs and reports worktree as dirty" do expect(subject.logger).to receive(:debug).with(/found local modifications in.*README\.markdown/i) expect(subject.logger).to receive(:debug).with(/found local modifications in.*CHANGELOG/i) expect(subject.logger).to receive(:debug1).twice expect(subject.dirty?).to be true end end end end r10k-4.0.2/spec/shared-examples/puppetfile-action.rb000066400000000000000000000020461460033767200222750ustar00rootroot00000000000000require 'spec_helper' shared_examples_for "a puppetfile action" do describe "initializing" do it "accepts the :root option" do described_class.new({root: "/some/nonexistent/path"}, [], {}) end it "accepts the :puppetfile option" do described_class.new({puppetfile: "/some/nonexistent/path/Puppetfile"}, [], {}) end it "accepts the :moduledir option" do described_class.new({moduledir: "/some/nonexistent/path/modules"}, [], {}) end end end shared_examples_for "a puppetfile install action" do describe "initializing" do it "accepts the :root option" do described_class.new({root: "/some/nonexistent/path"}, [], {}) end it "accepts the :puppetfile option" do described_class.new({puppetfile: "/some/nonexistent/path/Puppetfile"}, [], {}) end it "accepts the :moduledir option" do described_class.new({moduledir: "/some/nonexistent/path/modules"}, [], {}) end it "accepts the :force option" do described_class.new({force: true}, [], {}) end end end r10k-4.0.2/spec/shared-examples/settings/000077500000000000000000000000001460033767200201565ustar00rootroot00000000000000r10k-4.0.2/spec/shared-examples/settings/ancestry.rb000066400000000000000000000022351460033767200223350ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/collection' require 'r10k/settings/list' shared_examples_for "a setting with ancestors" do describe '#parent=' do it "allows assignment to a collection" do parent = R10K::Settings::Collection.new(:parent, []) subject.parent = parent 
expect(subject.parent).to eq parent end it "allows assignment to a list" do parent = R10K::Settings::List.new(:parent, []) subject.parent = parent expect(subject.parent).to eq parent end it "rejects assignment when argument is not a settings collection or list" do parent = Hash.new expect { subject.parent = parent }.to raise_error do |error| expect(error.message).to match /may only belong to a settings collection or list/i end end it "rejects re-assignment" do parent = R10K::Settings::Collection.new(:parent, []) step_parent = R10K::Settings::Collection.new(:step_parent, []) subject.parent = parent expect { subject.parent = step_parent }.to raise_error do |error| expect(error.message).to match /cannot be reassigned.*new parent/i end end end end r10k-4.0.2/spec/shared-examples/subprocess-runner.rb000066400000000000000000000044611460033767200223470ustar00rootroot00000000000000shared_examples_for "a subprocess runner" do |fixture_root| describe "running 'echo test'" do subject { described_class.new(%w[echo test]) } it "sets the exit code to 0" do result = subject.run expect(result.exit_code).to eq 0 end it "returns the contents of stdout" do result = subject.run expect(result.stdout).to eq 'test' end it "indicates the command succeeded" do result = subject.run expect(result).to be_success expect(result).to_not be_failed end end describe "running a command with a large amount of output" do subject do described_class.new(['ruby', '-e', 'blob = "buffalo!" * (2 << 16); puts blob']) end it "does not hang" do Timeout.timeout(5) do subject.run end end end describe "running 'ls' or 'dir' with a different working directory" do subject do if R10K::Util::Platform.windows? described_class.new(%w[cmd /c dir]).tap do |o| o.cwd = fixture_root end else described_class.new(%w[ls]).tap do |o| o.cwd = fixture_root end end end it "returns the contents of the given working directory" do result = subject.run expect(result.stdout).to match('no-execute.sh') end end describe "running 'false'" do subject { described_class.new(%w[false]) } it "sets the exit code to 1", unless: R10K::Util::Platform.windows? do result = subject.run expect(result.exit_code).to eq 1 end it "indicates the command failed" do result = subject.run expect(result).to_not be_success expect(result).to be_failed end end describe "running '/this/command/will/not/exist'" do subject { described_class.new(%w[/this/command/will/not/exist]) } it "indicates the command failed" do result = subject.run expect(result).to_not be_success expect(result).to be_failed end end describe "running a non-executable file", :unless => R10K::Util::Platform.windows? 
do let(:fixture_file) { File.join(fixture_root, 'no-execute.sh') } subject { described_class.new([fixture_file]) } it "indicates the command failed" do result = subject.run expect(result).to_not be_success expect(result).to be_failed end end end r10k-4.0.2/spec/spec_helper.rb000066400000000000000000000022431460033767200160530ustar00rootroot00000000000000PROJECT_ROOT = File.expand_path('..', File.dirname(__FILE__)) if ENV['COVERAGE'] require 'simplecov' SimpleCov.start do libdir = File.join(PROJECT_ROOT, 'lib') add_filter do |src| !src.filename.match(%r[\A#{libdir}]) end %w[Deployment Source Environment Module Git SVN Action Util].each do |group| add_group group, "lib/r10k/#{group.downcase}" end end end require 'r10k' Dir.glob(File.expand_path('spec/shared-examples/**/*.rb', PROJECT_ROOT)).each { |file| require file } require 'shared-contexts/git-fixtures' require 'shared-contexts/tarball' require 'matchers/exit_with' require 'matchers/match_realpath' require 'r10k-mocks' # Add a negated version of the change matcher. RSpec::Matchers.define_negated_matcher :not_change, :change RSpec.configure do |config| config.before(:all) do Log4r::Logger.global.level = Log4r::OFF end end shared_context 'fail on execution' do before do allow_any_instance_of(described_class).to receive(:execute).and_raise "Tests should never invoke system calls" allow_any_instance_of(R10K::Util::Subprocess).to receive(:execute).and_raise "Tests should never invoke system calls" end end r10k-4.0.2/spec/unit/000077500000000000000000000000001460033767200142135ustar00rootroot00000000000000r10k-4.0.2/spec/unit/action/000077500000000000000000000000001460033767200154705ustar00rootroot00000000000000r10k-4.0.2/spec/unit/action/cri_runner_spec.rb000066400000000000000000000034111460033767200211740ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/base' require 'r10k/action/cri_runner' describe R10K::Action::CriRunner do let(:action_class) do Class.new do attr_reader :opts attr_reader :argv def initialize(opts, argv, settings = {}) @opts = opts @argv = argv @settings = {} end def call @opts[:runok] end end end subject(:cri_runner) { described_class.wrap(action_class) } let(:opts) { {:value => :yep} } let(:argv) { %w[value yes] } describe "handling options" do it "adapts the :verbose flag to :loglevel" do input = {:value => :yep, :verbose => 'DEBUG'} output = {:value => :yep, :loglevel => 'DEBUG'} expect(cri_runner.handle_opts(input)).to eq(output) end end describe "handling arguments" do it "sets the arguments as-is" do expect(cri_runner.handle_argv(%w[one two])).to eq(%w[one two]) end end describe "proxying invocations to .new" do it "returns itself" do expect(cri_runner.new(opts, argv, :cri_cmd)).to eql cri_runner end it "handles options" do expect(cri_runner).to receive(:handle_opts) cri_runner.new({:value => :yep, :verbose => 'DEBUG'}, argv, :cri_cmd) end it "handles arguments" do expect(cri_runner).to receive(:handle_argv) cri_runner.new({:value => :yep, :verbose => 'DEBUG'}, argv, :cri_cmd) end end describe "calling" do it "exits with a return value of 0 if the action returned true" do expect { cri_runner.new({:runok => true}, []).call }.to exit_with(0) end it "exits with a return value of 1 if the action returned false" do expect { cri_runner.new({:runok => false}, []).call }.to exit_with(1) end end end 
r10k-4.0.2/spec/unit/action/deploy/000077500000000000000000000000001460033767200167645ustar00rootroot00000000000000r10k-4.0.2/spec/unit/action/deploy/deploy_helpers_spec.rb000066400000000000000000000021741460033767200233450ustar00rootroot00000000000000require 'spec_helper' require 'r10k/logging' require 'r10k/action/deploy/deploy_helpers' describe R10K::Action::Deploy::DeployHelpers do subject do Object.new.tap do |o| o.extend(R10K::Logging) o.extend(described_class) end end describe "checking for a config file" do it "logs a warning and exits when no config file was set" do logger = subject.logger expect(logger).to receive(:fatal).with("No configuration file given, no config file found in current directory, and no global config present") expect { subject.expect_config! }.to exit_with(8) end end describe "checking the write lock setting" do it "logs a warning and exits when the write lock is set" do logger = subject.logger expect(logger).to receive(:fatal).with("Making changes to deployed environments has been administratively disabled.") expect(logger).to receive(:fatal).with("Reason: r10k is sleepy and wants to take a nap") expect { subject.check_write_lock!(deploy: {write_lock: "r10k is sleepy and wants to take a nap"}) }.to exit_with(16) end end end r10k-4.0.2/spec/unit/action/deploy/display_spec.rb000066400000000000000000000032561460033767200217760ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/deploy/display' describe R10K::Action::Deploy::Display do describe "initializing" do it "accepts a puppetfile option" do described_class.new({puppetfile: true}, [], {}) end it "accepts a modules option" do described_class.new({modules: true}, [], {}) end it "accepts a detail option" do described_class.new({detail: true}, [], {}) end it "accepts a format option" do described_class.new({format: "json"}, [], {}) end it "accepts a fetch option" do described_class.new({fetch: true}, [], {}) end end subject { described_class.new({config: "/some/nonexistent/path"}, [], {}) } before do allow(subject).to receive(:puts) end it_behaves_like "a deploy action that requires a config file" describe "collecting info" do subject { described_class.new({config: "/some/nonexistent/path", format: 'json', puppetfile: true, detail: true}, ['first'], {}) } let(:mock_config) do R10K::Deployment::MockConfig.new( :sources => { :control => { :type => :mock, :basedir => '/some/nonexistent/path/control', :environments => %w[first second third env-that/will-be-corrected], :prefix => 'PREFIX' } } ) end let(:deployment) { R10K::Deployment.new(mock_config) } it "gathers environment info" do source_info = subject.send(:source_info, deployment.sources.first, ['first']) expect(source_info[:name]).to eq(:control) expect(source_info[:environments].length).to eq(1) expect(source_info[:environments][0][:name]).to eq('first') end end end r10k-4.0.2/spec/unit/action/deploy/environment_spec.rb000066400000000000000000000544101460033767200226730ustar00rootroot00000000000000require 'spec_helper' require 'r10k/deployment' require 'r10k/action/deploy/environment' describe R10K::Action::Deploy::Environment do subject { described_class.new({config: "/some/nonexistent/path"}, [], {}) } it_behaves_like "a deploy action that can be write locked" it_behaves_like "a deploy action that requires a config file" describe "initializing" do it "can accept a cachedir option" do described_class.new({cachedir: "/some/nonexistent/cachedir"}, [], {}) end it "can accept a puppetfile option" do described_class.new({puppetfile: true}, [], 
{}) end it "can accept a modules option" do described_class.new({modules: true}, [], {}) end it "can accept a default_branch_override option" do described_class.new({:'default-branch-override' => 'default_branch_override_name'}, [], {}) end it "can accept a no-force option" do described_class.new({:'no-force' => true}, [], {}) end it 'can accept a generate-types option' do described_class.new({ 'generate-types': true }, [], {}) end it 'can accept a puppet-path option' do described_class.new({ 'puppet-path': '/nonexistent' }, [], {}) end it 'can accept a private-key option' do described_class.new({ 'private-key': '/nonexistent' }, [], {}) end it 'can accept a token option' do described_class.new({ 'oauth-token': '/nonexistent' }, [], {}) end it 'can accept an app id option' do described_class.new({ 'github-app-id': '/nonexistent' }, [], {}) end it 'can accept a ttl option' do described_class.new({ 'github-app-ttl': '/nonexistent' }, [], {}) end it 'can accept a ssl private key option' do described_class.new({ 'github-app-key': '/nonexistent' }, [], {}) end it 'can accept a exclude-spec option' do described_class.new({ :'exclude-spec' => true }, [], {}) end it 'can accept an incremental option' do described_class.new({ :incremental => true }, [], {}) end end describe "when called" do let(:mock_config) do R10K::Deployment::MockConfig.new( :sources => { :control => { :type => :mock, :basedir => '/some/nonexistent/path/control', :environments => %w[first second third env-that/will-be-corrected], :prefix => 'PREFIX' } } ) end describe "with puppetfile or modules flag" do let(:deployment) { R10K::Deployment.new(mock_config) } let(:loader) do instance_double("R10K::ModuleLoader::Puppetfile", :load => { :modules => ['foo'], :purge_exclusions => [], :managed_directories => [], :desired_contents => [] } ).as_null_object end before do expect(R10K::Deployment).to receive(:new).and_return(deployment) expect(R10K::ModuleLoader::Puppetfile).to receive(:new). and_return(loader).at_least(:once) end it "syncs the puppetfile content when given the puppetfile flag" do expect(loader).to receive(:load).exactly(4).times expect(R10K::ContentSynchronizer).to receive(:concurrent_sync).exactly(4).times action = described_class.new({config: "/some/nonexistent/path", puppetfile: true}, [], {}) action.call end it "syncs the puppetfile when given the modules flag" do expect(loader).to receive(:load).exactly(4).times expect(R10K::ContentSynchronizer).to receive(:concurrent_sync).exactly(4).times action = described_class.new({config: "/some/nonexistent/path", modules: true}, [], {}) action.call end end describe "with incremental flag" do let(:loader) do instance_double("R10K::ModuleLoader::Puppetfile", :load => { :modules => ['foo'], :purge_exclusions => [], :managed_directories => [], :desired_contents => [] } ).as_null_object end before do expect(R10K::Deployment).to receive(:new).and_wrap_original do |original, settings| original.call(mock_config.merge(settings)) end expect(R10K::ModuleLoader::Puppetfile).to receive(:new). 
and_return(loader).at_least(:once) end it "incremental flag causes the module definitons to be preloaded by the loader" do expect(loader).to receive(:load_metadata).exactly(4).times action = described_class.new({:config => "/some/nonexistent/path", :modules => true, :incremental => true}, [], {}) action.call end end describe "with an environment that doesn't exist" do let(:deployment) do R10K::Deployment.new(mock_config) end before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject { described_class.new({config: "/some/nonexistent/path"}, %w[not_an_environment], {}) } it "logs that the environments can't be deployed and returns false" do expect(subject.logger).to receive(:error).with("Environment(s) 'not_an_environment' cannot be found in any source and will not be deployed.") logger = subject.logger expect(subject.call).to eq false end end describe "with no-force" do subject { described_class.new({ config: "/some/nonexistent/path", modules: true, :'no-force' => true}, %w[first], {}) } it "tries to preserve local modifications" do expect(subject.settings[:overrides][:modules][:force]).to equal(false) end end describe "postrun" do context "basic postrun hook" do let(:settings) { { postrun: ["/path/to/executable", "arg1", "arg2"] } } let(:deployment) { R10K::Deployment.new(mock_config.merge(settings)) } before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject do described_class.new( {config: "/some/nonexistent/path" }, %w[PREFIX_first], settings ) end it "is passed to Subprocess" do mock_subprocess = double allow(mock_subprocess).to receive(:logger=) expect(mock_subprocess).to receive(:execute) expect(R10K::Util::Subprocess).to receive(:new). with(["/path/to/executable", "arg1", "arg2"]). and_return(mock_subprocess) subject.call end end context "supports environments" do context "when one environment" do let(:settings) { { postrun: ["/generate/types/wrapper", "$modifiedenvs"] } } let(:deployment) { R10K::Deployment.new(mock_config.merge(settings)) } before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject do described_class.new( {config: "/some/nonexistent/path" }, %w[PREFIX_first], settings ) end it "properly substitutes the environment" do mock_subprocess = double allow(mock_subprocess).to receive(:logger=) expect(mock_subprocess).to receive(:execute) expect(R10K::Util::Subprocess).to receive(:new). with(["/generate/types/wrapper", "PREFIX_first"]). and_return(mock_subprocess) subject.call end end context "when many environments" do let(:settings) { { postrun: ["/generate/types/wrapper", "$modifiedenvs"] } } let(:deployment) { R10K::Deployment.new(mock_config.merge(settings)) } before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject do described_class.new( {config: "/some/nonexistent/path" }, [], settings ) end it "properly substitutes the environment" do mock_subprocess = double allow(mock_subprocess).to receive(:logger=) expect(mock_subprocess).to receive(:execute) expect(R10K::Util::Subprocess).to receive(:new). with(["/generate/types/wrapper", "PREFIX_first PREFIX_second PREFIX_third PREFIX_env_that_will_be_corrected"]). 
and_return(mock_subprocess) subject.call end end end end describe "Purging allowlist" do let(:settings) { { pool_size: 4, deploy: { purge_levels: [:environment], purge_allowlist: ['coolfile', 'coolfile2'] } } } let(:overrides) { { environments: {}, modules: { pool_size: 4 }, purging: { purge_levels: [:environment], purge_allowlist: ['coolfile', 'coolfile2'] } } } let(:deployment) do R10K::Deployment.new(mock_config.merge({overrides: overrides})) end before do expect(R10K::Deployment).to receive(:new).and_return(deployment) allow_any_instance_of(R10K::Environment::Base).to receive(:purge!) end subject { described_class.new({ config: "/some/nonexistent/path", modules: true }, %w[PREFIX_first], settings) } it "reads in the purge_allowlist setting and purges accordingly" do expect(subject.logger).to receive(:debug).with(/Purging unmanaged content for environment/) expect(subject.settings[:overrides][:purging][:purge_allowlist]).to eq(['coolfile', 'coolfile2']) subject.call end end describe "purge_levels" do let(:settings) { { deploy: { purge_levels: purge_levels } } } let(:overrides) do { environments: { requested_environments: ['PREFIX_first'] }, modules: { deploy_modules: true, pool_size: 4 }, purging: { purge_levels: purge_levels } } end let(:deployment) do R10K::Deployment.new(mock_config.merge({ overrides: overrides })) end before do expect(R10K::Deployment).to receive(:new).and_return(deployment) allow_any_instance_of(R10K::Environment::Base).to receive(:purge!) end subject { described_class.new({ config: "/some/nonexistent/path", modules: true }, %w[PREFIX_first], settings) } describe "deployment purge level" do let(:purge_levels) { [:deployment] } it "updates the source's cache before it purges environments" do deployment.sources.each do |source| expect(source).to receive(:reload!).ordered end expect(deployment).to receive(:purge!).ordered subject.call end it "only logs about purging deployment" do expect(subject).to receive(:visit_environment).and_wrap_original do |original, env, &block| expect(env.logger).to_not receive(:debug).with(/Purging unmanaged puppetfile content/) original.call(env) end.at_least(:once) expect(subject.logger).to receive(:debug).with(/Purging unmanaged environments for deployment/) expect(subject.logger).to_not receive(:debug).with(/Purging unmanaged content for environment/) subject.call end end describe "environment purge level" do let(:purge_levels) { [:environment] } it "only logs about purging environment" do expect(subject).to receive(:visit_environment).and_wrap_original do |original, env, &block| expect(env.logger).to_not receive(:debug).with(/Purging unmanaged puppetfile content/) original.call(env) end.at_least(:once) expect(subject.logger).to receive(:debug).with(/Purging unmanaged content for environment/) expect(subject.logger).to_not receive(:debug).with(/Purging unmanaged environments for deployment/) subject.call end it "logs that environment was not purged if deploy failed" do expect(subject).to receive(:visit_environment).and_wrap_original do |original, env, &block| if env.name =~ /first/ expect(env).to receive(:deploy) { subject.instance_variable_set(:@visit_ok, false) } end original.call(env) end.at_least(:once) expect(subject.logger).to receive(:debug).with(/Not purging unmanaged content for environment/) subject.call end end describe "puppetfile purge level" do let(:purge_levels) { [:puppetfile] } it "only logs about purging puppetfile" do allow(R10K::ContentSynchronizer).to receive(:concurrent_sync) expect(subject).to 
receive(:visit_environment).and_wrap_original do |original, env, &block| if env.name =~ /first/ expect(env.logger).to receive(:debug).with(/Purging unmanaged Puppetfile content/) end original.call(env) end.at_least(:once) expect(subject.logger).to_not receive(:debug).with(/Purging unmanaged environments for deployment/) expect(subject.logger).to_not receive(:debug).with(/Purging unmanaged content for environment/) subject.call end end end describe "generate-types" do let(:deployment) do R10K::Deployment.new( R10K::Deployment::MockConfig.new( sources: { control: { type: :mock, basedir: '/some/nonexistent/path/control', environments: %w[first second] } }, overrides: { modules: { pool_size: 4 } } ) ) end before do allow(R10K::Deployment).to receive(:new).and_return(deployment) allow_any_instance_of(R10K::Environment::Base).to receive(:purge!) allow(subject).to receive(:write_environment_info!) expect(subject.logger).not_to receive(:error) end context 'with generate-types enabled' do subject do described_class.new( { config: '/some/nonexistent/path', modules: true, 'generate-types': true }, %w[first second], {} ) end it 'generate_types is true' do expect(subject.settings[:overrides][:environments][:generate_types]).to eq(true) end it 'only calls puppet generate types on specified environment' do settings = subject.instance_variable_get(:@settings) settings[:overrides][:environments][:requested_environments] = %w{first} subject.instance_variable_set(:@settings, settings) expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| if environment.dirname == 'first' expect(environment).to receive(:generate_types!) else expect(environment).not_to receive(:generate_types!) end original.call(environment, &block) end.twice subject.call end it 'does not call puppet generate types on puppetfile failure' do expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| allow(environment).to receive(:deploy) { subject.instance_variable_set(:@visit_ok, false) } expect(environment).not_to receive(:generate_types!) original.call(environment, &block) end.twice subject.call end it 'calls puppet generate types on previous puppetfile failure' do expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| allow(environment).to receive(:deploy) do subject.instance_variable_set(:@visit_ok, false) if environment.dirname == 'first' end if environment.dirname == 'second' expect(environment).to receive(:generate_types!) else expect(environment).not_to receive(:generate_types!) end original.call(environment, &block) end.twice subject.call end end context 'with generate-types disabled' do subject do described_class.new( { config: '/some/nonexistent/path', modules: true, 'generate-types': false }, %w[first], {} ) end it 'generate_types is false' do expect(subject.settings[:overrides][:environments][:generate_types]).to eq(false) end it 'does not call puppet generate types' do expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| expect(environment).not_to receive(:generate_types!) 
original.call(environment, &block) end.twice subject.call end end end describe 'with puppet-path' do subject { described_class.new({ config: '/some/nonexistent/path', 'puppet-path': '/nonexistent' }, [], {}) } it 'sets puppet_path' do expect(subject.instance_variable_get(:@puppet_path)).to eq('/nonexistent') end end describe 'with puppet-conf' do subject { described_class.new({ config: '/some/nonexistent/path', 'puppet-conf': '/nonexistent' }, [], {}) } it 'sets puppet_conf' do expect(subject.instance_variable_get(:@puppet_conf)).to eq('/nonexistent') end end describe 'with private-key' do subject { described_class.new({ config: '/some/nonexistent/path', 'private-key': '/nonexistent' }, [], {}) } it 'sets private_key' do expect(subject.instance_variable_get(:@private_key)).to eq('/nonexistent') end end describe 'with oauth-token' do subject { described_class.new({ config: '/some/nonexistent/path', 'oauth-token': '/nonexistent' }, [], {}) } it 'sets oauth_token' do expect(subject.instance_variable_get(:@oauth_token)).to eq('/nonexistent') end end end describe "write_environment_info!" do class Fake_Environment attr_accessor :path attr_accessor :puppetfile attr_accessor :info def initialize(path, info) @path = path @info = info @puppetfile = R10K::Puppetfile.new("", {}) end end let(:mock_stateful_repo_1) { instance_double("R10K::Git::StatefulRepository", :head => "123456") } let(:mock_stateful_repo_2) { instance_double("R10K::Git::StatefulRepository", :head => "654321") } let(:mock_git_module_1) do instance_double("R10K::Module::Git", :name => "my_cool_module", :properties => { :type => :git, :expected => "1.0", :actual => mock_stateful_repo_1.head }) end let(:mock_git_module_2) do instance_double("R10K::Module::Git", :name => "my_uncool_module", :properties => { :type => :git, :expected => "0.0.1", :actual => mock_stateful_repo_2.head }) end let(:mock_forge_module_1) { double(:name => "their_shiny_module", :properties => { :expected => "2.0.0" }) } before(:all) do @tmp_path = "./tmp-r10k-test-dir/" Dir.mkdir(@tmp_path) unless File.exist?(@tmp_path) end after(:all) do File.delete("#{@tmp_path}/.r10k-deploy.json") Dir.delete(@tmp_path) end it "writes the .r10k-deploy file correctly if all goes well" do fake_env = Fake_Environment.new(@tmp_path, {:name => "my_cool_environment", :signature => "pablo picasso"}) allow(fake_env).to receive(:modules).and_return([mock_git_module_1, mock_git_module_2, mock_forge_module_1]) subject.send(:write_environment_info!, fake_env, "2019-01-01 23:23:22 +0000", true) file_contents = File.read("#{@tmp_path}/.r10k-deploy.json") r10k_deploy = JSON.parse(file_contents) expect(r10k_deploy['name']).to eq("my_cool_environment") expect(r10k_deploy['signature']).to eq("pablo picasso") expect(r10k_deploy['started_at']).to eq("2019-01-01 23:23:22 +0000") expect(r10k_deploy['deploy_success']).to eq(true) expect(r10k_deploy['module_deploys'].length).to eq(3) expect(r10k_deploy['module_deploys'][0]['name']).to eq("my_cool_module") expect(r10k_deploy['module_deploys'][0]['version']).to eq("1.0") expect(r10k_deploy['module_deploys'][0]['sha']).to eq("123456") expect(r10k_deploy['module_deploys'][1]['name']).to eq("my_uncool_module") expect(r10k_deploy['module_deploys'][1]['version']).to eq("0.0.1") expect(r10k_deploy['module_deploys'][1]['sha']).to eq("654321") expect(r10k_deploy['module_deploys'][2]['name']).to eq("their_shiny_module") expect(r10k_deploy['module_deploys'][2]['version']).to eq("2.0.0") expect(r10k_deploy['module_deploys'][2]['sha']).to eq(nil) end it "writes 
the .r10k-deploy file correctly if there's a failure" do fake_env = Fake_Environment.new(@tmp_path, {:name => "my_cool_environment", :signature => "pablo picasso"}) allow(fake_env).to receive(:modules).and_return([mock_git_module_1, mock_git_module_2, mock_forge_module_1]) allow(mock_forge_module_1).to receive(:properties).and_raise(StandardError) subject.send(:write_environment_info!, fake_env, "2019-01-01 23:23:22 +0000", true) file_contents = File.read("#{@tmp_path}/.r10k-deploy.json") r10k_deploy = JSON.parse(file_contents) expect(r10k_deploy['name']).to eq("my_cool_environment") expect(r10k_deploy['signature']).to eq("pablo picasso") expect(r10k_deploy['started_at']).to eq("2019-01-01 23:23:22 +0000") expect(r10k_deploy['deploy_success']).to eq(true) expect(r10k_deploy['module_deploys'].length).to eq(0) end end end r10k-4.0.2/spec/unit/action/deploy/module_spec.rb000066400000000000000000000376301460033767200216210ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/deploy/module' describe R10K::Action::Deploy::Module do subject { described_class.new({config: "/some/nonexistent/path"}, [], {}) } it_behaves_like "a deploy action that requires a config file" it_behaves_like "a deploy action that can be write locked" describe "initializing" do it "accepts an environment option" do described_class.new({environment: "production"}, [], {}) end it "can accept a no-force option" do described_class.new({:'no-force' => true}, [], {}) end it 'can accept a generate-types option' do described_class.new({ 'generate-types': true }, [], {}) end it 'can accept a puppet-path option' do described_class.new({ 'puppet-path': '/nonexistent' }, [], {}) end it 'can accept a puppet-conf option' do described_class.new({ 'puppet-conf': '/nonexistent' }, [], {}) end it 'can accept a cachedir option' do described_class.new({ cachedir: '/nonexistent' }, [], {}) end it 'can accept a private-key option' do described_class.new({ 'private-key': '/nonexistent' }, [], {}) end it 'can accept a token option' do described_class.new({ 'oauth-token': '/nonexistent' }, [], {}) end it 'can accept an app id option' do described_class.new({ 'github-app-id': '/nonexistent' }, [], {}) end it 'can accept a ttl option' do described_class.new({ 'github-app-ttl': '/nonexistent' }, [], {}) end it 'can accept a ssl private key option' do described_class.new({ 'github-app-key': '/nonexistent' }, [], {}) end it 'can accept a exclude-spec option' do described_class.new({ :'exclude-spec' => true }, [], {}) end end describe "with no-force" do subject { described_class.new({ config: "/some/nonexistent/path", :'no-force' => true}, [], {}) } it "tries to preserve local modifications" do expect(subject.settings[:overrides][:modules][:force]).to equal(false) end end describe 'generate-types' do let(:deployment) do R10K::Deployment.new( sources: { control: { type: :mock, basedir: '/some/nonexistent/path/control', environments: %w[first second] } }, overrides: { modules: { pool_size: 4 } } ) end before do allow(R10K::Deployment).to receive(:new).and_return(deployment) end context 'with generate-types enabled' do subject do described_class.new( { config: '/some/nonexistent/path', 'generate-types': true }, %w[first], {} ) end it 'generate_types is true' do expect(subject.settings[:overrides][:environments][:generate_types]).to eq(true) end it 'only calls puppet generate types on environments where the specified module was updated' do allow(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| 
if environment.name == 'first' expect(environment).to receive(:deploy).and_return(['first']) expect(environment).to receive(:generate_types!) else expect(environment).to receive(:deploy).and_return([]) expect(environment).not_to receive(:generate_types!) end original.call(environment, &block) end subject.call end end context 'with generate-types disabled' do subject do described_class.new( { config: '/some/nonexistent/path', 'generate-types': false }, %w[first], {} ) end it 'generate_types is false' do expect(subject.settings[:overrides][:environments][:generate_types]).to eq(false) end it 'does not call puppet generate types' do |it| expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| expect(environment).not_to receive(:generate_types!) original.call(environment, &block) end.twice subject.call end end end describe 'with puppet-path' do subject { described_class.new({ config: '/some/nonexistent/path', 'puppet-path': '/nonexistent' }, [], {}) } it 'sets puppet_path' do expect(subject.instance_variable_get(:@puppet_path)).to eq('/nonexistent') end end describe 'with puppet-conf' do subject { described_class.new({ config: '/some/nonexistent/path', 'puppet-conf': '/nonexistent' }, [], {}) } it 'sets puppet_conf' do expect(subject.instance_variable_get(:@puppet_conf)).to eq('/nonexistent') end end describe 'with cachedir' do subject { described_class.new({ config: '/some/nonexistent/path', cachedir: '/nonexistent' }, [], {}) } it 'sets cachedir' do expect(subject.instance_variable_get(:@cachedir)).to eq('/nonexistent') end end describe 'with private-key' do subject { described_class.new({ config: '/some/nonexistent/path', 'private-key': '/nonexistent' }, [], {}) } it 'sets private_key' do expect(subject.instance_variable_get(:@private_key)).to eq('/nonexistent') end end describe 'with oauth-token' do subject { described_class.new({ config: '/some/nonexistent/path', 'oauth-token': '/nonexistent' }, [], {}) } it 'sets token_path' do expect(subject.instance_variable_get(:@oauth_token)).to eq('/nonexistent') end end describe 'with github-app-id' do subject { described_class.new({ config: '/some/nonexistent/path', 'github-app-id': '/nonexistent' }, [], {}) } it 'sets github-app-id' do expect(subject.instance_variable_get(:@github_app_id)).to eq('/nonexistent') end end describe 'with github-app-key' do subject { described_class.new({ config: '/some/nonexistent/path', 'github-app-key': '/nonexistent' }, [], {}) } it 'sets github-app-key' do expect(subject.instance_variable_get(:@github_app_key)).to eq('/nonexistent') end end describe 'with github-app-ttl' do subject { described_class.new({ config: '/some/nonexistent/path', 'github-app-ttl': '/nonexistent' }, [], {}) } it 'sets github-app-ttl' do expect(subject.instance_variable_get(:@github_app_ttl)).to eq('/nonexistent') end end describe 'with modules' do subject { described_class.new({ config: '/some/nonexistent/path' }, ['mod1', 'mod2'], {}) } let(:cache) { instance_double("R10K::Git::Cache", 'sanitized_dirname' => 'foo', 'cached?' 
=> true, 'sync' => true) } let(:repo) { instance_double("R10K::Git::StatefulRepository", cache: cache, resolve: 'main', tracked_paths: []) } it 'does not sync modules not given' do allow(R10K::Deployment).to receive(:new).and_wrap_original do |original, settings, &block| original.call(settings.merge({ sources: { main: { remote: 'https://not/a/remote', basedir: '/not/a/basedir', type: 'git' } } })) end allow(R10K::Git::StatefulRepository).to receive(:new).and_return(repo) allow(R10K::Git).to receive_message_chain(:cache, :generate).and_return(cache) allow_any_instance_of(R10K::Source::Git).to receive(:environment_names).and_return([R10K::Environment::Name.new('first', {})]) expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| # For this test we want to have realistic Modules and access to # their internal Repos to validate the sync. Unfortunately, to # do so we do some invasive mocking, effectively implementing # our own R10K::ModuleLoader::Puppetfile#load. We directly update # the Environment's internal ModuleLoader and then call `load` on # it so it will create the correct loaded_content. loader = environment.loader allow(loader).to receive(:puppetfile_content).and_return('') expect(loader).to receive(:load) do loader.add_module('mod1', { git: 'https://remote', default_branch: 'main'}) loader.add_module('mod2', { git: 'https://remote', default_branch: 'main'}) loader.add_module('mod3', { git: 'https://remote', default_branch: 'main'}) loaded_content = loader.load! loaded_content[:modules].each do |mod| if ['mod1', 'mod2'].include?(mod.name) expect(mod.should_sync?).to be(true) else expect(mod.should_sync?).to be(false) end expect(mod).to receive(:sync).and_call_original end loaded_content end original.call(environment, &block) end expect(repo).to receive(:sync).twice subject.call end end describe 'with environments' do subject { described_class.new({ config: '/some/nonexistent/path', environment: 'first' }, ['mod1'], {}) } let(:cache) { instance_double("R10K::Git::Cache", 'sanitized_dirname' => 'foo', 'cached?' => true, 'sync' => true) } let(:repo) { instance_double("R10K::Git::StatefulRepository", cache: cache, resolve: 'main', tracked_paths: []) } it 'only syncs to the given environments' do allow(R10K::Deployment).to receive(:new).and_wrap_original do |original, settings, &block| original.call(settings.merge({ sources: { main: { remote: 'https://not/a/remote', basedir: '/not/a/basedir', type: 'git' } } })) end allow(R10K::Git::StatefulRepository).to receive(:new).and_return(repo) allow(R10K::Git).to receive_message_chain(:cache, :generate).and_return(cache) allow_any_instance_of(R10K::Source::Git).to receive(:environment_names).and_return([R10K::Environment::Name.new('first', {}), R10K::Environment::Name.new('second', {})]) expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| loader = environment.loader if environment.name == 'first' # For this test we want to have realistic Modules and access to # their internal Repos to validate the sync. Unfortunately, to # do so we do some invasive mocking, effectively implementing # our own R10K::ModuleLoader::Puppetfile#load. We directly update # the Environment's internal ModuleLoader and then call `load` on # it so it will create the correct loaded_content. 
allow(loader).to receive(:puppetfile_content).and_return('') expect(loader).to receive(:load) do loader.add_module('mod1', { git: 'https://remote', default_branch: 'main'}) loader.add_module('mod2', { git: 'https://remote', default_branch: 'main'}) loaded_content = loader.load! loaded_content[:modules].each do |mod| if mod.name == 'mod1' expect(mod.should_sync?).to be(true) else expect(mod.should_sync?).to be(false) end expect(mod).to receive(:sync).and_call_original end loaded_content end else expect(loader).not_to receive(:load) end original.call(environment, &block) end.twice expect(repo).to receive(:sync).once expect(subject.logger).to receive(:debug1).with(/Updating modules.*in environment.*first/i) expect(subject.logger).to receive(:debug1).with(/skipping environment.*second/i) subject.call end end describe "postrun" do let(:mock_config) do R10K::Deployment::MockConfig.new( :sources => { :control => { :type => :mock, :basedir => '/some/nonexistent/path/control', :environments => %w[first second third], } } ) end context "basic postrun hook" do let(:settings) { { postrun: ["/path/to/executable", "arg1", "arg2"] } } let(:deployment) { R10K::Deployment.new(mock_config.merge(settings)) } before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject do described_class.new({config: "/some/nonexistent/path" }, ['mod1'], settings) end it "is passed to Subprocess" do mock_subprocess = double allow(mock_subprocess).to receive(:logger=) expect(mock_subprocess).to receive(:execute) expect(R10K::Util::Subprocess).to receive(:new). with(["/path/to/executable", "arg1", "arg2"]). and_return(mock_subprocess) expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| modified = subject.instance_variable_get(:@modified_envs) << environment subject.instance_variable_set(:modified_envs, modified) end.exactly(3).times subject.call end end context "supports environments" do context "with one environment" do let(:settings) { { postrun: ["/generate/types/wrapper", "$modifiedenvs"] } } let(:deployment) { R10K::Deployment.new(mock_config.merge(settings)) } before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject do described_class.new({ config: '/some/nonexistent/path', environment: 'first' }, ['mod1'], settings) end it "properly substitutes the environment" do mock_subprocess = double allow(mock_subprocess).to receive(:logger=) expect(mock_subprocess).to receive(:execute) expect(R10K::Util::Subprocess).to receive(:new). with(["/generate/types/wrapper", "first"]). and_return(mock_subprocess) expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| if environment.name == 'first' expect(environment).to receive(:deploy).and_return(['first']) end original.call(environment, &block) end.exactly(3).times subject.call end end context "with all environments" do let(:settings) { { postrun: ["/generate/types/wrapper", "$modifiedenvs"] } } let(:deployment) { R10K::Deployment.new(mock_config.merge(settings)) } before do expect(R10K::Deployment).to receive(:new).and_return(deployment) end subject do described_class.new({ config: '/some/nonexistent/path' }, ['mod1'], settings) end it "properly substitutes the environment where modules were deployed" do mock_subprocess = double allow(mock_subprocess).to receive(:logger=) expect(mock_subprocess).to receive(:execute) expect(R10K::Util::Subprocess).to receive(:new). with(["/generate/types/wrapper", "first third"]). 
and_return(mock_subprocess) expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| if ['first', 'third'].include?(environment.name) expect(environment).to receive(:deploy).and_return(['mod1']) end original.call(environment, &block) end.exactly(3).times subject.call end it "does not execute the command if no envs had the module" do expect(R10K::Util::Subprocess).not_to receive(:new) mock_mod2 = double('mock_mod', name: 'mod2') expect(subject).to receive(:visit_environment).and_wrap_original do |original, environment, &block| expect(environment).to receive(:deploy).and_return([]) original.call(environment, &block) end.exactly(3).times subject.call end end end end end r10k-4.0.2/spec/unit/action/puppetfile/000077500000000000000000000000001460033767200176455ustar00rootroot00000000000000r10k-4.0.2/spec/unit/action/puppetfile/check_spec.rb000066400000000000000000000051751460033767200222710ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/puppetfile/check' describe R10K::Action::Puppetfile::Check do let(:default_opts) { {root: "/some/nonexistent/path"} } let(:modules) do [R10K::Module::Git.new("author/modname", "/some/nonexistent/path/modname", {git: 'https://my/git/remote', branch: 'main'})] end let(:loader) { instance_double('R10K::ModuleLoader::Puppetfile', :load! => {}, :modules => modules) } def checker(opts = {}, argv = [], settings = {}) opts = default_opts.merge(opts) return described_class.new(opts, argv, settings) end before(:each) do allow(R10K::ModuleLoader::Puppetfile). to receive(:new). with({ basedir: "/some/nonexistent/path", overrides: {modules: {default_ref: nil}} }).and_return(loader) end it_behaves_like "a puppetfile action" describe 'when no ref is defined' do let(:modules) do [R10K::Module::Git.new("author/modname", "/some/nonexistent/path/modname", {git: 'https://my/git/remote'})] end it 'returns an error message' do expect($stderr).to receive(:puts).with(/no ref defined/i) checker.call end end describe 'when a default_ref is defined' do let(:modules) do [R10K::Module::Git.new("author/modname", "/some/nonexistent/path/modname", {git: 'https://my/git/remote', overrides: {modules: {default_ref: 'main'}}})] end it 'is valid syntax' do expect($stderr).to receive(:puts).with(/Syntax OK/i) checker.call end end it "prints 'Syntax OK' when the Puppetfile syntax could be validated" do expect($stderr).to receive(:puts).with("Syntax OK") checker.call end it "prints an error message when validating the Puppetfile syntax raised an error" do allow(loader).to receive(:load!).and_raise(R10K::Error.new("Boom!")) allow(R10K::Errors::Formatting). to receive(:format_exception). with(instance_of(R10K::Error), anything). and_return("Formatted error message") expect($stderr).to receive(:puts).with("Formatted error message") checker.call end it "respects --puppetfile option" do allow($stderr).to receive(:puts) expect(R10K::ModuleLoader::Puppetfile). to receive(:new). 
with({ basedir: "/some/nonexistent/path", overrides: {modules: {default_ref: nil}}, puppetfile: "/custom/puppetfile/path" }).and_return(loader) checker({puppetfile: "/custom/puppetfile/path"}).call end end r10k-4.0.2/spec/unit/action/puppetfile/cri_runner_spec.rb000066400000000000000000000024361460033767200233570ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/puppetfile/cri_runner' describe R10K::Action::Puppetfile::CriRunner do let(:action_class) do Class.new do attr_reader :opts attr_reader :argv def initialize(opts, argv) @opts = opts @argv = argv end def call @opts[:runok] end end end subject(:cri_runner) { described_class.wrap(action_class) } let(:opts) { {:value => :yep} } let(:argv) { %w[value yes] } describe "handling options" do it "sets the root to the current wd" do expect(cri_runner).to receive(:wd).and_return('/some/nonexistent') expect(cri_runner.handle_opts({})).to include(:root => '/some/nonexistent') end describe "for the moduledir" do it "sets the option from the cli option if given" do opts = {:moduledir => '/some/other/nonexistent/modules'} expect(cri_runner.handle_opts(opts)).to include(:moduledir => '/some/other/nonexistent/modules') end end describe "for the puppetfile path" do it "sets the option from the cli option if given" do opts = {:puppetfile => '/some/other/nonexistent/modules'} expect(cri_runner.handle_opts(opts)).to include(:puppetfile => '/some/other/nonexistent/modules') end end end end r10k-4.0.2/spec/unit/action/puppetfile/install_spec.rb000066400000000000000000000074201460033767200226550ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/puppetfile/install' describe R10K::Action::Puppetfile::Install do let(:default_opts) { { root: "/some/nonexistent/path" } } let(:loader) { R10K::ModuleLoader::Puppetfile.new( basedir: '/some/nonexistent/path', overrides: {force: false}) } def installer(opts = {}, argv = [], settings = {}) opts = default_opts.merge(opts) return described_class.new(opts, argv, settings) end before(:each) do allow(loader).to receive(:load!).and_return({}) allow(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", overrides: {force: false, modules: {default_ref: nil}}}). and_return(loader) end it_behaves_like "a puppetfile install action" describe "installing modules" do let(:modules) do (1..4).map do |idx| R10K::Module::Base.new("author/modname#{idx}", "/some/nonexistent/path/modname#{idx}", {}) end end before do allow(loader).to receive(:load!).and_return({ modules: modules, managed_directories: [], desired_contents: [], purge_exclusions: [] }) end it "syncs each module in the Puppetfile" do modules.each { |m| expect(m).to receive(:sync) } expect(installer.call).to eq true end it "returns false if a module failed to install" do modules[0..2].each { |m| expect(m).to receive(:sync) } expect(modules[3]).to receive(:sync).and_raise expect(installer.call).to eq false end it "reads in the default for git refs" do modules.each { |m| expect(m).to receive(:sync) } expect(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", overrides: {force: false, modules: {default_ref: 'main'}}}). 
and_return(loader) installer({}, [], {git: {default_ref: 'main'}}).call end end describe "purging" do it "purges the moduledir after installation" do allow(loader).to receive(:load!).and_return({ modules: [], desired_contents: [ 'root/foo' ], managed_directories: [ 'root' ], purge_exclusions: [ 'root/**/**.rb' ] }) mock_cleaner = double("cleaner") expect(R10K::Util::Cleaner).to receive(:new). with(["root"], ["root/foo"], ["root/**/**.rb"]). and_return(mock_cleaner) expect(mock_cleaner).to receive(:purge!) installer.call end end describe "using custom paths" do it "can use a custom moduledir path" do expect(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", overrides: {force: false, modules: {default_ref: nil}}, puppetfile: "/some/other/path/Puppetfile"}). and_return(loader) installer({puppetfile: "/some/other/path/Puppetfile"}).call expect(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", overrides: {force: false, modules: {default_ref: nil}}, moduledir: "/some/other/path/site-modules"}). and_return(loader) installer({moduledir: "/some/other/path/site-modules"}).call end end describe "forcing to overwrite local changes" do it "can use the force overwrite option" do allow(loader).to receive(:load!).and_return({ modules: [] }) subject = described_class.new({root: "/some/nonexistent/path", force: true}, [], {}) expect(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", overrides: {force: true, modules: {default_ref: nil}}}). and_return(loader) subject.call end end end r10k-4.0.2/spec/unit/action/puppetfile/purge_spec.rb000066400000000000000000000034701460033767200223320ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/puppetfile/purge' describe R10K::Action::Puppetfile::Purge do let(:default_opts) { {root: "/some/nonexistent/path"} } let(:puppetfile) do instance_double('R10K::ModuleLoader::Puppetfile', :load! => { :modules => %w{mod}, :managed_directories => %w{foo}, :desired_contents => %w{bar}, :purge_exclusions => %w{baz} }) end def purger(opts = {}, argv = [], settings = {}) opts = default_opts.merge(opts) return described_class.new(opts, argv, settings) end before(:each) do allow(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path"}). and_return(puppetfile) end it_behaves_like "a puppetfile action" it "purges unmanaged entries in the Puppetfile moduledir" do mock_cleaner = double("cleaner") expect(R10K::Util::Cleaner).to receive(:new). with(["foo"], ["bar"], ["baz"]). and_return(mock_cleaner) expect(mock_cleaner).to receive(:purge!) purger.call end describe "using custom paths" do it "can use a custom puppetfile path" do expect(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", puppetfile: "/some/other/path/Puppetfile"}). and_return(puppetfile) purger({puppetfile: "/some/other/path/Puppetfile"}).call end it "can use a custom moduledir path" do expect(R10K::ModuleLoader::Puppetfile).to receive(:new). with({basedir: "/some/nonexistent/path", moduledir: "/some/other/path/site-modules"}). 
and_return(puppetfile) purger({moduledir: "/some/other/path/site-modules"}).call end end end r10k-4.0.2/spec/unit/action/runner_spec.rb000066400000000000000000000345541460033767200203530ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/base' require 'puppet_forge/connection' require 'r10k/action/runner' describe R10K::Action::Runner do let(:action_class) do Class.new do attr_reader :opts attr_reader :argv attr_reader :settings def initialize(opts, argv, settings = {}) @opts = opts @argv = argv @settings = settings end def call @argv.map(&:upcase) end end end subject(:runner) { described_class.new({:opts => :yep}, %w[args yes], action_class) } before(:each) do expect(runner.logger).not_to receive(:error) end describe "instantiating the wrapped class" do it "creates an instance of the class" do expect(runner.instance).to be_a_kind_of action_class end it "passes the opts and argv to the instance" do expect(runner.instance.opts).to eq(:opts => :yep) expect(runner.instance.argv).to eq(%w[args yes]) end it "strips out options that the runner handles" do runner = described_class.new({:opts => :yep, :loglevel => 'FATAL'}, %w[args yes], action_class) expect(runner.instance.opts).to eq(:opts => :yep) end end describe "calling" do it "configures logging" do expect(runner).to receive(:setup_logging) runner.call end it "configures settings" do expect(runner).to receive(:setup_settings) runner.call end it "configures forge authorization" do expect(runner).to receive(:setup_authorization) runner.call end it "returns the result of the wrapped class #call method" do expect(runner.call).to eq %w[ARGS YES] end end describe "configuring settings" do subject(:runner) { described_class.new(options, %w[args yes], action_class) } let(:global_settings) { R10K::Settings.global_settings } before(:each) do expect(R10K::Settings).to receive(:global_settings).and_return(global_settings) allow(File).to receive(:executable?).and_return(true) end opts = { cachedir: nil, puppet_path: :deploy, generate_types: :deploy, } opts.each do |opt, conf_path| context "with #{opt} config setting" do let(:options) { { config: "spec/fixtures/unit/action/r10k_#{opt}.yaml" } } context "when not overridden" do it "uses the config value" do override = { "#{opt}": "/config_#{opt}" } overrides = if conf_path.nil? override else { "#{conf_path}": override } end expect(global_settings).to receive(:evaluate).with(hash_including(overrides)).and_call_original runner.call end end context "when overridden" do let(:options) { super().merge("#{opt.to_s.sub('_','-')}": "/overridden_#{opt}") } it "uses the overridden value" do override = { "#{opt}": "/overridden_#{opt}" } overrides = if conf_path.nil? override else { "#{conf_path}": override } end expect(global_settings).to receive(:evaluate).with(hash_including(overrides)).and_call_original runner.call end end end context "with complete config" do let(:options) { { config: "spec/fixtures/unit/action/r10k.yaml" } } let(:config) do config = {} opts.each do |o, path| if path.nil? config[o] = "/config_#{o}" else config[path] ||= {} config[path][o] = "/config_#{o}" end end config end context "when not overridden" do it "uses the config value" do expect(global_settings).to receive(:evaluate).with(config).and_call_original runner.call end end context "when overridden" do let(:options) { super().merge("#{opt.to_s.sub('_','-')}": "/overridden_#{opt}") } it "uses the overridden value" do with_overrides = config if conf_path.nil? 
with_overrides[opt] = "/overridden_#{opt}" else with_overrides[conf_path][opt] = "/overridden_#{opt}" end expect(global_settings).to receive(:evaluate).with(with_overrides).and_call_original runner.call end end end end end describe "configuring logging" do before(:each) do R10K::Logging.outputters.clear end it "sets the log level if :loglevel is provided" do runner = described_class.new({:opts => :yep, :loglevel => 'FATAL'}, %w[args yes], action_class) # The settings/overrides system causes the level to be set twice expect(R10K::Logging).to receive(:level=).with('FATAL').twice runner.call end # The logging fixture tests require a platform with syslog if !R10K::Util::Platform.windows? it "sets the log level if the logging.level setting is provided" do runner = described_class.new({ opts: :yep, config: 'spec/fixtures/unit/action/r10k_logging.yaml'}, %w[args yes], action_class) expect(R10K::Logging).to receive(:level=).with('FATAL') runner.call end it "sets the outputters if logging.outputs is provided" do runner = described_class.new({ opts: :yep, config: 'spec/fixtures/unit/action/r10k_logging.yaml' }, %w[args yes], action_class) expect(R10K::Logging).to receive(:add_outputters).with([ { type: 'file', parameters: { filename: 'r10k.log' } }, { type: 'syslog' } ]) runner.call end it "disables the default outputter if the logging.disable_default_stderr setting is provided" do runner = described_class.new({ opts: :yep, config: 'spec/fixtures/unit/action/r10k_logging.yaml'}, %w[args yes], action_class) expect(R10K::Logging).to receive(:disable_default_stderr=).with(true) runner.call end it "adds additional log outputs if the logging.outputs setting is provided" do runner = described_class.new({ opts: :yep, config: 'spec/fixtures/unit/action/r10k_logging.yaml'}, %w[args yes], action_class) runner.call expect(R10K::Logging.outputters).to_not be_empty end it "disables the default output if the logging.disable_default_stderr setting is provided" do runner = described_class.new({ opts: :yep, config: 'spec/fixtures/unit/action/r10k_logging.yaml'}, %w[args yes], action_class) runner.call expect(runner.logger.outputters).to satisfy { |outputs| outputs.any? { |output| output.is_a?(R10K::Logging::TerminalOutputter) && output.level == Log4r::OFF } } end end it "doesn't add additional log outputs if the logging.outputs setting is not provided" do runner.call expect(R10K::Logging.outputters).to be_empty end it "includes the default stderr outputter" do runner.call expect(runner.logger.outputters).to satisfy { |outputs| outputs.any? { |output| output.is_a? 
R10K::Logging::TerminalOutputter } } end it "does not modify the loglevel if :loglevel is not provided" do expect(R10K::Logging).to_not receive(:level=) runner.call end end describe "configuring github app credentials" do it 'errors if app id is passed without ssl key' do runner = described_class.new( { 'github-app-id': '/nonexistent', }, %w[args yes], action_class ) expect{ runner.call }.to raise_error(R10K::Error, /Must specify both id and SSL private key/) end it 'errors if ssl key is passed without app id' do runner = described_class.new( { 'github-app-key': '/nonexistent', }, %w[args yes], action_class ) expect{ runner.call }.to raise_error(R10K::Error, /Must specify both id and SSL private key/) end it 'errors if both app id and token paths are passed' do runner = described_class.new( { 'github-app-id': '/nonexistent', 'oauth-token': '/also/fake' }, %w[args yes], action_class ) expect{ runner.call }.to raise_error(R10K::Error, /Cannot specify both/) end it 'errors if both ssl key and token paths are passed' do runner = described_class.new( { 'github-app-key': '/nonexistent', 'oauth-token': '/also/fake' }, %w[args yes], action_class ) expect{ runner.call }.to raise_error(R10K::Error, /Cannot specify both/) end it 'errors if both ssl key and ssh key paths are passed' do runner = described_class.new( { 'github-app-key': '/nonexistent', 'private-key': '/also/fake' }, %w[args yes], action_class ) expect{ runner.call }.to raise_error(R10K::Error, /Cannot specify both/) end it 'errors if both app id and ssh key are passed' do runner = described_class.new( { 'github-app-id': '/nonexistent', 'private-key': '/also/fake' }, %w[args yes], action_class ) expect{ runner.call }.to raise_error(R10K::Error, /Cannot specify both/) end it 'saves the parameters in settings hash' do runner = described_class.new( { 'github-app-id': '123456', 'github-app-key': '/my/ssl/key', 'github-app-ttl': '600' }, %w[args yes], action_class ) runner.call expect(runner.instance.settings[:git][:github_app_id]).to eq('123456') expect(runner.instance.settings[:git][:github_app_key]).to eq('/my/ssl/key') expect(runner.instance.settings[:git][:github_app_ttl]).to eq('600') end it 'saves the parameters in settings hash without ttl and uses its default value' do runner = described_class.new( { 'github-app-id': '123456', 'github-app-key': '/my/ssl/key', }, %w[args yes], action_class ) runner.call expect(runner.instance.settings[:git][:github_app_id]).to eq('123456') expect(runner.instance.settings[:git][:github_app_key]).to eq('/my/ssl/key') expect(runner.instance.settings[:git][:github_app_ttl]).to eq('120') end end describe "configuring git credentials" do it 'errors if both token and key paths are passed' do runner = described_class.new({ 'oauth-token': '/nonexistent', 'private-key': '/also/fake' }, %w[args yes], action_class) expect{ runner.call }.to raise_error(R10K::Error, /Cannot specify both/) end it 'saves the sshkey path in settings hash' do runner = described_class.new({ 'private-key': '/my/ssh/key' }, %w[args yes], action_class) runner.call expect(runner.instance.settings[:git][:private_key]).to eq('/my/ssh/key') end it 'overrides per-repo sshkey in settings hash' do runner = described_class.new({ config: "spec/fixtures/unit/action/r10k_creds.yaml", 'private-key': '/my/ssh/key' }, %w[args yes], action_class) runner.call expect(runner.instance.settings[:git][:private_key]).to eq('/my/ssh/key') expect(runner.instance.settings[:git][:repositories].count).to eq(2) runner.instance.settings[:git][:repositories].each do 
|repo_settings| expect(repo_settings[:private_key]).to eq('/my/ssh/key') end end it 'saves the token path in settings hash' do runner = described_class.new({ 'oauth-token': '/my/token/path' }, %w[args yes], action_class) runner.call expect(runner.instance.settings[:git][:oauth_token]).to eq('/my/token/path') end it 'overrides per-repo oauth token in settings hash' do runner = described_class.new({ config: "spec/fixtures/unit/action/r10k_creds.yaml", 'oauth-token': '/my/token' }, %w[args yes], action_class) runner.call expect(runner.instance.settings[:git][:oauth_token]).to eq('/my/token') expect(runner.instance.settings[:git][:repositories].count).to eq(2) runner.instance.settings[:git][:repositories].each do |repo_settings| expect(repo_settings[:oauth_token]).to eq('/my/token') end end end describe "configuration authorization" do context "settings auth" do it "sets the configured token as the forge authorization header" do options = { config: "spec/fixtures/unit/action/r10k_forge_auth.yaml" } runner = described_class.new(options, %w[args yes], action_class) expect(PuppetForge).to receive(:host=).with('http://private-forge.com') expect(PuppetForge::Connection).to receive(:authorization=).with('faketoken') expect(PuppetForge::Connection).to receive(:authorization).and_return('faketoken') expect(R10K::Util::License).not_to receive(:load) runner.setup_settings runner.setup_authorization end end context "license auth" do context "when license is not present" do before(:each) do expect(R10K::Util::License).to receive(:load).and_return(nil) end it "does not set authorization header on connection class" do expect(PuppetForge::Connection).not_to receive(:authorization=) runner.setup_authorization end end context "when license is present but invalid" do before(:each) do expect(R10K::Util::License).to receive(:load).and_raise(R10K::Error.new('invalid license')) end it "issues warning to logger" do expect(runner.logger).to receive(:warn).with(/invalid license/) runner.setup_authorization end it "does not set authorization header on connection class" do expect(PuppetForge::Connection).not_to receive(:authorization=) runner.setup_authorization end end context "when license is present and valid" do before(:each) do mock_license = double('pe-license', :authorization_token => 'test token') expect(R10K::Util::License).to receive(:load).and_return(mock_license) end it "sets authorization header on connection class" do expect(PuppetForge::Connection).to receive(:authorization=).with('test token') runner.setup_authorization end end end end end r10k-4.0.2/spec/unit/action/visitor_spec.rb000066400000000000000000000020161460033767200205250ustar00rootroot00000000000000require 'spec_helper' require 'r10k/action/visitor' require 'r10k/logging' describe R10K::Action::Visitor do let(:visitor_class) do Class.new do include R10K::Action::Visitor include R10K::Logging attr_accessor :trace def visit_error(other) raise ArgumentError, "no soup for you" end end end subject { visitor_class.new } it "dispatches visit invocations to the type specific method" do expect(subject).to receive(:visit_sym).with(:hi) subject.visit(:sym, :hi) end describe "when a visit_ method raises an error" do [true, false].each do |trace| msg = trace ? 
"a" : "no" it "logs the error with #{msg} backtrace when trace is #{trace}" do subject.trace = trace expect(R10K::Errors::Formatting).to( receive(:format_exception).with(instance_of(ArgumentError), trace) ).and_return("errmsg") expect(subject.logger).to receive(:error).with('errmsg') subject.visit(:error, :hi) end end end end r10k-4.0.2/spec/unit/cli_spec.rb000066400000000000000000000002721460033767200163220ustar00rootroot00000000000000require 'spec_helper' RSpec.describe 'basic cli sanity check' do it 'can load the R10K::CLI namespace' do expect { require 'r10k/cli' }.not_to raise_exception end end r10k-4.0.2/spec/unit/deployment/000077500000000000000000000000001460033767200163735ustar00rootroot00000000000000r10k-4.0.2/spec/unit/deployment/config_spec.rb000066400000000000000000000022671460033767200212060ustar00rootroot00000000000000require 'spec_helper' describe R10K::Deployment::Config do let(:loader) { instance_double('R10K::Settings::Loader') } let(:initializer) { instance_double('R10K::Initializers::GlobalInitializer') } describe "applying global settings" do before do expect(R10K::Settings::Loader).to receive(:new).and_return(loader) expect(R10K::Initializers::GlobalInitializer).to receive(:new).and_return(initializer) end it 'runs application initialization' do config = instance_double('Hash') allow(loader).to receive(:read).and_return({}) expect(initializer).to receive(:call) described_class.new('some/path') end end describe "overriding settings" do before do expect(R10K::Settings::Loader).to receive(:new).and_return(loader) end it 'allows settings from config to be overridden' do allow(loader).to receive(:read).and_return({:cachedir => 'fromfile'}) expect(R10K::Initializers::GlobalInitializer).to receive(:new).with(hash_including(:cachedir => 'fromoverride')).and_return(initializer) expect(initializer).to receive(:call) described_class.new('some/path', {:cachedir => 'fromoverride'}) end end end r10k-4.0.2/spec/unit/deployment_spec.rb000066400000000000000000000111331460033767200177310ustar00rootroot00000000000000require 'spec_helper' require 'r10k/deployment' require 'tmpdir' describe R10K::Deployment do let(:confdir) { Dir.mktmpdir } let(:config) do R10K::Deployment::MockConfig.new( :sources => { :control => { :type => :mock, :basedir => File.join(confdir, 'environments'), :environments => %w[first second third], }, :hiera => { :type => :mock, :basedir => File.join(confdir, 'hiera'), :environments => %w[fourth fifth sixth], } } ) end subject(:deployment) { described_class.new(config) } let(:control) { deployment.sources.find { |source| source.name == :control } } let(:hiera) { deployment.sources.find { |source| source.name == :hiera } } describe "loading" do describe "sources" do it "creates a source for each key in the ':sources' config entry" do expect(control.basedir).to eq(File.join(confdir, 'environments')) expect(hiera.basedir).to eq(File.join(confdir, 'hiera')) end end describe "loading environments" do it "loads environments from each source" do %w[first second third fourth fifth sixth].each do |env| expect(deployment.environments.map(&:name)).to include(env) end end end end describe "preloading" do it "invokes #preload! on each source" do deployment.sources.each do |source| expect(source).to receive(:preload!) end deployment.preload! 
end end describe "paths" do it "retrieves the path for each source" do expect(deployment.paths).to include(File.join(confdir, 'environments')) expect(deployment.paths).to include(File.join(confdir, 'hiera')) end end describe "paths and sources" do it "retrieves the path for each source" do p_a_s = deployment.paths_and_sources expect(p_a_s[File.join(confdir, 'environments')]).to eq([control]) expect(p_a_s[File.join(confdir, 'hiera')]).to eq([hiera]) end end describe "purging" do it "purges each managed directory" do env_basedir = double("basedir environments") hiera_basedir = double("basedir hiera") expect(env_basedir).to receive(:purge!) expect(hiera_basedir).to receive(:purge!) expect(R10K::Util::Basedir).to receive(:new).with(File.join(confdir, 'environments'), [control]).and_return(env_basedir) expect(R10K::Util::Basedir).to receive(:new).with(File.join(confdir, 'hiera'), [hiera]).and_return(hiera_basedir) deployment.purge! end end describe "accepting a visitor" do it "passes itself to the visitor" do visitor = spy('visitor') expect(visitor).to receive(:visit).with(:deployment, subject) subject.accept(visitor) end it "passes the visitor to each environment if the visitor yields" do visitor = spy('visitor') expect(visitor).to receive(:visit) do |type, other, &block| expect(type).to eq :deployment expect(other).to eq subject block.call end source1 = spy('source') expect(source1).to receive(:accept).with(visitor) source2 = spy('source') expect(source2).to receive(:accept).with(visitor) expect(subject).to receive(:sources).and_return([source1, source2]) subject.accept(visitor) end end end describe R10K::Deployment, "with environment collisions" do let(:confdir) { Dir.mktmpdir } let(:config) do R10K::Deployment::MockConfig.new( :sources => { :s1 => { :type => :mock, :basedir => File.join(confdir, 'environments'), :environments => %w[first second third], }, :s2 => { :type => :mock, :basedir => File.join(confdir, 'environments'), :environments => %w[third fourth fifth], } } ) end subject(:deployment) { described_class.new(config) } it "raises an error when validating" do expect { deployment.validate! 
}.to raise_error(R10K::Error, /Environment collision at .* between s\d:third and s\d:third/) end end describe R10K::Deployment, "checking the 'sources' key" do { "when missing" => {}, "when empty" => {:sources => []}, }.each_pair do |desc, config_hash| describe desc do let(:config) { R10K::Deployment::MockConfig.new(config_hash) } subject(:deployment) { described_class.new(config) } it "raises an error when enumerating sources" do expect { deployment.sources }.to raise_error(R10K::Error, "Unable to load sources; the supplied configuration does not define the 'sources' key") end end end end r10k-4.0.2/spec/unit/environment/000077500000000000000000000000001460033767200165575ustar00rootroot00000000000000r10k-4.0.2/spec/unit/environment/base_spec.rb000066400000000000000000000073231460033767200210350ustar00rootroot00000000000000require 'spec_helper' require 'r10k/environment' describe R10K::Environment::Base do let(:basepath) { '/some/imaginary/path' } let(:envname) { 'env_name' } let(:path) { File.join(basepath, envname) } subject(:environment) { described_class.new('envname', basepath, envname, {}) } it "can return the fully qualified path" do expect(environment.path).to eq(Pathname.new(path)) end it "raises an exception when #sync is called" do expect { environment.sync }.to raise_error(NotImplementedError) end describe "accepting a visitor" do it "passes itself to the visitor" do visitor = spy('visitor') expect(visitor).to receive(:visit).with(:environment, subject) subject.accept(visitor) end it "passes the visitor to the puppetfile if the visitor yields" do visitor = spy('visitor') expect(visitor).to receive(:visit) do |type, other, &block| expect(type).to eq :environment expect(other).to eq subject block.call end pf = spy('puppetfile') expect(pf).to receive(:accept).with(visitor) expect(subject).to receive(:puppetfile).and_return(pf) subject.accept(visitor) end end describe "#whitelist" do let(:whitelist) do ['**/*.xpp', 'custom', '*.tmp'] end it "combines given patterns with full_path to env" do expect(subject.whitelist(whitelist)).to all(start_with(subject.path.to_s)) end end describe "#purge_exclusions" do let(:mock_env) { instance_double("R10K::Environment::Base") } let(:mock_puppetfile) { instance_double("R10K::Puppetfile", :environment= => true, :environment => mock_env) } let(:loader) do instance_double("R10K::ModuleLoader::Puppetfile", :environment= => nil, :load => { :modules => @modules, :managed_directories => @managed_dirs, :desired_contents => @desired_contents, :purge_exclusions => @purge_ex }) end before(:each) do @modules = [] @managed_dirs = [] @desired_contents = [] @purge_exclusions = [] end it "excludes .r10k-deploy.json" do allow(R10K::ModuleLoader::Puppetfile).to receive(:new).and_return(loader) subject.deploy expect(subject.purge_exclusions).to include(/r10k-deploy\.json/) end it "excludes puppetfile managed directories" do @managed_dirs = [ '/some/imaginary/path/env_name/modules', '/some/imaginary/path/env_name/data', ] allow(R10K::ModuleLoader::Puppetfile).to receive(:new).and_return(loader) subject.deploy exclusions = subject.purge_exclusions @managed_dirs.each do |dir| expect(exclusions).to include(dir) end end describe "puppetfile desired contents" do before(:each) do @desired_contents = [ 'modules/apache', 'data/local/site' ].collect do |c| File.join(path, c) end allow(File).to receive(:directory?).and_return true allow(R10K::ModuleLoader::Puppetfile).to receive(:new).and_return(loader) subject.deploy end it "excludes desired directory contents with glob" do 
exclusions = subject.purge_exclusions expect(exclusions).to include(/#{Regexp.escape(File.join('apache', '**', '*'))}$/) expect(exclusions).to include(/#{Regexp.escape(File.join('site', '**', '*'))}$/) end it "excludes ancestors of desired directories" do exclusions = subject.purge_exclusions expect(exclusions).to include(/modules$/) expect(exclusions).to include(/data\/local$/) expect(exclusions).to include(/data$/) end end end end r10k-4.0.2/spec/unit/environment/git_spec.rb000066400000000000000000000063521460033767200207070ustar00rootroot00000000000000require 'spec_helper' require 'r10k/environment' describe R10K::Environment::Git do subject do described_class.new( 'myenv', '/some/nonexistent/environmentdir', 'gitref', { :remote => 'https://git-server.site/my-repo.git', :ref => 'd026ea677116424d2968edb9cee8cbc24d09322b', } ) end describe "initializing" do subject do described_class.new('name', '/dir', 'ref', { :remote => 'url', :ref => 'value', :puppetfile_name => 'Puppetfile', :moduledir => 'modules', :modules => { }, }) end it "accepts valid base class initialization arguments" do expect(subject.name).to eq 'name' end end describe "storing attributes" do it "can return the environment name" do expect(subject.name).to eq 'myenv' end it "can return the environment basedir" do expect(subject.basedir).to eq '/some/nonexistent/environmentdir' end it "can return the environment dirname" do expect(subject.dirname).to eq 'gitref' end it "can return the environment remote" do expect(subject.remote).to eq 'https://git-server.site/my-repo.git' end it "can return the environment ref" do expect(subject.ref).to eq 'd026ea677116424d2968edb9cee8cbc24d09322b' end end describe "synchronizing the environment" do it "syncs the git repository" do expect(subject.repo).to receive(:sync) subject.sync end end describe "generating a puppetfile for the environment" do let(:puppetfile) { subject.puppetfile } it "creates a puppetfile at the full path to the environment" do expect(puppetfile.basedir).to eq '/some/nonexistent/environmentdir/gitref' end it "sets the moduledir to 'modules' relative to the environment path" do expect(puppetfile.moduledir).to eq '/some/nonexistent/environmentdir/gitref/modules' end it "sets the puppetfile path to 'Puppetfile' relative to the environment path" do expect(puppetfile.puppetfile_path).to eq '/some/nonexistent/environmentdir/gitref/Puppetfile' end end describe "enumerating modules" do it "loads the Puppetfile and returns modules in that puppetfile" do loaded = { desired_contents: [], managed_directories: [], purge_exclusions: [] } mod = double('A module', :name => 'dbl') expect(subject.loader).to receive(:load).and_return(loaded.merge(modules: [mod])) expect(subject.modules).to eq([mod]) end end describe "determining the status" do it "delegates to the repo" do expect(subject.repo).to receive(:status).and_return :some_status expect(subject.status).to eq :some_status end end describe "environment signature" do it "returns the git sha of HEAD" do expect(subject.repo).to receive(:head).and_return 'f00b00' expect(subject.signature).to eq 'f00b00' end end describe "info hash" do let(:info_hash) { subject.info } before(:each) do allow(subject.repo).to receive(:head).and_return 'f00b00' end it "includes name and signature" do expect(info_hash.keys).to include :name, :signature expect(info_hash).not_to have_value(nil) end end end r10k-4.0.2/spec/unit/environment/name_spec.rb000066400000000000000000000133341460033767200210420ustar00rootroot00000000000000require 'spec_helper' require 
'r10k/environment/name' describe R10K::Environment::Name do describe "strip_component" do it "does not modify the given name when no strip_component is given" do bn = described_class.new('myenv', source: 'source', prefix: false) expect(bn.dirname).to eq 'myenv' expect(bn.name).to eq 'myenv' expect(bn.original_name).to eq 'myenv' end it "removes the first occurrence of a regex match when a regex is given" do bn = described_class.new('myenv', source: 'source', prefix: false, strip_component: '/env/') expect(bn.dirname).to eq 'my' expect(bn.name).to eq 'my' expect(bn.original_name).to eq 'myenv' end it "does not modify the given name when there is no regex match" do bn = described_class.new('myenv', source: 'source', prefix: false, strip_component: '/bar/') expect(bn.dirname).to eq 'myenv' expect(bn.name).to eq 'myenv' expect(bn.original_name).to eq 'myenv' end it "removes the given name's prefix when it matches strip_component" do bn = described_class.new('env/prod', source: 'source', prefix: false, strip_component: 'env/') expect(bn.dirname).to eq 'prod' expect(bn.name).to eq 'prod' expect(bn.original_name).to eq 'env/prod' end it "raises an error when given an integer" do expect { described_class.new('env/prod', source: 'source', prefix: false, strip_component: 4) }.to raise_error(%r{Improper.*"4"}) end end describe "prefixing" do it "uses the branch name as the dirname when prefixing is off" do bn = described_class.new('mybranch', :source => 'source', :prefix => false) expect(bn.dirname).to eq 'mybranch' expect(bn.name).to eq 'mybranch' expect(bn.original_name).to eq 'mybranch' end it "prepends the source name when prefixing is on" do bn = described_class.new('mybranch', :source => 'source', :prefix => true) expect(bn.dirname).to eq 'source_mybranch' expect(bn.name).to eq 'mybranch' expect(bn.original_name).to eq 'mybranch' end it "prepends the prefix name when prefixing is overridden" do bn = described_class.new('mybranch', {:prefix => "bar", :sourcename => 'foo'}) expect(bn.dirname).to eq 'bar_mybranch' expect(bn.name).to eq 'mybranch' expect(bn.original_name).to eq 'mybranch' end it "uses the branch name as the dirname when prefixing is nil" do bn = described_class.new('mybranch', {:prefix => nil, :sourcename => 'foo'}) expect(bn.dirname).to eq 'mybranch' expect(bn.name).to eq 'mybranch' expect(bn.original_name).to eq 'mybranch' end end describe "determining the validate behavior with :invalid" do [ ['correct_and_warn', {:validate => true, :correct => true}], ['correct', {:validate => false, :correct => true}], ['error', {:validate => true, :correct => false}], ].each do |(setting, outcome)| it "treats #{setting} as #{outcome.inspect}" do bn = described_class.new('mybranch', :source => 'source', :invalid => setting) expect(bn.validate?).to eq outcome[:validate] expect(bn.correct?).to eq outcome[:correct] end end end describe "determining if a branch is a valid environment name" do invalid_cases = [ 'hyphenated-branch', 'dotted.branch', 'slashed/branch', 'at@branch', 'http://branch' ] valid_cases = [ 'my_branchname', 'my_issue_346', ] describe "and validate is false" do invalid_cases.each do |branch| it "is valid if the branch is #{branch}" do bn = described_class.new(branch, {:validate => false}) expect(bn).to be_valid end end valid_cases.each do |branch| it "is valid if the branch is #{branch}" do bn = described_class.new(branch, {:validate => false}) expect(bn).to be_valid end end end describe "and validate is true" do invalid_cases.each do |branch| it "is invalid if the branch is 
#{branch}" do bn = described_class.new(branch, {:validate => true}) expect(bn).to_not be_valid end end valid_cases.each do |branch| it "is valid if the branch is #{branch}" do bn = described_class.new(branch, {:validate => true}) expect(bn).to be_valid end end end end describe "correcting branch names" do invalid_cases = [ 'hyphenated-branch', 'dotted.branch', 'slashed/branch', 'at@branch', 'http://branch' ] valid_cases = [ 'my_branchname', 'my_issue_346', ] describe "and correct is false" do invalid_cases.each do |branch| it "doesn't modify #{branch}" do bn = described_class.new(branch.dup, {:correct => false}) expect(bn.dirname).to eq branch end end valid_cases.each do |branch| it "doesn't modify #{branch}" do bn = described_class.new(branch.dup, {:correct => false}) expect(bn.dirname).to eq branch end end end describe "and correct is true" do invalid_cases.each do |branch| it "replaces invalid characters in #{branch} with underscores" do bn = described_class.new(branch.dup, {:correct => true}) expect(bn.dirname).to eq branch.gsub(/\W/, '_') expect(bn.name).to eq branch expect(bn.original_name).to eq branch end end valid_cases.each do |branch| it "doesn't modify #{branch}" do bn = described_class.new(branch.dup, {:correct => true}) expect(bn.dirname).to eq branch end end end end end r10k-4.0.2/spec/unit/environment/plain_spec.rb000066400000000000000000000003531460033767200212220ustar00rootroot00000000000000require 'spec_helper' require 'r10k/environment' describe R10K::Environment::Plain do it "initializes successfully" do expect(described_class.new('envname', '/basedir', 'dirname', {})).to be_a_kind_of(described_class) end end r10k-4.0.2/spec/unit/environment/svn_spec.rb000066400000000000000000000112661460033767200207320ustar00rootroot00000000000000require 'spec_helper' require 'r10k/environment' describe R10K::Environment::SVN do subject do described_class.new( 'myenv', '/some/nonexistent/environmentdir', 'svn-dirname', { :remote => 'https://svn-server.site/svn-repo/trunk' } ) end let(:working_dir) { subject.working_dir } describe "initializing" do subject do described_class.new('name', '/dir', 'ref', { :puppetfile_name => 'Puppetfile', }) end it "accepts valid base class initialization arguments" do expect(subject.name).to eq 'name' end end describe "storing attributes" do it "can return the environment name" do expect(subject.name).to eq 'myenv' end it "can return the environment basedir" do expect(subject.basedir).to eq '/some/nonexistent/environmentdir' end it "can return the environment dirname" do expect(subject.dirname).to eq 'svn-dirname' end it "can return the environment remote" do expect(subject.remote).to eq 'https://svn-server.site/svn-repo/trunk' end end describe "synchronizing the environment" do it "checks out the working directory when creating a new environment" do allow(working_dir).to receive(:is_svn?).and_return(false) expect(working_dir).to receive(:checkout) subject.sync end it "updates the working directory when updating an existing environment" do allow(working_dir).to receive(:is_svn?).and_return(true) expect(working_dir).to receive(:update) subject.sync end end describe "generating a puppetfile for the environment" do let(:puppetfile) { subject.puppetfile } it "creates a puppetfile at the full path to the environment" do expect(puppetfile.basedir).to eq '/some/nonexistent/environmentdir/svn-dirname' end it "sets the moduledir to 'modules' relative to the environment path" do expect(puppetfile.moduledir).to eq 
'/some/nonexistent/environmentdir/svn-dirname/modules' end it "sets the puppetfile path to 'Puppetfile' relative to the environment path" do expect(puppetfile.puppetfile_path).to eq '/some/nonexistent/environmentdir/svn-dirname/Puppetfile' end end describe "enumerating modules" do it "loads the Puppetfile and returns modules in that puppetfile" do loaded = { managed_directories: [], desired_contents: [], purge_exclusions: [] } mod = double('A module', :name => 'dbl') expect(subject.loader).to receive(:load).and_return(loaded.merge(modules: [mod])) expect(subject.modules).to eq([mod]) end end describe "determining the status" do it "is absent if the working directory is absent" do expect(subject.path).to receive(:exist?).and_return(false) expect(subject.status).to eq :absent end it "is mismatched if the working directory is not an SVN repo" do expect(subject.path).to receive(:exist?).and_return(true) expect(working_dir).to receive(:is_svn?).and_return(false) expect(subject.status).to eq :mismatched end it "is mismatched if the working directory remote doesn't match the expected remote" do expect(subject.path).to receive(:exist?).and_return(true) expect(working_dir).to receive(:is_svn?).and_return(true) expect(working_dir).to receive(:url).and_return 'https://svn-server.site/another-svn-repo/trunk' expect(subject.status).to eq :mismatched end it "is outdated when the working directory has not synced" do expect(subject.path).to receive(:exist?).and_return(true) expect(working_dir).to receive(:is_svn?).and_return(true) expect(working_dir).to receive(:url).and_return 'https://svn-server.site/svn-repo/trunk' expect(subject.status).to eq :outdated end it "is insync when the working directory has been synced" do expect(subject.path).to receive(:exist?).and_return(true) expect(working_dir).to receive(:is_svn?).twice.and_return(true) expect(working_dir).to receive(:url).and_return 'https://svn-server.site/svn-repo/trunk' expect(working_dir).to receive(:update) subject.sync expect(subject.status).to eq :insync end end describe "environment signature" do it "returns the svn revision of the branch" do expect(working_dir).to receive(:revision).and_return '1337' expect(subject.signature).to eq '1337' end end describe "info hash" do let(:info_hash) { subject.info } before(:each) do expect(working_dir).to receive(:revision).and_return '1337' end it "includes name and signature" do expect(info_hash.keys).to include :name, :signature expect(info_hash).not_to have_value(nil) end end end r10k-4.0.2/spec/unit/environment/tarball_spec.rb000066400000000000000000000024211460033767200215360ustar00rootroot00000000000000require 'spec_helper' require 'r10k/environment' describe R10K::Environment::Tarball do let(:tgz_path) do File.expand_path('spec/fixtures/tarball/tarball.tar.gz', PROJECT_ROOT) end let(:checksum) { '36afcfc2378b8235902d6e647fce7479da6898354d620388646c595a1155ed67' } let(:base_params) { { source: tgz_path, version: checksum, modules: { } } } subject { described_class.new('envname', '/some/imaginary/path', 'dirname', base_params) } describe "initializing" do it "accepts valid base class initialization arguments" do expect(subject.name).to eq 'envname' end end describe "storing attributes" do it "can return the environment name" do expect(subject.name).to eq 'envname' end it "can return the environment basedir" do expect(subject.basedir).to eq '/some/imaginary/path' end it "can return the environment dirname" do expect(subject.dirname).to eq 'dirname' end it "can return the environment path" do 
expect(subject.path.to_s).to eq '/some/imaginary/path/dirname' end it "can return the environment source" do expect(subject.tarball.source).to eq tgz_path end it "can return the environment version" do expect(subject.tarball.checksum).to eq checksum end end end r10k-4.0.2/spec/unit/environment/with_modules_spec.rb000066400000000000000000000103411460033767200226200ustar00rootroot00000000000000require 'spec_helper' require 'r10k/environment' describe R10K::Environment::WithModules do subject do described_class.new( 'release42', '/some/nonexistent/environmentdir', 'prefix_release42', { :type => 'plain', :modules => { 'puppetlabs-stdlib' => { local: true }, 'puppetlabs-concat' => { local: true }, 'puppetlabs-exec' => { local: true }, } }.merge(subject_params) ) end # Default no additional params let(:subject_params) { {} } describe "dealing with module conflicts" do context "with no module conflicts" do it "validates when there are no conflicts" do mod = instance_double('R10K::Module::Base', name: 'nonconflict', origin: :puppetfile) expect(subject.module_conflicts?(mod)).to eq false end end context "with module conflicts and default behavior" do it "does not raise an error" do mod = instance_double('R10K::Module::Base', name: 'stdlib', origin: :puppetfile) expect(subject.logger).to receive(:warn).with(/Puppetfile.*both define.*ignored/i) expect(subject.module_conflicts?(mod)).to eq true end end context "with module conflicts and 'error' behavior" do let(:subject_params) {{ :module_conflicts => 'error' }} it "raises an error" do mod = instance_double('R10K::Module::Base', name: 'stdlib', origin: :puppetfile) expect { subject.module_conflicts?(mod) }.to raise_error(R10K::Error, /Puppetfile.*both define.*/i) end end context "with module conflicts and 'override' behavior" do let(:subject_params) {{ :module_conflicts => 'override' }} it "does not raise an error" do mod = instance_double('R10K::Module::Base', name: 'stdlib', origin: :puppetfile) expect(subject.logger).to receive(:debug).with(/Puppetfile.*both define.*ignored/i) expect(subject.module_conflicts?(mod)).to eq true end end context "with module conflicts and invalid configuration" do let(:subject_params) {{ :module_conflicts => 'batman' }} it "raises an error" do mod = instance_double('R10K::Module::Base', name: 'stdlib', origin: :puppetfile) expect { subject.module_conflicts?(mod) }.to raise_error(R10K::Error, /Unexpected value.*module_conflicts.*/i) end end end describe "modules method" do it "returns the configured modules, and Puppetfile modules" do loaded = { managed_directories: [], desired_contents: [], purge_exclusions: [] } puppetfile_mod = instance_double('R10K::Module::Base', name: 'zebra') expect(subject.loader).to receive(:load).and_return(loaded.merge(modules: [puppetfile_mod])) returned_modules = subject.modules expect(returned_modules.map(&:name).sort).to eq(%w[concat exec stdlib zebra]) end end describe "module options" do let(:subject_params) {{ :modules => { 'hieradata' => { :type => 'git', :source => 'git@git.example.com:site_data.git', :install_path => '' }, 'site_data_2' => { :type => 'git', :source => 'git@git.example.com:site_data.git', :install_path => 'subdir' }, } }} it "should support empty install_path" do modules = subject.modules expect(modules[0].title).to eq 'hieradata' expect(modules[0].path).to eq Pathname.new('/some/nonexistent/environmentdir/prefix_release42/hieradata') end it "should support install_path" do modules = subject.modules expect(modules[1].title).to eq 'site_data_2' 
expect(modules[1].path).to eq Pathname.new('/some/nonexistent/environmentdir/prefix_release42/subdir/site_data_2') end context "with invalid configuration" do let(:subject_params) {{ :modules => { 'site_data_2' => { :type => 'git', :source => 'git@git.example.com:site_data.git', :install_path => '/absolute_path_outside_of_containing_environment' } } }} it "raises an error" do expect{ subject.modules }.to raise_error(R10K::Error, /Environment cannot.*outside of containing environment.*/i) end end end end r10k-4.0.2/spec/unit/errors/000077500000000000000000000000001460033767200155275ustar00rootroot00000000000000r10k-4.0.2/spec/unit/errors/formatting_spec.rb000066400000000000000000000042471460033767200212470ustar00rootroot00000000000000require 'spec_helper' require 'r10k/errors/formatting' describe R10K::Errors::Formatting do describe "without a nested exception" do let(:exc) do ArgumentError.new("ArgumentError message").tap do |a| a.set_backtrace(%w[/backtrace/line:1 /backtrace/line:2]) end end describe "and without a backtrace" do subject do described_class.format_exception(exc, false) end it "formats the exception with the message" do expect(subject).to eq("ArgumentError message") end end describe "and with a backtrace" do subject do described_class.format_exception(exc, true) end it "formats the exception with the message and backtrace" do expect(subject).to eq([ "ArgumentError message", "/backtrace/line:1", "/backtrace/line:2", ].join("\n")) end end end describe "with a nested exception" do let(:nestee) do ArgumentError.new("ArgumentError message").tap do |a| a.set_backtrace(%w[/backtrace/line:1 /backtrace/line:2]) end end let(:exc) do R10K::Error.wrap(nestee, "R10K::Error message").tap do |r| r.set_backtrace(%w[/another/backtrace/line:1 /another/backtrace/line:2]) end end describe "and without a backtrace" do subject do described_class.format_exception(exc, false) end it "formats the exception with the message and original message" do expect(subject).to eq([ "R10K::Error message", "Original exception:", "ArgumentError message" ].join("\n")) end end describe "and with a backtrace" do subject do described_class.format_exception(exc, true) end it "formats the exception with the message, backtrace, original message, and original backtrace" do expect(subject).to eq([ "R10K::Error message", "/another/backtrace/line:1", "/another/backtrace/line:2", "Original exception:", "ArgumentError message", "/backtrace/line:1", "/backtrace/line:2", ].join("\n")) end end end end r10k-4.0.2/spec/unit/feature_spec.rb000066400000000000000000000033731460033767200172130ustar00rootroot00000000000000require 'spec_helper' require 'r10k/feature' describe R10K::Feature do describe "confining a feature to a library" do it "is available if the library can be loaded" do feature = described_class.new(:r10k, :libraries => 'r10k') expect(feature.available?).to be_truthy end it "is unavailable if the library cannot be loaded" do feature = described_class.new(:squidlibs, :libraries => 'squid/libs') expect(feature.available?).to be_falsey end end describe "confining a feature to a block" do it "is available if the block is true" do feature = described_class.new(:blockfeature) { true } expect(feature.available?).to be_truthy end it "is unavailable if the block is false" do feature = described_class.new(:blockfeature) { false } expect(feature.available?).to be_falsey end end describe "confining a feature to both a block and libraries" do it "is unavailable if the block returns false and libraries are absent" do feature = 
described_class.new(:nope, :libraries => 'nope/nope') { false } expect(feature.available?).to be_falsey end it "is unavailable if the block returns true and libraries are absent" do feature = described_class.new(:nope, :libraries => 'nope/nope') { true } expect(feature.available?).to be_falsey end it "is unavailable if the block returns false and libraries are present" do feature = described_class.new(:nope, :libraries => 'r10k') { false } expect(feature.available?).to be_falsey end it "is available if the block returns true and libraries are present" do feature = described_class.new(:yep, :libraries => 'r10k') { true } expect(feature.available?).to be_truthy end end end r10k-4.0.2/spec/unit/forge/000077500000000000000000000000001460033767200153155ustar00rootroot00000000000000r10k-4.0.2/spec/unit/forge/module_release_spec.rb000066400000000000000000000213641460033767200216470ustar00rootroot00000000000000require 'spec_helper' require 'r10k/forge/module_release' require 'r10k/util/exec_env' require 'puppet_forge' describe R10K::Forge::ModuleRelease do subject { described_class.new('branan-eight_hundred', '8.0.0') } let(:forge_release_class) { PuppetForge::V3::Release } let(:sha256_digest_class) { Digest::SHA256 } let(:md5_digest_class) { Digest::MD5 } let(:download_path) { instance_double('Pathname') } let(:tarball_cache_path) { instance_double('Pathname') } let(:tarball_cache_root) { instance_double('Pathname') } let(:unpack_path) { instance_double('Pathname') } let(:target_dir) { instance_double('Pathname') } let(:tarball_cache_path) { instance_double('Pathname') } let(:md5_file_path) { instance_double('Pathname') } let(:sha256_file_path) { instance_double('Pathname') } let(:file_lists) { {:valid=>['valid_ex'], :invalid=>[], :symlinks=>['symlink_ex']} } let(:file_contents) { "skeletor's closet" } let(:sha256_digest) { instance_double('Digest::SHA256') } let(:sha256_of_tarball) { "sha256_hash" } let(:md5_digest) { instance_double('Digest::MD5') } let(:md5_of_tarball) { "md5_hash" } let(:good_md5) { md5_of_tarball } let(:good_sha256) { sha256_of_tarball } let(:bad_sha256) { "bad_sha256_hash" } let(:bad_md5) { "bad_md5_hash" } before do subject.download_path = download_path subject.tarball_cache_path = tarball_cache_path subject.tarball_cache_root = tarball_cache_root subject.unpack_path = unpack_path subject.md5_file_path = md5_file_path subject.sha256_file_path = sha256_file_path end context "no cached tarball" do describe '#download' do it "downloads the module from the forge into `download_path`" do expect(tarball_cache_path).to receive(:exist?).and_return(false) expect(subject.forge_release).to receive(:download).with(download_path) allow(FileUtils).to receive(:mkdir_p).with(tarball_cache_root) expect(FileUtils).to receive(:mv).with(download_path, tarball_cache_path) subject.download end end end context "with cached tarball" do describe '#download' do it "does not download a new tarball" do expect(tarball_cache_path).to receive(:exist?).and_return(true) expect(subject.forge_release).not_to receive(:download).with(download_path) subject.download end end end describe '#verify' do it "verifies using the file SHA256, if that exists" do allow(sha256_digest_class).to receive(:file).and_return(sha256_digest) allow(sha256_digest).to receive(:hexdigest).and_return(sha256_of_tarball) allow(sha256_file_path).to receive(:exist?).and_return(true) expect(subject).to receive(:verify_from_file).with(sha256_of_tarball, sha256_file_path) subject.verify end it "verifies using the forge file_sha256, if no 
sha256 file exists" do allow(sha256_digest_class).to receive(:file).and_return(sha256_digest) allow(sha256_digest).to receive(:hexdigest).and_return(sha256_of_tarball) allow(sha256_file_path).to receive(:exist?).and_return(false) allow(subject.forge_release).to receive(:respond_to?).and_return(true) allow(subject.forge_release).to receive(:sha256_file).and_return(sha256_of_tarball) expect(subject).to receive(:verify_from_forge) subject.verify end it "falls back to md5 verification when not in FIPS mode and no sha256 available" do expect(R10K::Util::Platform).to receive(:fips?).and_return(false) # failed sha256 verification allow(sha256_digest_class).to receive(:file).and_return(sha256_digest) allow(sha256_digest).to receive(:hexdigest).and_return(sha256_of_tarball) allow(sha256_file_path).to receive(:exist?).and_return(false) allow(subject.forge_release).to receive(:respond_to?).and_return(false) allow(subject).to receive(:verify_from_forge) # md5 verification allow(md5_digest_class).to receive(:file).and_return(md5_digest) allow(md5_digest).to receive(:hexdigest).and_return(md5_of_tarball) allow(md5_file_path).to receive(:exist?).and_return(true) expect(subject).to receive(:verify_from_file) subject.verify end it "errors when in FIPS mode and no sha256 is available" do expect(R10K::Util::Platform).to receive(:fips?).and_return(true) allow(sha256_digest_class).to receive(:file).and_return(sha256_digest) allow(sha256_digest).to receive(:hexdigest).and_return(sha256_of_tarball) allow(sha256_file_path).to receive(:exist?).and_return(false) allow(subject.forge_release).to receive(:respond_to?).and_return(false) allow(subject).to receive(:verify_from_forge) expect { subject.verify }.to raise_error(R10K::Error) end end describe '#verify_from_file' do it "does nothing when the checksums match" do expect(File).to receive(:read).with(sha256_file_path).and_return(good_sha256) expect(subject).not_to receive(:cleanup_cached_tarball_path) subject.verify_from_file(sha256_of_tarball, sha256_file_path) end it "raises an error and cleans up when the checksums do not match" do expect(File).to receive(:read).with(sha256_file_path).and_return(bad_sha256) expect(tarball_cache_path).to receive(:delete) expect(sha256_file_path).to receive(:delete) expect { subject.verify_from_file(sha256_of_tarball, sha256_file_path) }.to raise_error(PuppetForge::V3::Release::ChecksumMismatch) end end describe '#verify_from_forge' do it "write the checksum to file when the checksums match" do expect(tarball_cache_path).not_to receive(:delete) expect(File).to receive(:write).with(sha256_file_path, good_sha256) subject.verify_from_forge(sha256_of_tarball, good_sha256, sha256_file_path) end it "raises an error and cleans up when the checksums do not match" do expect(tarball_cache_path).to receive(:delete) expect { subject.verify_from_forge(sha256_of_tarball, bad_sha256, sha256_file_path) } .to raise_error(PuppetForge::V3::Release::ChecksumMismatch) end end describe '#unpack' do it "unpacks the module tarball in `tarball_cache_path` into the provided target path" do expect(PuppetForge::Unpacker).to receive(:unpack).with(tarball_cache_path.to_s, target_dir.to_s, unpack_path.to_s).\ and_return({:valid=>["extractedmodule/metadata.json"], :invalid=>[], :symlinks=>[]}) subject.unpack(target_dir) end it "logs a warning if symlinks are present during the unpacking process" do logger_dbl = double(Log4r::Logger, debug1: true, debug2: true) allow(subject).to receive(:logger).and_return(logger_dbl) allow(PuppetForge::Unpacker).to 
receive(:unpack).and_return(file_lists) expect(logger_dbl).to receive(:warn).with(/symlinks are unsupported.*#{Regexp.escape(subject.forge_release.slug)}/i) subject.unpack(target_dir) end end describe "#cleanup" do it "cleans up the unpack paths" do expect(subject).to receive(:cleanup_unpack_path) expect(subject).to receive(:cleanup_download_path) subject.cleanup end end describe "#cleanup_unpack_path" do it "ignores the unpack_path if the parent doesn't exist" do parent = instance_double('Pathname') expect(parent).to receive(:exist?).and_return false expect(parent).to_not receive(:rmtree) expect(unpack_path).to receive(:parent).and_return(parent) subject.cleanup_unpack_path end it "removes the containing directory of unpack_path if it exists" do parent = instance_double('Pathname') expect(parent).to receive(:rmtree) expect(parent).to receive(:exist?).and_return true expect(unpack_path).to receive(:parent).and_return(parent).exactly(2).times subject.cleanup_unpack_path end end describe "#cleanup_download_path" do it "ignores the download_path if the parent doesn't exist" do parent = instance_double('Pathname') expect(parent).to receive(:exist?).and_return false expect(parent).to_not receive(:rmtree) expect(download_path).to receive(:parent).and_return(parent) subject.cleanup_download_path end it "removes the containing directory of download_path if it exists" do parent = instance_double('Pathname') expect(parent).to receive(:rmtree) expect(parent).to receive(:exist?).and_return true expect(download_path).to receive(:parent).and_return(parent).exactly(2).times subject.cleanup_download_path end end describe '#install' do it "performs all steps needed to install the module" do expect(subject).to receive(:download) expect(subject).to receive(:verify) expect(subject).to receive(:unpack).with(target_dir) expect(subject).to receive(:cleanup) subject.install(target_dir) end end end r10k-4.0.2/spec/unit/git/000077500000000000000000000000001460033767200147765ustar00rootroot00000000000000r10k-4.0.2/spec/unit/git/alternates_spec.rb000066400000000000000000000123271460033767200205040ustar00rootroot00000000000000require 'spec_helper' require 'stringio' require 'r10k/git' describe R10K::Git::Alternates do subject { described_class.new(Pathname.new("/some/nonexistent/path/.git")) } it "interacts with the alternates file in the given git repository" do expect(subject.file.to_s).to eq("/some/nonexistent/path/.git/objects/info/alternates") end describe "reading alternate object entries" do it "reads the alternates file and splits on lines" do expect(subject.file).to receive(:file?).and_return true expect(subject.file).to receive(:readlines).and_return([ "/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git\n", "/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git\n", ]) expect(subject.read).to eq([ "/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", ]) end it "returns an empty array when the file is not present" do expect(subject.file).to receive(:file?).and_return false expect(subject.file).to receive(:readlines).never expect(subject.to_a).to eq([]) end end describe "determining if an entry is already present" do before do allow(subject).to receive(:to_a).and_return([ "/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", ]) end it "is true if the element is in the array of read 
entries" do expect(subject).to include("/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git") end it "is false if the element is not in the array of read entries" do expect(subject).to_not include("/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git") end end describe "writing alternate entries" do describe "and the git objects/info directory does not exist" do it "raises an error when the parent directory does not exist" do expect { subject.write(["/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git"]) }.to raise_error(R10K::Git::GitError,"Cannot write /some/nonexistent/path/.git/objects/info/alternates; parent directory does not exist") end end describe "and the git objects/info directory exists" do let(:io) { StringIO.new } before do expect(subject.file).to receive(:open).with('w').and_yield(io) expect(subject.file).to receive_message_chain(:parent, :directory?).and_return true end it "creates the alternates file with the new entry when not present" do subject.write(["/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git"]) expect(io.string).to eq("/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git\n") end it "rewrites the file with all alternate entries" do subject.write(["/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git"]) expect(io.string).to eq(<<-EOD) /var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git /vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git /tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git EOD end end describe "appending a new alternate object entry" do it "re-writes the file with the new entry concatenated to the file" do expect(subject).to receive(:to_a).and_return(["/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git"]) expect(subject).to receive(:write).with(["/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/vagrant/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git"]) subject.add("/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git") end end end describe "conditionally appending a new alternate object entry" do before do expect(subject).to receive(:read).and_return(%w[/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git]) end it "adds the entry and returns true when the entry doesn't exist" do expect(subject).to receive(:write).with(["/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git", "/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git"]) expect(subject.add?("/tmp/.r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git")).to eq true end it "doesn't modify the file and returns false when the entry exists" do expect(subject).to_not receive(:write) expect(subject.add?("/var/cache/r10k/git/https---github.com-puppetlabs-puppetlabs-apache.git")).to eq false end end end r10k-4.0.2/spec/unit/git/cache_spec.rb000066400000000000000000000030131460033767200173750ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/cache' describe R10K::Git::Cache do describe 'the default cache_root' do it 'is in the right location in linux', unless: R10K::Util::Platform.windows? 
do expect(described_class.defaults[:cache_root]).to match(/\.r10k\/git/) end it 'is in the right location for windows', if: R10K::Util::Platform.windows? do expect(described_class.defaults[:cache_root]).to match(/[^.]r10k\/git/) end end let(:subclass) do Class.new(described_class) do def self.bare_repository Class.new { def initialize(*args) end } end end end let(:remote) { 'https://some/git/remote' } subject { subclass.new(remote) } describe "updating the cache" do it "only updates the cache once" do expect(subject).to receive(:sync!).exactly(1).times subject.sync subject.sync end end describe "methods on the repository" do def expect_delegation(method) expect(subject.repo).to receive(method) subject.send(method) end it "delegates #git_dir" do expect_delegation(:git_dir) end it "delegates #objects_dir" do expect_delegation(:objects_dir) end it "delegates #branches" do expect_delegation(:branches) end it "delegates #tags" do expect_delegation(:tags) end it "delegates #exist?" do expect_delegation(:exist?) end it "aliases #cached? to #exist?" do expect(subject.repo).to receive(:exist?) subject.cached? end end end r10k-4.0.2/spec/unit/git/rugged/000077500000000000000000000000001460033767200162535ustar00rootroot00000000000000r10k-4.0.2/spec/unit/git/rugged/cache_spec.rb000066400000000000000000000025401460033767200206560ustar00rootroot00000000000000require 'spec_helper' describe R10K::Git::Rugged::Cache, :unless => R10K::Util::Platform.jruby? do before(:all) do require 'r10k/git/rugged/cache' end subject(:cache) { described_class.new('https://some/git/remote') } it "wraps a Rugged::BareRepository instance" do expect(cache.repo).to be_a_kind_of R10K::Git::Rugged::BareRepository end describe "settings" do before do R10K::Git::Cache.settings[:cache_root] = '/some/path' described_class.settings.reset! end after do R10K::Git::Cache.settings.reset! described_class.settings.reset! end it "falls back to the parent class settings" do expect(described_class.settings[:cache_root]).to eq '/some/path' end end describe "remote url updates" do before do allow(subject.repo).to receive(:exist?).and_return true allow(subject.repo).to receive(:fetch) allow(subject.repo).to receive(:remotes).and_return({ 'origin' => 'https://some/git/remote' }) end it "does not update the URLs if they match" do expect(subject.repo).to_not receive(:update_remote) subject.sync! end it "updates the remote URL if they do not match" do allow(subject.repo).to receive(:remotes).and_return({ 'origin' => 'foo'}) expect(subject.repo).to receive(:update_remote) subject.sync! end end end r10k-4.0.2/spec/unit/git/rugged/credentials_spec.rb000066400000000000000000000245711460033767200221200ustar00rootroot00000000000000require 'spec_helper' describe R10K::Git::Rugged::Credentials, :unless => R10K::Util::Platform.jruby? || R10K::Util::Platform.windows? do before(:all) do require 'r10k/git/rugged/credentials' require 'rugged/credentials' end let(:repo) { R10K::Git::Rugged::BareRepository.new("/some/nonexistent/path", "repo.git") } subject { described_class.new(repo) } after(:each) { R10K::Git.settings.reset! } describe "determining the username" do before { R10K::Git.settings[:username] = "moderns" } after { R10K::Git.settings.reset! 
} it "prefers a username from the URL" do user = subject.get_git_username("https://tessier-ashpool.freeside/repo.git", "ashpool") expect(user).to eq "ashpool" end it "uses the username from the Git config when specified" do user = subject.get_git_username("https://tessier-ashpool.freeside/repo.git", nil) expect(user).to eq "moderns" end it "falls back to the current user" do R10K::Git.settings.reset! expect(Etc).to receive(:getlogin).and_return("finn") user = subject.get_git_username("https://tessier-ashpool.freeside/repo.git", nil) expect(user).to eq "finn" end end describe "generating ssh key credentials" do after(:each) { R10K::Git.settings.reset! } it "prefers a per-repository SSH private key" do allow(File).to receive(:readable?).with("/etc/puppetlabs/r10k/ssh/tessier-ashpool-id_rsa").and_return true R10K::Git.settings[:private_key] = "/etc/puppetlabs/r10k/ssh/id_rsa" R10K::Git.settings[:repositories] = [{ remote: "ssh://git@tessier-ashpool.freeside/repo.git", private_key: "/etc/puppetlabs/r10k/ssh/tessier-ashpool-id_rsa"}] creds = subject.get_ssh_key_credentials("ssh://git@tessier-ashpool.freeside/repo.git", nil) expect(creds).to be_a_kind_of(Rugged::Credentials::SshKey) expect(creds.instance_variable_get(:@privatekey)).to eq("/etc/puppetlabs/r10k/ssh/tessier-ashpool-id_rsa") end it "falls back to the global SSH private key" do allow(File).to receive(:readable?).with("/etc/puppetlabs/r10k/ssh/id_rsa").and_return true R10K::Git.settings[:private_key] = "/etc/puppetlabs/r10k/ssh/id_rsa" creds = subject.get_ssh_key_credentials("ssh://git@tessier-ashpool.freeside/repo.git", nil) expect(creds).to be_a_kind_of(Rugged::Credentials::SshKey) expect(creds.instance_variable_get(:@privatekey)).to eq("/etc/puppetlabs/r10k/ssh/id_rsa") end it "raises an error if no key has been set" do R10K::Git.settings[:private_key] = nil expect { subject.get_ssh_key_credentials("https://tessier-ashpool.freeside/repo.git", nil) }.to raise_error(R10K::Git::GitError, /no private key was given/) end it "raises an error if the private key is unreadable" do R10K::Git.settings[:private_key] = "/some/nonexistent/.ssh/key" expect(File).to receive(:readable?).with("/some/nonexistent/.ssh/key").and_return false expect { subject.get_ssh_key_credentials("https://tessier-ashpool.freeside/repo.git", nil) }.to raise_error(R10K::Git::GitError, /Unable to use SSH key auth for.*is missing or unreadable/) end it "generates the rugged sshkey credential type" do allow(File).to receive(:readable?).with("/etc/puppetlabs/r10k/ssh/id_rsa").and_return true R10K::Git.settings[:private_key] = "/etc/puppetlabs/r10k/ssh/id_rsa" creds = subject.get_ssh_key_credentials("https://tessier-ashpool.freeside/repo.git", nil) expect(creds).to be_a_kind_of(Rugged::Credentials::SshKey) expect(creds.instance_variable_get(:@privatekey)).to eq("/etc/puppetlabs/r10k/ssh/id_rsa") end end describe "generating github app tokens" do it 'errors if app id has invalid characters' do expect { subject.github_app_token("123A567890", "fake", "300") }.to raise_error(R10K::Git::GitError, /App id contains invalid characters/) end it 'errors if app ttl has invalid characters' do expect { subject.github_app_token("123456", "fake", "abc") }.to raise_error(R10K::Git::GitError, /Github App token ttl contains/) end it 'errors if private file does not exist' do R10K::Git.settings[:github_app_key] = "/missing/token/file" expect(File).to receive(:readable?).with(R10K::Git.settings[:github_app_key]).and_return false expect { subject.github_app_token("123456", 
R10K::Git.settings[:github_app_key], "300") }.to raise_error(R10K::Git::GitError, /App key is missing or unreadable/) end it 'errors if file is not a valid SSL key' do token_file = Tempfile.new('token') token_file.write('my_token') token_file.close R10K::Git.settings[:github_app_key] = token_file.path expect(File).to receive(:readable?).with(token_file.path).and_return true expect { subject.github_app_token("123456", R10K::Git.settings[:github_app_key], "300") }.to raise_error(R10K::Git::GitError, /App key is not a valid SSL key/) token_file.unlink end end describe "generating token credentials" do it 'errors if token file does not exist' do R10K::Git.settings[:oauth_token] = "/missing/token/file" expect(File).to receive(:readable?).with("/missing/token/file").and_return false R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git"}] expect { subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) }.to raise_error(R10K::Git::GitError, /cannot load OAuth token/) end it 'errors if the token on stdin is not a valid OAuth token' do allow($stdin).to receive(:read).and_return("token") R10K::Git.settings[:oauth_token] = "-" R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git"}] expect { subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) }.to raise_error(R10K::Git::GitError, /invalid characters/) end it 'errors if the token in the file is not a valid OAuth token' do token_file = Tempfile.new('token') token_file.write('my bad \ntoken') token_file.close R10K::Git.settings[:oauth_token] = token_file.path R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git"}] expect { subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) }.to raise_error(R10K::Git::GitError, /invalid characters/) end it 'prefers per-repo token file' do token_file = Tempfile.new('token') token_file.write('my_token') token_file.close R10K::Git.settings[:oauth_token] = "/do/not/use" R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git", oauth_token: token_file.path }] creds = subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) expect(creds).to be_a_kind_of(Rugged::Credentials::UserPassword) expect(creds.instance_variable_get(:@password)).to eq("my_token") expect(creds.instance_variable_get(:@username)).to eq("x-oauth-token") end it 'uses the token from a file as a password' do token_file = Tempfile.new('token') token_file.write('my_token') token_file.close R10K::Git.settings[:oauth_token] = token_file.path R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git"}] creds = subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) expect(creds).to be_a_kind_of(Rugged::Credentials::UserPassword) expect(creds.instance_variable_get(:@password)).to eq("my_token") expect(creds.instance_variable_get(:@username)).to eq("x-oauth-token") end it 'uses the token from stdin as a password' do allow($stdin).to receive(:read).and_return("my_token") R10K::Git.settings[:oauth_token] = '-' R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git"}] creds = subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) expect(creds).to be_a_kind_of(Rugged::Credentials::UserPassword) expect(creds.instance_variable_get(:@password)).to eq("my_token") 
expect(creds.instance_variable_get(:@username)).to eq("x-oauth-token") end it 'only reads the token in once' do expect($stdin).to receive(:read).and_return("my_token").once R10K::Git.settings[:oauth_token] = '-' R10K::Git.settings[:repositories] = [{remote: "https://tessier-ashpool.freeside/repo.git"}] creds = subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) expect(creds.instance_variable_get(:@password)).to eq("my_token") creds = subject.get_plaintext_credentials("https://tessier-ashpool.freeside/repo.git", nil) expect(creds.instance_variable_get(:@password)).to eq("my_token") end end describe "generating default credentials" do it "generates the rugged default credential type" do creds = subject.get_default_credentials("https://azurediamond:hunter2@tessier-ashpool.freeside/repo.git", "azurediamond") expect(creds).to be_a_kind_of(Rugged::Credentials::Default) end end describe "generating credentials" do it "creates ssh key credentials for the sshkey allowed type" do allow(File).to receive(:readable?).with("/etc/puppetlabs/r10k/ssh/id_rsa").and_return true R10K::Git.settings[:private_key] = "/etc/puppetlabs/r10k/ssh/id_rsa" expect(subject.call("https://tessier-ashpool.freeside/repo.git", nil, [:ssh_key])).to be_a_kind_of(Rugged::Credentials::SshKey) end it "creates user/password credentials for the default allowed type" do expect(subject.call("https://tessier-ashpool.freeside/repo.git", nil, [:plaintext])).to be_a_kind_of(Rugged::Credentials::UserPassword) end it "creates default credentials when no other types are allowed" do expect(subject.call("https://tessier-ashpool.freeside/repo.git", nil, [])).to be_a_kind_of(Rugged::Credentials::Default) end it "refuses to generate credentials more than 50 times" do (1..50).each { subject.call("https://tessier-ashpool.freeside/repo.git", nil, [:plaintext]) } expect { subject.call("https://tessier-ashpool.freeside/repo.git", nil, [:plaintext]) }.to raise_error(R10K::Git::GitError, /authentication failed/i) end end end r10k-4.0.2/spec/unit/git/shellgit/000077500000000000000000000000001460033767200166115ustar00rootroot00000000000000r10k-4.0.2/spec/unit/git/shellgit/cache_spec.rb000066400000000000000000000012511460033767200212120ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git/shellgit/cache' describe R10K::Git::ShellGit::Cache do subject { described_class.new('https://some/git/remote') } it "wraps a ShellGit::BareRepository instance" do expect(subject.repo).to be_a_kind_of R10K::Git::ShellGit::BareRepository end describe "settings" do before do R10K::Git::Cache.settings[:cache_root] = '/some/path' described_class.settings.reset! end after do R10K::Git::Cache.settings.reset! described_class.settings.reset! 
end it "falls back to the parent class settings" do expect(described_class.settings[:cache_root]).to eq '/some/path' end end end r10k-4.0.2/spec/unit/git/stateful_repository_spec.rb000066400000000000000000000027451460033767200224730ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git' require 'r10k/git/stateful_repository' describe R10K::Git::StatefulRepository do let(:remote) { 'https://some.site/some-repo.git' } let(:ref) { '0.9.x' } subject { described_class.new(remote, '/some/nonexistent/basedir', 'some-dirname') } describe "determining if the cache needs to be synced" do let(:cache) { double('cache') } before { expect(R10K::Git.cache).to receive(:generate).with(remote).and_return(cache) } it "is true if the cache is absent" do expect(cache).to receive(:exist?).and_return false expect(subject.sync_cache?(ref)).to eq true end it "is true if the ref is HEAD" do expect(cache).to receive(:exist?).and_return true expect(subject.sync_cache?('HEAD')).to eq true end it "is true if the ref is unresolvable" do expect(cache).to receive(:exist?).and_return true expect(cache).to receive(:ref_type).with('0.9.x').and_return(:unknown) expect(subject.sync_cache?(ref)).to eq true end it "is true if the ref is not a tag or commit" do expect(cache).to receive(:exist?).and_return true expect(cache).to receive(:ref_type).with('0.9.x').and_return(:branch) expect(subject.sync_cache?(ref)).to eq true end it "is false otherwise" do expect(cache).to receive(:exist?).and_return true expect(cache).to receive(:ref_type).with('0.9.x').and_return(:tag) expect(subject.sync_cache?(ref)).to eq false end end end r10k-4.0.2/spec/unit/git_spec.rb000066400000000000000000000101341460033767200163340ustar00rootroot00000000000000require 'spec_helper' require 'r10k/git' describe R10K::Git do before { described_class.reset! } after { described_class.reset! } describe 'selecting the default provider' do it 'returns shellgit when the git executable is present' do expect(R10K::Features).to receive(:available?).with(:shellgit).and_return true expect(described_class.default_name).to eq :shellgit end context 'under c-based rubies with rugged available', :unless => R10K::Util::Platform.jruby? || R10K::Util::Platform.windows? 
do it 'returns rugged when the git executable is absent and the rugged library is present' do expect(R10K::Features).to receive(:available?).with(:shellgit).and_return false expect(R10K::Features).to receive(:available?).with(:rugged).and_return true expect(described_class.default_name).to eq :rugged end end it 'raises an error when the git executable and rugged library are absent' do expect(R10K::Features).to receive(:available?).with(:shellgit).and_return false expect(R10K::Features).to receive(:available?).with(:rugged).and_return false expect { described_class.default_name }.to raise_error(R10K::Error, 'No Git providers are functional.') end it "goes into an error state if an invalid provider was set" do begin described_class.provider = :nope rescue R10K::Error end expect { described_class.provider }.to raise_error(R10K::Error, "No Git provider set.") end end describe 'explicitly setting the provider' do it "raises an error if the provider doesn't exist" do expect { described_class.provider = :nope }.to raise_error(R10K::Error, "No Git provider named 'nope'.") end it "raises an error if the provider isn't functional" do expect(R10K::Features).to receive(:available?).with(:shellgit).and_return false expect { described_class.provider = :shellgit }.to raise_error(R10K::Error, "Git provider 'shellgit' is not functional.") end context 'under c-based rubies with rugged available', :unless => R10K::Util::Platform.jruby? || R10K::Util::Platform.windows? do it "sets the current provider if the provider exists and is functional" do expect(R10K::Features).to receive(:available?).with(:rugged).and_return true described_class.provider = :rugged expect(described_class.provider).to eq(R10K::Git::Rugged) end end context 'under jruby', :if => R10K::Util::Platform.jruby? do it "sets the current provider if the provider exists and is functional" do expect(R10K::Features).to receive(:available?).with(:shellgit).and_return true described_class.provider = :shellgit expect(described_class.provider).to eq(R10K::Git::ShellGit) end end end describe "retrieving the current provider" do context 'under c-based rubies', :unless => R10K::Util::Platform.jruby? || R10K::Util::Platform.windows? do it "uses the default if a provider has not been set" do expect(described_class).to receive(:default_name).and_return :rugged expect(described_class.provider).to eq(R10K::Git::Rugged) end it "uses an explicitly set provider" do expect(R10K::Features).to receive(:available?).with(:rugged).and_return true described_class.provider = :rugged expect(described_class).to_not receive(:default) expect(described_class.provider).to eq R10K::Git::Rugged end end context 'under jruby', :if => R10K::Util::Platform.jruby? 
do it "uses the default if a provider has not been set" do expect(described_class).to receive(:default_name).and_return :shellgit expect(described_class.provider).to eq(R10K::Git::ShellGit) end it "uses an explicitly set provider" do expect(R10K::Features).to receive(:available?).with(:shellgit).and_return true described_class.provider = :shellgit expect(described_class).to_not receive(:default) expect(described_class.provider).to eq R10K::Git::ShellGit end end end end r10k-4.0.2/spec/unit/initializers_spec.rb000066400000000000000000000045611460033767200202660ustar00rootroot00000000000000require 'spec_helper' require 'r10k/initializers' describe R10K::Initializers::GitInitializer do it "configures the Git provider" do subject = described_class.new({:provider => :rugged}) expect(R10K::Git).to receive(:provider=).with(:rugged) subject.call end it "configures the Git username" do subject = described_class.new({:username => 'git'}) expect(R10K::Git.settings).to receive(:[]=).with(:username, 'git') subject.call end it "configures the Git private key" do subject = described_class.new({:private_key => '/etc/puppetlabs/r10k/id_rsa'}) expect(R10K::Git.settings).to receive(:[]=).with(:private_key, '/etc/puppetlabs/r10k/id_rsa') subject.call end end describe R10K::Initializers::ForgeInitializer do it "sets the PuppetForge host" do subject = described_class.new({:baseurl => 'https://my.site.forge'}) expect(PuppetForge).to receive(:host=).with('https://my.site.forge') subject.call end it "configures PuppetForge connection proxy" do subject = described_class.new({:proxy => 'http://my.site.proxy:3128'}) expect(PuppetForge::Connection).to receive(:proxy=).with('http://my.site.proxy:3128') subject.call end end describe R10K::Initializers::GlobalInitializer do it "logs a warning if purgedirs was set" do subject = described_class.new({:purgedirs => 'This setting has been deprecated for over two years :('}) expect(subject.logger).to receive(:warn).with('the purgedirs key in r10k.yaml is deprecated. 
it is currently ignored.') subject.call end it "sets the Git cache_root" do subject = described_class.new({:cachedir => '/var/cache/r10k'}) expect(R10K::Git::Cache.settings).to receive(:[]=).with(:cache_root, '/var/cache/r10k') subject.call end it "delegates git settings to the Git initializer" do git = instance_double('R10K::Initializers::GitInitializer') expect(git).to receive(:call) expect(R10K::Initializers::GitInitializer).to receive(:new).and_return(git) subject = described_class.new({:git => {}}) subject.call end it "delegates forge settings to the Forge initializer" do forge = instance_double('R10K::Initializers::ForgeInitializer') expect(forge).to receive(:call) expect(R10K::Initializers::ForgeInitializer).to receive(:new).and_return(forge) subject = described_class.new({:forge => {}}) subject.call end end r10k-4.0.2/spec/unit/instance_cache_spec.rb000066400000000000000000000041651460033767200205070ustar00rootroot00000000000000require 'spec_helper' require 'r10k/instance_cache' describe R10K::InstanceCache do describe "setting up a new instance cache" do let(:klass) do dubs = double('test class') allow(dubs).to receive(:new) { |*args| args } dubs end it "can create new objects" do registry = described_class.new(klass) expect(registry.generate).to eq [] end describe "defining object arity" do it "handles unary objects" do expect(klass).to receive(:new).with(:foo) registry = described_class.new(klass) expect(registry.generate(:foo)).to eq [:foo] end it "handles ternary objects" do expect(klass).to receive(:new).with(:foo, :bar, :baz) registry = described_class.new(klass) expect(registry.generate(:foo, :bar, :baz)).to eq [:foo, :bar, :baz] end it 'handles n-ary objects' do args = %w[a bunch of arbitrary objects] expect(klass).to receive(:new).with(*args) registry = described_class.new(klass) expect(registry.generate(*args)).to eq args end it 'fails when the required arguments are not matched' do expect(klass).to receive(:new).and_raise ArgumentError, "not enough args" registry = described_class.new(klass) expect { registry.generate('arity is hard') }.to raise_error ArgumentError, "not enough args" end end it "can specify the constructor method" do expect(klass).to receive(:from_json).and_return "this is json, right?" registry = described_class.new(klass, :from_json) expect(registry.generate).to eq "this is json, right?" end end it "returns a memoized object if it's been created before" do registry = described_class.new(String) first = registry.generate "bam!" second = registry.generate "bam!" expect(first.object_id).to eq second.object_id end it 'can clear registered objects' do registry = described_class.new(String) first = registry.generate "bam!" registry.clear! second = registry.generate "bam!" 
expect(first.object_id).to_not eq second.object_id end end r10k-4.0.2/spec/unit/keyed_factory_spec.rb000066400000000000000000000026131460033767200204040ustar00rootroot00000000000000require 'spec_helper' require 'r10k/keyed_factory' describe R10K::KeyedFactory do let(:registered) { Class.new } describe "registering implementations" do it "can register new implementations" do subject.register(:klass, registered) expect(subject.retrieve(:klass)).to eq registered end it "raises an error when a duplicate implementation is registered" do subject.register(:klass, registered) expect { subject.register(:klass, registered) }.to raise_error(R10K::KeyedFactory::DuplicateImplementationError) end it "can register classes with nil as a key" do subject.register(nil, registered) expect(subject.retrieve(nil)).to eq registered end end describe "generating instances" do before do subject.register(:klass, registered) end it "generates an instance with the associated class" do instance = subject.generate(:klass) expect(instance).to be_a_kind_of registered end it "can generate a class with nil as a key" do other = Class.new subject.register(nil, other) instance = subject.generate(nil) expect(instance).to be_a_kind_of other end it "raises an error if no implementation was registered with the given key" do expect { subject.generate(:foo) }.to raise_error(R10K::KeyedFactory::UnknownImplementationError) end end end r10k-4.0.2/spec/unit/logging/000077500000000000000000000000001460033767200156415ustar00rootroot00000000000000r10k-4.0.2/spec/unit/logging/terminaloutputter_spec.rb000066400000000000000000000025711460033767200230140ustar00rootroot00000000000000require 'spec_helper' require 'stringio' require 'r10k/logging/terminaloutputter' describe R10K::Logging::TerminalOutputter do let(:stream) { StringIO.new } let(:formatter) do Class.new(Log4r::Formatter) do def format(logevent) logevent.data end end end subject do described_class.new('test', stream, :level => 0, :formatter => formatter).tap do |o| o.use_color = true end end tests = [ [:debug2, :cyan], [:debug1, :cyan], [:debug, :green], [:info, nil], [:notice, nil], [:warn, :yellow], [:error, :red], [:fatal, :red], ] tests.each_with_index do |(level, color), index| # Note for the unwary - using a loop in this manner shows strange # behavior with variable closure. The describe block is needed to retain # the loop variables for each test; without this the let helpers are # overwritten and the last set of helpers are used for all tests. describe "at level #{level}" do let(:message) { "level #{level}: #{color}" } let(:event) do Log4r::LogEvent.new(index + 1, Log4r::Logger.new('test::logger'), nil, message) end it "logs messages as #{color ? color : "uncolored"}" do output = color.nil? ? 
message : message.send(color) subject.send(level, event) expect(stream.string).to eq output end end end end r10k-4.0.2/spec/unit/logging_spec.rb000066400000000000000000000043061460033767200172030ustar00rootroot00000000000000require 'spec_helper' require 'r10k/logging' describe R10K::Logging do describe "parsing a log level" do it "parses 'true:TrueClass' as INFO" do expect(described_class.parse_level(true)).to eq Log4r::INFO end it "parses 'true:String' as nil" do expect(described_class.parse_level("true")).to be_nil end it "parses a numeric string as an integer" do expect(described_class.parse_level('2')).to eq 2 end it "parses a log level string as a log level" do expect(described_class.parse_level('debug')).to eq Log4r::DEBUG end it "returns nil when given an invalid log level" do expect(described_class.parse_level('deblag')).to be_nil end end describe "setting the log level" do after(:all) { R10K::Logging.level = 'warn' } it "sets the outputter log level" do expect(described_class.outputter).to receive(:level=).with(Log4r::DEBUG) described_class.level = 'debug' end it "stores the new log level" do allow(described_class.outputter).to receive(:level=) described_class.level = 'debug' expect(described_class.level).to eq(Log4r::DEBUG) end it "raises an exception when given an invalid log level" do expect { described_class.level = 'deblag' }.to raise_error(ArgumentError, /Invalid log level/) end describe "switching the formatter" do before do allow(described_class.outputter).to receive(:level=) end it "switches to the debug formatter if the new log level is debug or greater" do debug_formatter = double('debug formatter') expect(described_class).to receive(:debug_formatter).and_return(debug_formatter) expect(described_class.outputter).to receive(:formatter=).with(debug_formatter) described_class.level = 'debug' end it "switches to the default formatter if the new log level is info or less" do default_formatter = double('default formatter') expect(described_class).to receive(:default_formatter).and_return(default_formatter) expect(described_class.outputter).to receive(:formatter=).with(default_formatter) described_class.level = 'info' end end end end r10k-4.0.2/spec/unit/module/000077500000000000000000000000001460033767200155005ustar00rootroot00000000000000r10k-4.0.2/spec/unit/module/base_spec.rb000066400000000000000000000102541460033767200177530ustar00rootroot00000000000000require 'spec_helper' require 'r10k/module/base' describe R10K::Module::Base do describe "parsing the title" do it "parses titles with no owner" do m = described_class.new('eight_hundred', '/moduledir', {}) expect(m.name).to eq 'eight_hundred' expect(m.owner).to be_nil end it "parses forward slash separated titles" do m = described_class.new('branan/eight_hundred', '/moduledir', {}) expect(m.name).to eq 'eight_hundred' expect(m.owner).to eq 'branan' end it "parses hyphen separated titles" do m = described_class.new('branan-eight_hundred', '/moduledir', {}) expect(m.name).to eq 'eight_hundred' expect(m.owner).to eq 'branan' end it "raises an error when the title is not correctly formatted" do expect { described_class.new('branan!eight_hundred', '/moduledir', {}) }.to raise_error(ArgumentError, "Module name (branan!eight_hundred) must match either 'modulename' or 'owner/modulename'") end end describe 'deleting the spec dir' do let(:module_org) { "coolorg" } let(:module_name) { "coolmod" } let(:title) { "#{module_org}-#{module_name}" } let(:dirname) { Pathname.new(Dir.mktmpdir) } let(:spec_path) { dirname + module_name + 
'spec' } before(:each) do logger = double("logger") allow_any_instance_of(described_class).to receive(:logger).and_return(logger) allow(logger).to receive(:debug2).with(any_args) allow(logger).to receive(:info).with(any_args) end it 'removes the spec directory by default' do FileUtils.mkdir_p(spec_path) m = described_class.new(title, dirname, {}) m.maybe_delete_spec_dir expect(Dir.exist?(spec_path)).to eq false end it 'detects a symlink and deletes the target' do Dir.mkdir(dirname + module_name) target_dir = Dir.mktmpdir FileUtils.ln_s(target_dir, spec_path) m = described_class.new(title, dirname, {}) m.maybe_delete_spec_dir expect(Dir.exist?(target_dir)).to eq false end it 'does not remove the spec directory if overrides->modules->exclude_spec is set to false' do FileUtils.mkdir_p(spec_path) m = described_class.new(title, dirname, {overrides: {modules: {exclude_spec: false}}}) m.maybe_delete_spec_dir expect(Dir.exist?(spec_path)).to eq true end it 'does not remove the spec directory if exclude_spec is set to false and overrides->modules->exclude_spec is true' do FileUtils.mkdir_p(spec_path) m = described_class.new(title, dirname, {exclude_spec: false, overrides: {modules: {exclude_spec: true}}}) m.maybe_delete_spec_dir expect(Dir.exist?(spec_path)).to eq true end it 'does not remove the spec directory if spec_deletable is false' do FileUtils.mkdir_p(spec_path) m = described_class.new(title, dirname, {}) m.spec_deletable = false m.maybe_delete_spec_dir expect(Dir.exist?(spec_path)).to eq true end end describe "path variables" do it "uses the module name as the name" do m = described_class.new('eight_hundred', '/moduledir', {}) expect(m.dirname).to eq '/moduledir' expect(m.path).to eq(Pathname.new('/moduledir/eight_hundred')) end it "does not include the owner in the path" do m = described_class.new('branan/eight_hundred', '/moduledir', {}) expect(m.dirname).to eq '/moduledir' expect(m.path).to eq(Pathname.new('/moduledir/eight_hundred')) end end describe "with alternate variable names" do subject do described_class.new('branan/eight_hundred', '/moduledir', {}) end it "aliases full_name to title" do expect(subject.full_name).to eq 'branan-eight_hundred' end it "aliases author to owner" do expect(subject.author).to eq 'branan' end it "aliases basedir to dirname" do expect(subject.basedir).to eq '/moduledir' end end describe "accepting a visitor" do subject { described_class.new('branan-eight_hundred', '/moduledir', {}) } it "passes itself to the visitor" do visitor = spy('visitor') expect(visitor).to receive(:visit).with(:module, subject) subject.accept(visitor) end end end r10k-4.0.2/spec/unit/module/forge_spec.rb000066400000000000000000000237131460033767200201470ustar00rootroot00000000000000require 'r10k/module/forge' require 'spec_helper' describe R10K::Module::Forge do # TODO: make these *unit* tests not depend on a real module on the real Forge :( include_context 'fail on execution' let(:fixture_modulepath) { File.expand_path('spec/fixtures/module/forge', PROJECT_ROOT) } let(:empty_modulepath) { File.expand_path('spec/fixtures/empty', PROJECT_ROOT) } describe "statically determined version support" do it 'returns explicitly released forge versions' do static_version = described_class.statically_defined_version('branan/eight_hundred', { version: '8.0.0' }) expect(static_version).to eq('8.0.0') end it 'returns explicit pre-released forge versions' do static_version = described_class.statically_defined_version('branan/eight_hundred', { version: '8.0.0-pre1' }) expect(static_version).to 
eq('8.0.0-pre1') end it 'returns nil for latest versions' do static_version = described_class.statically_defined_version('branan/eight_hundred', { version: :latest }) expect(static_version).to eq(nil) end it 'returns nil for undefined versions' do static_version = described_class.statically_defined_version('branan/eight_hundred', { version: nil }) expect(static_version).to eq(nil) end end describe "implementing the Puppetfile spec" do it "should implement 'branan/eight_hundred', '8.0.0'" do expect(described_class).to be_implement('branan/eight_hundred', { type: 'forge', version: '8.0.0' }) end it "should implement 'branan-eight_hundred', '8.0.0'" do expect(described_class).to be_implement('branan-eight_hundred', { type: 'forge', version: '8.0.0' }) end end describe "implementing the standard options interface" do it "should implement {type: forge}" do expect(described_class).to be_implement('branan-eight_hundred', { type: 'forge', version: '8.0.0', source: 'not implemented' }) end end describe "setting attributes" do subject { described_class.new('branan/eight_hundred', '/moduledir', { version: '8.0.0' }) } it "sets the name" do expect(subject.name).to eq 'eight_hundred' end it "sets the author" do expect(subject.author).to eq 'branan' end it "sets the dirname" do expect(subject.dirname).to eq '/moduledir' end it "sets the title" do expect(subject.title).to eq 'branan-eight_hundred' end end describe "invalid attributes" do it "errors on invalid versions" do expect { described_class.new('branan/eight_hundred', '/moduledir', { version: '_8.0.0_' }) }.to raise_error ArgumentError, /version/ end end describe "properties" do subject { described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) } it "sets the module type to :forge" do expect(subject.properties).to include(:type => :forge) end it "sets the expected version" do expect(subject.properties).to include(:expected => '8.0.0') end it "sets the actual version" do expect(subject).to receive(:current_version).and_return('0.8.0') expect(subject.properties).to include(:actual => '0.8.0') end end context "when a module is deprecated" do subject { described_class.new('puppetlabs/corosync', fixture_modulepath, { version: :latest }) } it "warns on sync if module is not already insync" do allow(subject).to receive(:status).and_return(:absent) allow(R10K::Forge::ModuleRelease).to receive(:new).and_return(double('mod_release', install: true)) logger_dbl = double(Log4r::Logger) allow_any_instance_of(described_class).to receive(:logger).and_return(logger_dbl) allow(logger_dbl).to receive(:info).with(/Deploying module to.*/) allow(logger_dbl).to receive(:debug2).with(/No spec dir detected/) expect(logger_dbl).to receive(:warn).with(/puppet forge module.*puppetlabs-corosync.*has been deprecated/i) subject.sync end it "does not warn on sync if module is already insync" do allow(subject).to receive(:status).and_return(:insync) logger_dbl = double(Log4r::Logger) allow_any_instance_of(described_class).to receive(:logger).and_return(logger_dbl) allow(logger_dbl).to receive(:info).with(/Deploying module to.*/) allow(logger_dbl).to receive(:debug2).with(/No spec dir detected/) expect(logger_dbl).to_not receive(:warn).with(/puppet forge module.*puppetlabs-corosync.*has been deprecated/i) subject.sync end end describe '#expected_version' do it "returns an explicitly given expected version" do subject = described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) expect(subject.expected_version).to eq '8.0.0' end
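# :latest cannot be resolved statically; the expected version has to come from the Forge's current release. (Illustrative Puppetfile entry, not part of the fixtures: mod 'branan/eight_hundred', :latest)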
it "uses the latest version from the forge when the version is :latest" do subject = described_class.new('branan/eight_hundred', fixture_modulepath, { version: :latest }) release = double("Module Release", version: '8.8.8') expect(subject.v3_module).to receive(:current_release).and_return(release).twice expect(subject.expected_version).to eq '8.8.8' end it "throws when there are no available versions" do subject = described_class.new('branan/eight_hundred', fixture_modulepath, { version: :latest }) expect(subject.v3_module).to receive(:current_release).and_return(nil) expect { subject.expected_version }.to raise_error(PuppetForge::ReleaseNotFound) end end describe "determining the status" do subject { described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) } it "is :absent if the module directory is absent" do allow(subject).to receive(:exist?).and_return false expect(subject.status).to eq :absent end it "is :mismatched if there is no module metadata" do allow(subject).to receive(:exist?).and_return true allow(File).to receive(:exist?).and_return false expect(subject.status).to eq :mismatched end it "is :mismatched if module was previously a git checkout" do allow(File).to receive(:directory?).and_return true expect(subject.status).to eq :mismatched end it "is :mismatched if the metadata author doesn't match the expected author" do allow(subject).to receive(:exist?).and_return true allow(subject.instance_variable_get(:@metadata_file)).to receive(:read).and_return subject.metadata allow(subject.metadata).to receive(:full_module_name).and_return 'blargh-blargh' expect(subject.status).to eq :mismatched end it "is :outdated if the metadata version doesn't match the expected version" do allow(subject).to receive(:exist?).and_return true allow(subject.instance_variable_get(:@metadata_file)).to receive(:read).and_return subject.metadata allow(subject.metadata).to receive(:version).and_return '7.0.0' expect(subject.status).to eq :outdated end it "is :insync if the version and the author are in sync" do allow(subject).to receive(:exist?).and_return true expect(subject.status).to eq :insync end end describe "#sync" do subject { described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) } context "syncing the repo" do let(:module_org) { "coolorg" } let(:module_name) { "coolmod" } let(:title) { "#{module_org}-#{module_name}" } let(:dirname) { Pathname.new(Dir.mktmpdir) } let(:spec_path) { dirname + module_name + 'spec' } subject { described_class.new(title, dirname, {}) } it 'defaults to deleting the spec dir' do FileUtils.mkdir_p(spec_path) expect(subject).to receive(:status).and_return(:absent) expect(subject).to receive(:install) subject.sync expect(Dir.exist?(spec_path)).to eq false end end it 'does nothing when the module is in sync' do allow(subject).to receive(:status).and_return :insync expect(subject).to receive(:install).never expect(subject).to receive(:upgrade).never expect(subject).to receive(:reinstall).never expect(subject.sync).to be false end it 'reinstalls the module when it is mismatched' do allow(subject).to receive(:status).and_return :mismatched expect(subject).to receive(:reinstall) expect(subject.sync).to be true end it 'upgrades the module when it is outdated' do allow(subject).to receive(:status).and_return :outdated expect(subject).to receive(:upgrade) expect(subject.sync).to be true end it 'installs the module when it is absent' do allow(subject).to receive(:status).and_return :absent expect(subject).to 
receive(:install) expect(subject.sync).to be true end it 'returns false if `should_sync?` is false' do # modules do not sync if they are not requested mod = described_class.new('my_org/my_mod', '/path/to/mod', { overrides: { modules: { requested_modules: ['other_mod'] } } }) expect(mod.sync).to be false end end describe '#install' do it 'installs the module from the forge' do subject = described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) release = instance_double('R10K::Forge::ModuleRelease') expect(R10K::Forge::ModuleRelease).to receive(:new).with('branan-eight_hundred', '8.0.0').and_return(release) expect(release).to receive(:install).with(subject.path) subject.install end end describe '#uninstall' do it 'removes the module path' do subject = described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) expect(FileUtils).to receive(:rm_rf).with(subject.path.to_s) subject.uninstall end end describe '#reinstall' do it 'uninstalls and then installs the module' do subject = described_class.new('branan/eight_hundred', fixture_modulepath, { version: '8.0.0' }) expect(subject).to receive(:uninstall) expect(subject).to receive(:install) subject.reinstall end end end r10k-4.0.2/spec/unit/module/git_spec.rb000066400000000000000000000357531460033767200176370ustar00rootroot00000000000000require 'spec_helper' require 'r10k/module/git' describe R10K::Module::Git do let(:mock_repo) do instance_double("R10K::Git::StatefulRepository") end before(:each) do allow(R10K::Git::StatefulRepository).to receive(:new).and_return(mock_repo) end describe "statically determined version support" do it 'returns a given commit' do static_version = described_class.statically_defined_version('branan/eight_hundred', { git: 'my/remote', commit: '123adf' }) expect(static_version).to eq('123adf') end it 'returns a given tag' do static_version = described_class.statically_defined_version('branan/eight_hundred', { git: 'my/remote', tag: 'v1.2.3' }) expect(static_version).to eq('v1.2.3') end it 'returns a ref if it looks like a full commit sha' do static_version = described_class.statically_defined_version('branan/eight_hundred', { git: 'my/remote', ref: '1234567890abcdef1234567890abcdef12345678' }) expect(static_version).to eq('1234567890abcdef1234567890abcdef12345678') end it 'returns nil for any non-sha-like ref' do static_version = described_class.statically_defined_version('branan/eight_hundred', { git: 'my/remote', ref: 'refs/heads/main' }) expect(static_version).to eq(nil) end it 'returns nil for branches' do static_version = described_class.statically_defined_version('branan/eight_hundred', { git: 'my/remote', branch: 'main' }) expect(static_version).to eq(nil) end end describe "setting the owner and name" do describe "with a title of 'branan/eight_hundred'" do subject do described_class.new( 'branan/eight_hundred', '/moduledir', { :git => 'https://git-server.site/branan/puppet-eight_hundred' } ) end it "sets the owner to 'branan'" do expect(subject.owner).to eq 'branan' end it "sets the name to 'eight_hundred'" do expect(subject.name).to eq 'eight_hundred' end it "sets the path to '/moduledir/eight_hundred'" do expect(subject.path).to eq(Pathname.new('/moduledir/eight_hundred')) end end describe "with a title of 'modulename'" do subject do described_class.new( 'eight_hundred', '/moduledir', { :git => 'https://git-server.site/branan/puppet-eight_hundred' } ) end it "sets the owner to nil" do expect(subject.owner).to be_nil end it "sets the name to 'eight_hundred'" 
do expect(subject.name).to eq 'eight_hundred' end it "sets the path to '/moduledir/eight_hundred'" do expect(subject.path).to eq(Pathname.new('/moduledir/eight_hundred')) end end end describe "properties" do subject do described_class.new('boolean', '/moduledir', {:git => 'https://git.example.com/adrienthebo/puppet-boolean', overrides: {modules: {default_ref: "main"}}}) end before(:each) do allow(mock_repo).to receive(:resolve).with('main').and_return('abc123') allow(mock_repo).to receive(:head).and_return('abc123') end it "sets the module type to :git" do expect(subject.properties).to include(:type => :git) end it "sets the expected version" do expect(subject.properties).to include(:expected => 'main') end it "sets the actual version to the revision when the revision is available" do expect(mock_repo).to receive(:head).and_return('35d3517e67ceeb4b485b56d4a14d38fb95516c92') expect(subject.properties).to include(:actual => '35d3517e67ceeb4b485b56d4a14d38fb95516c92') end it "sets the actual version to (unresolvable) when the revision is unavailable" do expect(mock_repo).to receive(:head).and_return(nil) expect(subject.properties).to include(:actual => '(unresolvable)') end end describe 'syncing the repo' do let(:module_org) { "coolorg" } let(:module_name) { "coolmod" } let(:title) { "#{module_org}-#{module_name}" } let(:dirname) { Pathname.new(Dir.mktmpdir) } let(:spec_path) { dirname + module_name + 'spec' } subject { described_class.new(title, dirname, {overrides: {modules: {default_ref: "main"}}}) } before(:each) do allow(mock_repo).to receive(:resolve).with('main').and_return('abc123') end it 'defaults to deleting the spec dir' do FileUtils.mkdir_p(spec_path) allow(mock_repo).to receive(:sync) subject.sync expect(Dir.exist?(spec_path)).to eq false end it 'returns true if repo was updated' do expect(mock_repo).to receive(:sync).and_return(true) expect(subject.sync).to be true end it 'returns false if repo was not updated (in-sync)' do expect(mock_repo).to receive(:sync).and_return(false) expect(subject.sync).to be false end it 'returns false if `should_sync?` is false' do # modules do not sync if they are not requested mod = described_class.new(title, dirname, { overrides: { modules: { requested_modules: ['other_mod'] } } }) expect(mod.sync).to be false end end describe "determining the status" do subject do described_class.new( 'boolean', '/moduledir', { :git => 'https://git.example.com/adrienthebo/puppet-boolean' } ) end it "delegates to the repo" do expect(subject).to receive(:version).and_return 'main' expect(mock_repo).to receive(:status).with('main').and_return :some_status expect(subject.status).to eq(:some_status) end end describe "option parsing" do def test_module(extra_opts, env=nil) described_class.new('boolean', '/moduledir', base_opts.merge(extra_opts), env) end let(:base_opts) { { git: 'https://git.example.com/adrienthebo/puppet-boolean' } } before(:each) do allow(mock_repo).to receive(:head).and_return('abc123') end it "raises an argument error when no refs are supplied" do expect{test_module({}).properties}.to raise_error(ArgumentError, /unable.*desired ref.*no default/i) end describe 'the overrides->modules->default_ref' do context 'specifying a default_ref only' do let(:opts) { {overrides: {modules: {default_ref: 'cranberry'}}} } it "sets the expected ref to default_ref" do expect(mock_repo).to receive(:resolve).with('cranberry').and_return('def456') expect(test_module(opts).properties).to include(expected: 'cranberry') end end context 'specifying a default_ref and a 
default_branch' do let(:opts) { {default_branch: 'orange', overrides: {modules: {default_ref: 'cranberry'}}}} it "sets the expected ref to the default_branch" do expect(mock_repo).to receive(:resolve).with('orange').and_return('def456') expect(test_module(opts).properties).to include(expected: 'orange') end end end describe "desired ref" do context "specifying a static desired branch" do let(:opts) { { branch: 'banana' } } it "sets expected to specified branch name" do expect(mock_repo).to receive(:resolve).with('banana').and_return('def456') mod = test_module(opts) expect(mod.properties).to include(expected: 'banana') end end context "specifying a static desired tag" do let(:opts) { { tag: '1.2.3' } } it "sets expected to specified tag" do expect(mock_repo).to receive(:resolve).with('1.2.3').and_return('def456') mod = test_module(opts) expect(mod.properties).to include(expected: '1.2.3') end end context "specifying a static desired commit sha" do let(:opts) { { commit: 'ace789' } } it "sets expected to specified commit sha" do expect(mock_repo).to receive(:resolve).with('ace789').and_return('ace789') mod = test_module(opts) expect(mod.properties).to include(expected: 'ace789') end end context "specifying a static desired ref" do before(:each) do expect(mock_repo).to receive(:resolve).and_return('abc123') end it "accepts a branch name" do mod = test_module(ref: 'banana') expect(mod.properties).to include(expected: 'banana') end it "accepts a tag name" do mod = test_module(ref: '1.2.3') expect(mod.properties).to include(expected: '1.2.3') end it "accepts a commit sha" do mod = test_module(ref: 'abc123') expect(mod.properties).to include(expected: 'abc123') end end context "specifying branch to :control_branch" do let(:mock_env) { instance_double("R10K::Environment::Git", ref: 'env_branch') } context "when module belongs to an environment and matching branch is resolvable" do before(:each) do expect(mock_repo).to receive(:resolve).with(mock_env.ref).and_return('abc123') end it "tracks environment branch" do mod = test_module({branch: :control_branch}, mock_env) expect(mod.properties).to include(expected: mock_env.ref) end end context "when module does not belong to an environment" do it "leaves desired_ref unchanged" do mod = test_module(branch: :control_branch) expect(mod.desired_ref).to eq(:control_branch) end it "warns control branch may be unresolvable" do logger = double("logger") allow_any_instance_of(described_class).to receive(:logger).and_return(logger) expect(logger).to receive(:warn).with(/Cannot track control repo branch.*boolean.*/) test_module(branch: :control_branch) end context "when default ref is provided and resolvable" do it "uses default ref" do expect(mock_repo).to receive(:resolve).with('default').and_return('abc123') mod = test_module({branch: :control_branch, default_branch: 'default'}) expect(mod.properties).to include(expected: 'default') end end context "when default ref is provided and not resolvable" do it "raises appropriate error" do expect(mock_repo).to receive(:resolve).with('default').and_return(nil) mod = test_module({branch: :control_branch, default_branch: 'default'}) expect { mod.properties }.to raise_error(ArgumentError, /unable to manage.*could not resolve control repo branch.*or resolve default/i) end end context "when default ref is not provided" do it "raises appropriate error" do mod = test_module({branch: :control_branch}) expect { mod.properties }.to raise_error(ArgumentError, /unable to manage.*could not resolve control repo branch.*no default 
provided/i) end end end context "when module does not have matching branch" do before(:each) do allow(mock_repo).to receive(:resolve).with(mock_env.ref).and_return(nil) end context "when default ref is provided and resolvable" do it "uses default ref" do expect(mock_repo).to receive(:resolve).with('default').and_return('abc123') mod = test_module({branch: :control_branch, default_branch: 'default'}, mock_env) expect(mod.properties).to include(expected: 'default') end end context "when default ref is provided and not resolvable" do it "raises appropriate error" do expect(mock_repo).to receive(:resolve).with('default').and_return(nil) mod = test_module({branch: :control_branch, default_branch: 'default'}, mock_env) expect { mod.properties }.to raise_error(ArgumentError, /unable to manage.*could not resolve desired.*or resolve default/i) end end context "when default ref is not provided" do it "raises appropriate error" do mod = test_module({branch: :control_branch}, mock_env) expect { mod.properties }.to raise_error(ArgumentError, /unable to manage.*no default provided/i) end end end context "when using default_branch_override" do before(:each) do allow(mock_repo).to receive(:resolve).with(mock_env.ref).and_return(nil) end context "and the default branch override is resolvable" do it "uses the override" do expect(mock_repo).to receive(:resolve).with('default_override').and_return('5566aabb') mod = test_module({branch: :control_branch, default_branch: 'default', default_branch_override: 'default_override'}, mock_env) expect(mod.properties).to include(expected: 'default_override') end end context "and the default branch override is not resolvable" do context "and default branch is provided" do it "falls back to the default" do expect(mock_repo).to receive(:resolve).with('default_override').and_return(nil) expect(mock_repo).to receive(:resolve).with('default').and_return('5566aabb') mod = test_module({branch: :control_branch, default_branch: 'default', default_branch_override: 'default_override'}, mock_env) expect(mod.properties).to include(expected: 'default') end end context "and default branch is not provided" do it "raises the appropriate error" do expect(mock_repo).to receive(:resolve).with('default_override').and_return(nil) mod = test_module({branch: :control_branch, default_branch_override: 'default_override'}, mock_env) expect { mod.properties }.to raise_error(ArgumentError, /unable to manage.*or resolve the default branch override.*no default provided/i) end end context "and default branch is not resolvable" do it "raises the appropriate error" do expect(mock_repo).to receive(:resolve).with('default_override').and_return(nil) expect(mock_repo).to receive(:resolve).with('default').and_return(nil) mod = test_module({branch: :control_branch, default_branch: 'default', default_branch_override: 'default_override'}, mock_env) expect { mod.properties }.to raise_error(ArgumentError, /unable to manage.*or resolve the default branch override.*or resolve default/i) end end end end end end end end r10k-4.0.2/spec/unit/module/metadata_file_spec.rb000066400000000000000000000037271460033767200216270ustar00rootroot00000000000000require 'spec_helper' describe R10K::Module::MetadataFile do let(:path) { double 'pathname' } subject(:metadata_file) { described_class.new(path) } describe "determining if the metadata file exists" do it "doesn't exist if the given path isn't a file" do allow(path).to receive(:file?).and_return false expect(metadata_file).to_not be_exist end it "doesn't exist if the given 
path is an unreadable file" do allow(path).to receive(:file?).and_return true allow(path).to receive(:readable?).and_return false expect(metadata_file).to_not be_exist end it "exists if the given path exists and is readable" do allow(path).to receive(:file?).and_return true allow(path).to receive(:readable?).and_return true expect(metadata_file).to be_exist end end describe "parsing a metadata file" do let(:fixture_path) do fixture_path = Pathname.new(File.expand_path('spec/fixtures/module/forge/eight_hundred/metadata.json', PROJECT_ROOT)) end before do a_metadata_file = R10K::Module::MetadataFile.new(fixture_path) @metadata = a_metadata_file.read end it "extracts the module name" do expect(@metadata.name).to eq 'eight_hundred' end it "extracts the full module name" do expect(@metadata.full_module_name).to eq 'branan-eight_hundred' end it "extracts the module author" do expect(@metadata.author).to eq 'Branan Purvine-Riley' end it "extracts the module version" do expect(@metadata.version).to eq '8.0.0' end end describe "parsing an invalid metadata file" do let(:fixture_path) do fixture_path = Pathname.new(File.expand_path('spec/fixtures/module/forge/bad_module/metadata.json', PROJECT_ROOT)) end it "raises an error" do a_metadata_file = R10K::Module::MetadataFile.new(fixture_path) expect {a_metadata_file.read}.to raise_error(R10K::Error, "Could not read metadata.json") end end end r10k-4.0.2/spec/unit/module/svn_spec.rb000066400000000000000000000154661460033767200176610ustar00rootroot00000000000000require 'spec_helper' require 'r10k/module/svn' describe R10K::Module::SVN do include_context 'fail on execution' describe "statically determined version support" do it 'is unsupported by svn backed modules' do static_version = described_class.statically_defined_version('branan/eight_hundred', { svn: 'my/remote', revision: '123adf' }) expect(static_version).to eq(nil) end end describe "determining it implements a Puppetfile mod" do it "implements mods with the :svn hash key" do implements = described_class.implement?('r10k-fixture-repo', :svn => 'https://github.com/adrienthebo/r10k-fixture-repo') expect(implements).to eq true end end describe "path variables" do it "uses the module name as the name" do svn = described_class.new('foo', '/moduledir', :rev => 'r10') expect(svn.name).to eq 'foo' expect(svn.owner).to be_nil expect(svn.dirname).to eq '/moduledir' expect(svn.path).to eq Pathname.new('/moduledir/foo') end it "does not include the owner in the path" do svn = described_class.new('bar/foo', '/moduledir', :rev => 'r10') expect(svn.name).to eq 'foo' expect(svn.owner).to eq 'bar' expect(svn.dirname).to eq '/moduledir' expect(svn.path).to eq Pathname.new('/moduledir/foo') end end describe "instantiating based on Puppetfile configuration" do it "can specify a revision with the :rev key" do svn = described_class.new('foo', '/moduledir', :rev => 'r10') expect(svn.expected_revision).to eq 'r10' end it "can specify a revision with the :revision key" do svn = described_class.new('foo', '/moduledir', :revision => 'r10') expect(svn.expected_revision).to eq 'r10' end end describe "properties" do subject { described_class.new('foo', '/moduledir', :svn => 'https://github.com/adrienthebo/r10k-fixture-repo', :rev => 123) } it "sets the module type to :svn" do expect(subject.properties).to include(:type => :svn) end it "sets the expected version" do expect(subject.properties).to include(:expected => 123) end it "sets the actual version to the revision when the revision is available" do 
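# the revision reported by the SVN working copy becomes the :actual property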
expect(subject.working_dir).to receive(:revision).and_return(12) expect(subject.properties).to include(:actual => 12) end it "sets the actual version (unresolvable) when the revision is unavailable" do expect(subject.working_dir).to receive(:revision).and_raise(ArgumentError) expect(subject.properties).to include(:actual => "(unresolvable)") end end describe "determining the status" do subject { described_class.new('foo', '/moduledir', :svn => 'https://github.com/adrienthebo/r10k-fixture-repo', :rev => 123) } let(:working_dir) { double 'working_dir' } before do allow(R10K::SVN::WorkingDir).to receive(:new).and_return working_dir end it "is :absent if the module directory is absent" do allow(subject).to receive(:exist?).and_return false expect(subject.status).to eq :absent end it "is :mismatched if the directory is present but not an SVN repo" do allow(subject).to receive(:exist?).and_return true allow(working_dir).to receive(:is_svn?).and_return false expect(subject.status).to eq :mismatched end it "is mismatched when the wrong SVN URL is checked out" do allow(subject).to receive(:exist?).and_return true allow(working_dir).to receive(:is_svn?).and_return true allow(working_dir).to receive(:url).and_return 'svn://nope/trunk' expect(subject.status).to eq :mismatched end it "is :outdated when the expected rev doesn't match the actual rev" do allow(subject).to receive(:exist?).and_return true allow(working_dir).to receive(:is_svn?).and_return true allow(working_dir).to receive(:url).and_return 'https://github.com/adrienthebo/r10k-fixture-repo' allow(working_dir).to receive(:revision).and_return 99 expect(subject.status).to eq :outdated end it "is :insync if all other conditions are satisfied" do allow(subject).to receive(:exist?).and_return true allow(working_dir).to receive(:is_svn?).and_return true allow(working_dir).to receive(:url).and_return 'https://github.com/adrienthebo/r10k-fixture-repo' allow(working_dir).to receive(:revision).and_return 123 expect(subject.status).to eq :insync end end describe 'the default spec dir' do let(:module_org) { "coolorg" } let(:module_name) { "coolmod" } let(:title) { "#{module_org}-#{module_name}" } let(:dirname) { Pathname.new(Dir.mktmpdir) } let(:spec_path) { dirname + module_name + 'spec' } subject { described_class.new(title, dirname, {}) } it 'is deleted by default' do FileUtils.mkdir_p(spec_path) expect(subject).to receive(:status).and_return(:absent) expect(subject).to receive(:install).and_return(nil) subject.sync expect(Dir.exist?(spec_path)).to eq false end end describe "synchronizing" do subject { described_class.new('foo', '/moduledir', :svn => 'https://github.com/adrienthebo/r10k-fixture-repo', :rev => 123) } before do allow(File).to receive(:directory?).with('/moduledir').and_return true end describe "and the state is :absent" do before { allow(subject).to receive(:status).and_return :absent } it "installs the SVN module" do expect(subject).to receive(:install) expect(subject.sync).to be true end end describe "and the state is :mismatched" do before { allow(subject).to receive(:status).and_return :mismatched } it "reinstalls the module" do expect(subject).to receive(:reinstall) expect(subject.sync).to be true end it "removes the existing directory" do expect(subject.path).to receive(:rmtree) allow(subject).to receive(:install) expect(subject.sync).to be true end end describe "and the state is :outdated" do before { allow(subject).to receive(:status).and_return :outdated } it "upgrades the repository" do expect(subject).to receive(:update) 
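# an outdated checkout is updated in place, and sync reports that a change was made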
expect(subject.sync).to be true end end describe "and the state is :insync" do before { allow(subject).to receive(:status).and_return :insync } it "doesn't change anything" do expect(subject).to receive(:install).never expect(subject).to receive(:reinstall).never expect(subject).to receive(:update).never expect(subject.sync).to be false end end it 'and `should_sync?` is false' do # modules do not sync if they are not requested mod = described_class.new('my_mod', '/path/to/mod', { overrides: { modules: { requested_modules: ['other_mod'] } } }) expect(mod.sync).to be false end end end r10k-4.0.2/spec/unit/module/tarball_spec.rb000066400000000000000000000036471460033767200204720ustar00rootroot00000000000000require 'spec_helper' require 'r10k/module' require 'fileutils' describe R10K::Module::Tarball do include_context 'Tarball' let(:base_params) { { type: 'tarball', source: fixture_tarball, version: fixture_checksum } } subject do described_class.new( 'fixture-tarball', moduledir, base_params, ) end describe "setting the owner and name" do describe "with a title of 'fixture-tarball'" do it "sets the owner to 'fixture'" do expect(subject.owner).to eq 'fixture' end it "sets the name to 'tarball'" do expect(subject.name).to eq 'tarball' end it "sets the path to the given moduledir + modname" do expect(subject.path.to_s).to eq(File.join(moduledir, 'tarball')) end end end describe "properties" do it "sets the module type to :tarball" do expect(subject.properties).to include(type: :tarball) end it "sets the version" do expect(subject.properties).to include(expected: fixture_checksum) end end describe 'syncing the module' do it 'defaults to deleting the spec dir' do subject.sync expect(Dir.exist?(File.join(moduledir, 'tarball', 'spec'))).to be(false) end end describe "determining the status" do it "delegates to R10K::Tarball" do expect(subject).to receive(:tarball).twice.and_return instance_double('R10K::Tarball', cache_valid?: true, insync?: true) expect(subject).to receive(:path).twice.and_return instance_double('Pathname', exist?: true) expect(subject.status).to eq(:insync) end end describe "option parsing" do describe "version" do context "when no version is given" do subject { described_class.new('fixture-tarball', moduledir, base_params.reject { |k| k.eql?(:version) }) } it "does not require a version" do expect(subject).to be_kind_of(described_class) end end end end end r10k-4.0.2/spec/unit/module_loader/000077500000000000000000000000001460033767200170265ustar00rootroot00000000000000r10k-4.0.2/spec/unit/module_loader/puppetfile_spec.rb000066400000000000000000000455701460033767200225550ustar00rootroot00000000000000require 'spec_helper' require 'r10k/module_loader/puppetfile' require 'tmpdir' describe R10K::ModuleLoader::Puppetfile do describe 'initial parameters' do describe 'honor' do let(:options) do { basedir: '/test/basedir/env', overrides: { modules: { deploy_modules: true } }, environment: R10K::Environment::Git.new('env', '/test/basedir/', 'env', { remote: 'https://foo/remote', ref: 'env' }) } end subject { R10K::ModuleLoader::Puppetfile.new(**options) } describe 'the moduledir' do it 'respects absolute paths' do absolute_options = options.merge({moduledir: '/opt/puppetlabs/special/modules'}) puppetfile = R10K::ModuleLoader::Puppetfile.new(**absolute_options) expect(puppetfile.instance_variable_get(:@moduledir)).to eq('/opt/puppetlabs/special/modules') end it 'roots the moduledir in the basepath if a relative path is specified' do relative_options = options.merge({moduledir: 
'my/special/modules'}) puppetfile = R10K::ModuleLoader::Puppetfile.new(**relative_options) expect(puppetfile.instance_variable_get(:@moduledir)).to eq('/test/basedir/env/my/special/modules') end end describe 'the Puppetfile' do it 'respects absolute paths' do absolute_options = options.merge({puppetfile: '/opt/puppetlabs/special/Puppetfile'}) puppetfile = R10K::ModuleLoader::Puppetfile.new(**absolute_options) expect(puppetfile.instance_variable_get(:@puppetfile_path)).to eq('/opt/puppetlabs/special/Puppetfile') end it 'roots the Puppetfile in the basepath if a relative path is specified' do relative_options = options.merge({puppetfile: 'Puppetfile.global'}) puppetfile = R10K::ModuleLoader::Puppetfile.new(**relative_options) expect(puppetfile.instance_variable_get(:@puppetfile_path)).to eq('/test/basedir/env/Puppetfile.global') end end it 'the overrides' do expect(subject.instance_variable_get(:@overrides)).to eq({ modules: { deploy_modules: true }}) end it 'the environment' do expect(subject.instance_variable_get(:@environment).name).to eq('env') end end describe 'sane defaults' do subject { R10K::ModuleLoader::Puppetfile.new(basedir: '/test/basedir') } it 'has a moduledir rooted in the basedir' do expect(subject.instance_variable_get(:@moduledir)).to eq('/test/basedir/modules') end it 'has a Puppetfile rooted in the basedir' do expect(subject.instance_variable_get(:@puppetfile_path)).to eq('/test/basedir/Puppetfile') end it 'creates an empty overrides' do expect(subject.instance_variable_get(:@overrides)).to eq({}) end it 'does not require an environment' do expect(subject.instance_variable_get(:@environment)).to eq(nil) end end end describe 'adding modules' do let(:basedir) { '/test/basedir' } subject { R10K::ModuleLoader::Puppetfile.new(basedir: basedir, overrides: {modules: {exclude_spec: false}}) } it 'should transform Forge modules with a string arg to have a version key' do expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', subject.moduledir, hash_including(version: '1.2.3'), anything).and_call_original expect { subject.add_module('puppet/test_module', '1.2.3') }.to change { subject.modules } expect(subject.modules.collect(&:name)).to include('test_module') end it 'should not accept Forge modules with a version comparison' do expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', subject.moduledir, hash_including(version: '< 1.2.0'), anything).and_call_original expect { subject.add_module('puppet/test_module', '< 1.2.0') }.to raise_error(ArgumentError, /module version .* is not a valid forge module version/i) expect(subject.modules.collect(&:name)).not_to include('test_module') end it 'should not modify the overrides when adding modules' do module_opts = { git: 'git@example.com:puppet/test_module.git' } subject.add_module('puppet/test_module', module_opts) expect(subject.instance_variable_get("@overrides")[:modules]).to eq({exclude_spec: false}) end it 'should read the `exclude_spec` setting in the module definition and override the overrides' do module_opts = { git: 'git@example.com:puppet/test_module.git', exclude_spec: true } subject.add_module('puppet/test_module', module_opts) expect(subject.modules[0].instance_variable_get("@exclude_spec")).to be true end it 'should set :spec_deletable to true for modules in the basedir' do module_opts = { git: 'git@example.com:puppet/test_module.git' } subject.add_module('puppet/test_module', module_opts) expect(subject.modules[0].spec_deletable).to be true end it 'should set :spec_deletable to 
false for modules outside the basedir' do module_opts = { git: 'git@example.com:puppet/test_module.git', install_path: 'some/path' } subject.add_module('puppet/test_module', module_opts) expect(subject.modules[0].spec_deletable).to be false end it 'should accept non-Forge modules with a hash arg' do module_opts = { git: 'git@example.com:puppet/test_module.git' } expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', subject.moduledir, module_opts, anything).and_call_original expect { subject.add_module('puppet/test_module', module_opts) }.to change { subject.modules } expect(subject.modules.collect(&:name)).to include('test_module') end it 'should accept non-Forge modules with a valid relative :install_path option' do module_opts = { install_path: 'vendor', git: 'git@example.com:puppet/test_module.git', } expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', File.join(basedir, 'vendor'), module_opts, anything).and_call_original expect { subject.add_module('puppet/test_module', module_opts) }.to change { subject.modules } expect(subject.modules.collect(&:name)).to include('test_module') end it 'should accept non-Forge modules with a valid absolute :install_path option' do install_path = File.join(basedir, 'vendor') module_opts = { install_path: install_path, git: 'git@example.com:puppet/test_module.git', } expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', install_path, module_opts, anything).and_call_original expect { subject.add_module('puppet/test_module', module_opts) }.to change { subject.modules } expect(subject.modules.collect(&:name)).to include('test_module') end it 'should reject non-Forge modules with an invalid relative :install_path option' do module_opts = { install_path: '../../vendor', git: 'git@example.com:puppet/test_module.git', } expect { subject.add_module('puppet/test_module', module_opts) }.to raise_error(R10K::Error, /cannot manage content.*is not within/i).and not_change { subject.modules } end it 'should reject non-Forge modules with an invalid absolute :install_path option' do module_opts = { install_path: '/tmp/mydata/vendor', git: 'git@example.com:puppet/test_module.git', } expect { subject.add_module('puppet/test_module', module_opts) }.to raise_error(R10K::Error, /cannot manage content.*is not within/i).and not_change { subject.modules } end it 'should disable and not add modules that conflict with the environment' do env = instance_double('R10K::Environment::Base') mod = instance_double('R10K::Module::Base', name: 'conflict', origin: :puppetfile, 'origin=': nil) allow(env).to receive(:name).and_return('conflict') loader = R10K::ModuleLoader::Puppetfile.new(basedir: basedir, environment: env) allow(env).to receive(:'module_conflicts?').with(mod).and_return(true) allow(mod).to receive(:spec_deletable=) expect(R10K::Module).to receive(:from_metadata).with('conflict', anything, anything, anything).and_return(mod) expect { loader.add_module('conflict', {}) }.not_to change { loader.modules } end end describe '#purge_exclusions' do let(:managed_dirs) { ['dir1', 'dir2'] } subject { R10K::ModuleLoader::Puppetfile.new(basedir: '/test/basedir') } it 'includes managed_directories' do expect(subject.send(:determine_purge_exclusions, managed_dirs)).to match_array(managed_dirs) end context 'when belonging to an environment' do let(:env_contents) { ['env1', 'env2' ] } let(:env) { double(:environment, desired_contents: env_contents) } before { allow(env).to receive(:name).and_return('env1') } subject { 
R10K::ModuleLoader::Puppetfile.new(basedir: '/test/basedir', environment: env) } it "includes environment's desired_contents" do expect(subject.send(:determine_purge_exclusions, managed_dirs)).to match_array(managed_dirs + env_contents) end end end describe '#managed_directories' do let(:basedir) { '/test/basedir' } subject { R10K::ModuleLoader::Puppetfile.new(basedir: basedir) } before do allow(subject).to receive(:puppetfile_content).and_return('') end it 'returns an array of paths that #purge! will operate within' do expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', subject.moduledir, hash_including(version: '1.2.3'), anything).and_call_original subject.add_module('puppet/test_module', '1.2.3') subject.load! expect(subject.modules.length).to be 1 expect(subject.managed_directories).to match_array([subject.moduledir]) end context "with a module with install_path == ''" do it "basedir isn't in the list of paths to purge" do module_opts = { install_path: '', git: 'git@example.com:puppet/test_module.git' } expect(R10K::Module).to receive(:from_metadata).with('puppet/test_module', basedir, module_opts, anything).and_call_original subject.add_module('puppet/test_module', module_opts) subject.load! expect(subject.modules.length).to be 1 expect(subject.managed_directories).to be_empty end end end describe 'evaluating a Puppetfile' do def expect_wrapped_error(error, pf_path, error_type) expect(error).to be_a_kind_of(R10K::Error) expect(error.message).to eq("Failed to evaluate #{pf_path}") expect(error.original).to be_a_kind_of(error_type) end subject { described_class.new(basedir: @path) } it 'wraps and re-raises syntax errors' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'invalid-syntax') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.to raise_error do |e| expect_wrapped_error(e, pf_path, SyntaxError) end end it 'wraps and re-raises load errors' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'load-error') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.to raise_error do |e| expect_wrapped_error(e, pf_path, LoadError) end end it 'wraps and re-raises argument errors' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'argument-error') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.to raise_error do |e| expect_wrapped_error(e, pf_path, ArgumentError) end end describe 'forge declaration' do before(:each) do PuppetForge.host = "" end it 'is respected if `allow_puppetfile_override` is true' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'forge-override') puppetfile = R10K::ModuleLoader::Puppetfile.new(basedir: @path, overrides: { forge: { allow_puppetfile_override: true } }) puppetfile.load! expect(PuppetForge.host).to eq("my.custom.forge.com/") end it 'is ignored if `allow_puppetfile_override` is false' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'forge-override') puppetfile = R10K::ModuleLoader::Puppetfile.new(basedir: @path, overrides: { forge: { allow_puppetfile_override: false } }) expect(PuppetForge).not_to receive(:host=).with("my.custom.forge.com") puppetfile.load! expect(PuppetForge.host).to eq("/") end end it 'rejects Puppetfiles with duplicate module names' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'duplicate-module-error') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! 
}.to raise_error(R10K::Error, /Puppetfiles cannot contain duplicate module names/i) end it 'wraps and re-raises name errors' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'name-error') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.to raise_error do |e| expect_wrapped_error(e, pf_path, NameError) end end it 'accepts a forge module with a version' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.not_to raise_error end describe 'setting a custom moduledir' do it 'allows setting an absolute moduledir' do @path = '/fake/basedir' allow(subject).to receive(:puppetfile_content).and_return('moduledir "/fake/moduledir"') subject.load! expect(subject.instance_variable_get(:@moduledir)).to eq('/fake/moduledir') end it 'roots relative moduledirs in the basedir' do @path = '/fake/basedir' allow(subject).to receive(:puppetfile_content).and_return('moduledir "my/moduledir"') subject.load! expect(subject.instance_variable_get(:@moduledir)).to eq(File.join(@path, 'my/moduledir')) end end it 'accepts a forge module without a version' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-without-version') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.not_to raise_error end it 'creates a git module and applies the default branch specified in the Puppetfile' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'default-branch-override') pf_path = File.join(@path, 'Puppetfile') expect { subject.load! }.not_to raise_error git_module = subject.modules[0] expect(git_module.default_ref).to eq 'here_lies_the_default_branch' end it 'creates a git module and applies the provided default_branch_override' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'default-branch-override') pf_path = File.join(@path, 'Puppetfile') default_branch_override = 'default_branch_override_name' subject.default_branch_override = default_branch_override expect { subject.load! 
}.not_to raise_error git_module = subject.modules[0] expect(git_module.default_override_ref).to eq default_branch_override expect(git_module.default_ref).to eq 'here_lies_the_default_branch' end describe 'using module metadata' do it 'properly loads module metadata' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'various-modules') metadata = subject.load_metadata[:modules].map { |mod| [ mod.name, mod.version ] }.to_h expect(metadata['apt']).to eq('2.1.1') expect(metadata['stdlib']).to eq(nil) expect(metadata['concat']).to eq(nil) expect(metadata['rpm']).to eq('2.1.1-pre1') expect(metadata['foo']).to eq(nil) expect(metadata['bar']).to eq('v1.2.3') expect(metadata['baz']).to eq('123abc456') expect(metadata['fizz']).to eq('1234567890abcdef1234567890abcdef12345678') expect(metadata['buzz']).to eq(nil) expect(metadata['canary']).to eq('0.0.0') end it 'does not load module implementations for static versions unless the module install path does not exist on disk' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'various-modules') subject.load_metadata modules = subject.load[:modules].map { |mod| [ mod.name, mod ] }.to_h expect(modules['apt']).to be_a_kind_of(R10K::Module::Definition) expect(modules['stdlib']).to be_a_kind_of(R10K::Module::Forge) expect(modules['concat']).to be_a_kind_of(R10K::Module::Forge) expect(modules['rpm']).to be_a_kind_of(R10K::Module::Definition) expect(modules['foo']).to be_a_kind_of(R10K::Module::Git) expect(modules['bar']).to be_a_kind_of(R10K::Module::Git) expect(modules['baz']).to be_a_kind_of(R10K::Module::Definition) expect(modules['fizz']).to be_a_kind_of(R10K::Module::Definition) expect(modules['buzz']).to be_a_kind_of(R10K::Module::Git) expect(modules['canary']).to be_a_kind_of(R10K::Module::Definition) end it 'loads module implementations whose static versions are different' do fixture_path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'various-modules') @path = Dir.mktmpdir unsynced_pf_path = File.join(fixture_path, 'Puppetfile') FileUtils.cp(unsynced_pf_path, @path) subject.load_metadata synced_pf_path = File.join(fixture_path, 'Puppetfile.new') FileUtils.cp(synced_pf_path, File.join(@path, 'Puppetfile')) modules = subject.load[:modules].map { |mod| [ mod.name, mod ] }.to_h expect(modules['apt']).to be_a_kind_of(R10K::Module::Forge) end end describe 'using module-exclude-regex' do it 'can exclude a module from being installed' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'various-modules') puppetfile = R10K::ModuleLoader::Puppetfile.new(basedir: @path, module_exclude_regex: '^concat$') puppetfile.load! expect(puppetfile.modules.collect(&:name)).not_to include('concat') end it 'can exclude multiple modules from being installed' do @path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'various-modules') puppetfile = R10K::ModuleLoader::Puppetfile.new(basedir: @path, module_exclude_regex: '^ba[rz]$') puppetfile.load! 
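# modules whose names match module_exclude_regex are filtered out of the loaded module list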
expect(puppetfile.modules.collect(&:name)).not_to include('bar') expect(puppetfile.modules.collect(&:name)).not_to include('baz') end end end end r10k-4.0.2/spec/unit/module_spec.rb000066400000000000000000000102721460033767200170410ustar00rootroot00000000000000require 'spec_helper' require 'r10k/module' describe R10K::Module do describe 'delegating to R10K::Module::Git' do [ {git: 'git url'}, {type: 'git', source: 'git url'}, ].each do |scenario| it "accepts a name matching 'test' and args #{scenario.inspect}" do obj = R10K::Module.new('test', '/modulepath', scenario) expect(obj).to be_a_kind_of(R10K::Module::Git) expect(obj.send(:instance_variable_get, :'@remote')).to eq('git url') end end end describe 'delegating to R10K::Module::Svn' do [ {svn: 'svn url'}, {type: 'svn', source: 'svn url'}, ].each do |scenario| it "accepts a name matching 'test' and args #{scenario.inspect}" do obj = R10K::Module.new('test', '/modulepath', scenario) expect(obj).to be_a_kind_of(R10K::Module::SVN) expect(obj.send(:instance_variable_get, :'@url')).to eq('svn url') end end end describe 'delegating to R10K::Module::Forge' do [ 'bar/quux', 'bar-quux', ].each do |scenario| it "accepts a name matching #{scenario} and version nil" do obj = R10K::Module.new(scenario, '/modulepath', { type: 'forge', version: nil }) expect(obj).to be_a_kind_of(R10K::Module::Forge) end end [ {type: 'forge', version: '8.0.0'}, ].each do |scenario| it "accepts a name matching bar-quux and args #{scenario.inspect}" do obj = R10K::Module.new('bar-quux', '/modulepath', scenario) expect(obj).to be_a_kind_of(R10K::Module::Forge) expect(obj.send(:instance_variable_get, :'@expected_version')).to eq('8.0.0') end end describe 'when the module is ostensibly on disk' do before do owner = 'theowner' module_name = 'themodulename' @title = "#{owner}-#{module_name}" metadata = <<~METADATA { "name": "#{@title}", "version": "1.2.0" } METADATA @dirname = Dir.mktmpdir module_path = File.join(@dirname, module_name) FileUtils.mkdir(module_path) File.open("#{module_path}/metadata.json", 'w') do |file| file.write(metadata) end end it 'sets the expected version to what is found in the metadata' do obj = R10K::Module.new(@title, @dirname, {type: 'forge', version: nil}) expect(obj.send(:instance_variable_get, :'@expected_version')).to eq('1.2.0') end end end it "raises an error if delegation fails" do expect { R10K::Module.new('bar!quux', '/modulepath', {version: ["NOPE NOPE NOPE NOPE!"]}) }.to raise_error RuntimeError, /doesn't have an implementation/ end describe 'Given a set of initialization parameters for R10K::Module' do [ ['name', {git: 'git url'}], ['name', {type: 'git', source: 'git url'}], ['name', {svn: 'svn url'}], ['name', {type: 'svn', source: 'svn url'}], ['namespace-name', {type: 'forge', version: '8.0.0'}] ].each do |(name, options)| it 'can handle the default_branch_override option' do expect { obj = R10K::Module.new(name, '/modulepath', options.merge({default_branch_override: 'foo'})) expect(obj).to be_a_kind_of(R10K::Module::Base) }.not_to raise_error end describe 'the exclude_spec setting' do it 'sets the exclude_spec instance variable to true by default' do obj = R10K::Module.new(name, '/modulepath', options) expect(obj.instance_variable_get("@exclude_spec")).to eq(true) end it 'cannot be overridden by the settings from the cli, r10k.yaml, or settings default' do options = options.merge({exclude_spec: false, overrides: {modules: {exclude_spec: true}}}) obj = R10K::Module.new(name, '/modulepath', options) 
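# a per-module exclude_spec value takes precedence over the value supplied via overrides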
expect(obj.instance_variable_get("@exclude_spec")).to eq(false) end it 'reads the setting from the cli, r10k.yaml, or settings default when not provided directly' do options = options.merge({overrides: {modules: {exclude_spec: false}}}) obj = R10K::Module.new(name, '/modulepath', options) expect(obj.instance_variable_get("@exclude_spec")).to eq(false) end end end end end r10k-4.0.2/spec/unit/puppetfile_spec.rb000066400000000000000000000263201460033767200177320ustar00rootroot00000000000000require 'spec_helper' require 'r10k/puppetfile' describe R10K::Puppetfile do subject do described_class.new( '/some/nonexistent/basedir', {puppetfile_name: 'Puppetfile.r10k'} ) end describe "a custom puppetfile_name" do it "is the basedir joined with '/Puppetfile.r10k' path" do expect(subject.puppetfile_path).to eq '/some/nonexistent/basedir/Puppetfile.r10k' end end end describe R10K::Puppetfile do describe "a custom relative puppetfile_path" do it "is the basedir joined with the puppetfile_path" do relative_subject = described_class.new('/some/nonexistent/basedir', {puppetfile_path: 'relative/Puppetfile'}) expect(relative_subject.puppetfile_path).to eq '/some/nonexistent/basedir/relative/Puppetfile' end end describe "a custom absolute puppetfile_path" do it "is the puppetfile_path as given" do absolute_subject = described_class.new('/some/nonexistent/basedir', {puppetfile_path: '/some/absolute/custom/Puppetfile'}) expect(absolute_subject.puppetfile_path).to eq '/some/absolute/custom/Puppetfile' end end end describe R10K::Puppetfile do subject do described_class.new( '/some/nonexistent/basedir', {}) end describe "backwards compatibility with older calling conventions" do it "honors all arguments correctly" do puppetfile = described_class.new('/some/nonexistant/basedir', '/some/nonexistant/basedir/site-modules', nil, 'Pupupupetfile', true) expect(puppetfile.force).to eq(true) expect(puppetfile.moduledir).to eq('/some/nonexistant/basedir/site-modules') expect(puppetfile.puppetfile_path).to eq('/some/nonexistant/basedir/Pupupupetfile') expect(puppetfile.overrides).to eq({}) end it "handles defaults correctly" do puppetfile = described_class.new('/some/nonexistant/basedir', nil, nil, nil) expect(puppetfile.force).to eq(false) expect(puppetfile.moduledir).to eq('/some/nonexistant/basedir/modules') expect(puppetfile.puppetfile_path).to eq('/some/nonexistant/basedir/Puppetfile') expect(puppetfile.overrides).to eq({}) end end describe "the default moduledir" do it "is the basedir joined with '/modules' path" do expect(subject.moduledir).to eq '/some/nonexistent/basedir/modules' end end describe "the default puppetfile" do it "is the basedir joined with '/Puppetfile' path" do expect(subject.puppetfile_path).to eq '/some/nonexistent/basedir/Puppetfile' end end describe "setting moduledir" do it "changes to given moduledir if it is an absolute path" do subject.set_moduledir('/absolute/path/moduledir') expect(subject.moduledir).to eq '/absolute/path/moduledir' end it "joins the basedir with the given moduledir if it is a relative path" do subject.set_moduledir('relative/moduledir') expect(subject.moduledir).to eq '/some/nonexistent/basedir/relative/moduledir' end end describe "loading a Puppetfile" do context 'using load' do it "returns the loaded content" do path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {}) loaded_content = subject.load expect(loaded_content).to be_an_instance_of(Hash) has_some_data = 
loaded_content.values.none?(&:empty?) expect(has_some_data).to be true end it "handles a relative basedir" do path = File.join('spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {}) loaded_content = subject.load expect(loaded_content).to be_an_instance_of(Hash) has_some_data = loaded_content.values.none?(&:empty?) expect(has_some_data).to be true end it "is idempotent" do path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {}) expect(subject.loader).to receive(:load!).and_call_original.once loaded_content1 = subject.load expect(subject.loaded?).to be true loaded_content2 = subject.load expect(loaded_content2).to eq(loaded_content1) end it "returns nil if Puppetfile doesn't exist" do path = '/rando/path/that/wont/exist' subject = described_class.new(path, {}) expect(subject.load).to eq nil end end context 'using load!' do it "returns the loaded content" do path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {}) loaded_content = subject.load! expect(loaded_content).to be_an_instance_of(Hash) has_some_data = loaded_content.values.none?(&:empty?) expect(has_some_data).to be true end it "raises if Puppetfile doesn't exist" do path = '/rando/path/that/wont/exist' subject = described_class.new(path, {}) expect { subject.load! }.to raise_error(/No such file or directory.*\/rando\/path\/.*/) end end end describe 'default_branch_override' do it 'is passed correctly to module loader init' do # This path doesn't matter so long as it has a Puppetfile within it path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {overrides: {environments: {default_branch_override: 'foo'}}}) repo = instance_double('R10K::Git::StatefulRepository') allow(repo).to receive(:resolve).with('foo').and_return(true) allow(R10K::Git::StatefulRepository).to receive(:new).and_return(repo) allow(subject.loader).to receive(:puppetfile_content).and_return <<-EOPF # Track control branch and fall-back to main if no matching branch. mod 'hieradata', :git => 'git@git.example.com:organization/hieradata.git', :branch => :control_branch, :default_branch => 'main' EOPF expect(subject.logger).not_to receive(:warn). with(/Mismatch between passed and initialized.*preferring passed value/) subject.load loaded_module = subject.modules.first expect(loaded_module.version).to eq('foo') end it 'overrides module loader init if needed' do # This path doesn't matter so long as it has a Puppetfile within it path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {overrides: {environments: {default_branch_override: 'foo'}}}) repo = instance_double('R10K::Git::StatefulRepository') allow(repo).to receive(:resolve).with('bar').and_return(true) allow(R10K::Git::StatefulRepository).to receive(:new).and_return(repo) allow(subject.loader).to receive(:puppetfile_content).and_return <<-EOPF # Track control branch and fall-back to main if no matching branch. mod 'hieradata', :git => 'git@git.example.com:organization/hieradata.git', :branch => :control_branch, :default_branch => 'main' EOPF expect(subject.logger).to receive(:warn). 
with(/Mismatch between passed and initialized.*preferring passed value/) subject.load('bar') loaded_module = subject.modules.first expect(loaded_module.version).to eq('bar') end it 'does not warn if passed and initialized default_branch_overrides match' do # This path doesn't matter so long as it has a Puppetfile within it path = File.join(PROJECT_ROOT, 'spec', 'fixtures', 'unit', 'puppetfile', 'valid-forge-with-version') subject = described_class.new(path, {overrides: {environments: {default_branch_override: 'foo'}}}) repo = instance_double('R10K::Git::StatefulRepository') allow(repo).to receive(:resolve).with('foo').and_return(true) allow(R10K::Git::StatefulRepository).to receive(:new).and_return(repo) allow(subject.loader).to receive(:puppetfile_content).and_return <<-EOPF # Track control branch and fall-back to main if no matching branch. mod 'hieradata', :git => 'git@git.example.com:organization/hieradata.git', :branch => :control_branch, :default_branch => 'main' EOPF expect(subject.logger).not_to receive(:warn). with(/Mismatch between passed and initialized.*preferring passed value/) subject.load('foo') loaded_module = subject.modules.first expect(loaded_module.version).to eq('foo') end end describe "accepting a visitor" do it "passes itself to the visitor" do visitor = spy('visitor') expect(visitor).to receive(:visit).with(:puppetfile, subject) subject.accept(visitor) end it "synchronizes each module if the visitor yields" do visitor = spy('visitor') expect(visitor).to receive(:visit) do |type, other, &block| expect(type).to eq :puppetfile expect(other).to eq subject block.call end mod1 = instance_double('R10K::Module::Base', :cachedir => :none) mod2 = instance_double('R10K::Module::Base', :cachedir => :none) expect(mod1).to receive(:sync) expect(mod2).to receive(:sync) expect(subject).to receive(:modules).and_return([mod1, mod2]) subject.accept(visitor) end it "creates a thread pool to visit concurrently if pool_size setting is greater than one" do pool_size = 3 subject.settings[:pool_size] = pool_size visitor = spy('visitor') expect(visitor).to receive(:visit) do |type, other, &block| expect(type).to eq :puppetfile expect(other).to eq subject block.call end mod1 = instance_double('R10K::Module::Base', :cachedir => :none) mod2 = instance_double('R10K::Module::Base', :cachedir => :none) expect(mod1).to receive(:sync) expect(mod2).to receive(:sync) expect(subject).to receive(:modules).and_return([mod1, mod2]) expect(Thread).to receive(:new).exactly(pool_size).and_call_original expect(Queue).to receive(:new).and_call_original.twice subject.accept(visitor) end it "Creates queues of modules grouped by cachedir" do visitor = spy('visitor') expect(visitor).to receive(:visit) do |type, other, &block| expect(type).to eq :puppetfile expect(other).to eq subject block.call end m1 = instance_double('R10K::Module::Base', :cachedir => '/dev/null/A') m2 = instance_double('R10K::Module::Base', :cachedir => '/dev/null/B') m3 = instance_double('R10K::Module::Base', :cachedir => '/dev/null/C') m4 = instance_double('R10K::Module::Base', :cachedir => '/dev/null/C') m5 = instance_double('R10K::Module::Base', :cachedir => '/dev/null/D') m6 = instance_double('R10K::Module::Base', :cachedir => '/dev/null/D') modules = [m1, m2, m3, m4, m5, m6] queue = R10K::ContentSynchronizer.modules_visit_queue(modules, visitor, subject) expect(queue.length).to be 4 queue_array = 4.times.map { queue.pop } expect(queue_array).to match_array([[m1], [m2], [m3, m4], [m5, m6]]) end end end 
r10k-4.0.2/spec/unit/settings/000077500000000000000000000000001460033767200160535ustar00rootroot00000000000000r10k-4.0.2/spec/unit/settings/collection_spec.rb000066400000000000000000000074131460033767200215520ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/collection' require 'r10k/settings/definition' describe R10K::Settings::Collection do let(:symbol_defn) { R10K::Settings::Definition.new(:symbol_defn, :validate => lambda { |x| raise TypeError unless x.is_a?(Symbol) }) } let(:default_defn) { R10K::Settings::Definition.new(:default_defn, :default => lambda { "Defaults are fun" }) } subject do described_class.new(:collection, [symbol_defn, default_defn]) end it_behaves_like "a setting with ancestors" describe "#evaluate" do it "assigns values, validates them, and resolves a final value" do expect(subject).to receive(:assign).with({:default_defn => :squid}) expect(subject).to receive(:validate) expect(subject).to receive(:resolve) subject.evaluate({:default_defn => :squid}) end end describe '#assign' do it "assigns values to the appropriate setting" do subject.assign({:symbol_defn => :hello}) expect(symbol_defn.value).to eq :hello end it "can accept invalid settings" do subject.assign({:hardly_a_setting => "nope nope nope"}) end it "silently ignores attempts to assign nil" do subject.assign(nil) end end describe '#validate' do it "raises an error containing a hash of nested validation errors" do subject.assign({:symbol_defn => "Definitely not a symbol"}) expect { errors = subject.validate }.to raise_error do |error| expect(error).to be_a_kind_of(R10K::Settings::Collection::ValidationError) errors = error.errors expect(errors.size).to eq 1 expect(errors[:symbol_defn]).to be_a_kind_of(TypeError) end end it "it does not raise an error if no errors were found" do subject.assign({:default_defn => "yep"}) expect(subject.validate).to be_nil end end describe '#resolve' do it "returns a frozen hash of all settings" do subject.assign({:symbol_defn => :some_value}) rv = subject.resolve expect(rv).to be_frozen expect(rv).to eq({:symbol_defn => :some_value, :default_defn => "Defaults are fun"}) end end end describe R10K::Settings::Collection::ValidationError do let(:flat_errors) do described_class.new("Validation failures for some group", errors: { some_defn: ArgumentError.new("some_defn is wrong, somehow."), uri_setting: ArgumentError.new("uri_setting NOTAURI is not a URI.") }) end let(:flat_error_text) do [ "Validation failures for some group:", " some_defn:", " some_defn is wrong, somehow.", " uri_setting:", " uri_setting NOTAURI is not a URI."] .join("\n") end let(:nested_errors) do described_class.new("Validation failures for some nesting group", errors: { file_setting: ArgumentError.new("file_setting is a potato, not a file."), nested: flat_errors }) end let(:nested_error_text) do [ "Validation failures for some nesting group:", " file_setting:", " file_setting is a potato, not a file.", " nested:", " Validation failures for some group:", " some_defn:", " some_defn is wrong, somehow.", " uri_setting:", " uri_setting NOTAURI is not a URI." ].join("\n") end describe "formatting a human readable error message" do describe "no with no nested validation errors" do it "generates a human readable set of validation errors." do expect(flat_errors.format).to eq flat_error_text end end describe "with nested validation errors" do it "generates a human readable set of validation errors." 
do expect(nested_errors.format).to eq nested_error_text end end end end r10k-4.0.2/spec/unit/settings/container_spec.rb000066400000000000000000000044621460033767200214020ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings' describe R10K::Settings::Container do describe 'validating keys' do it 'can add new valid keys' do subject.add_valid_key(:v) subject[:v] end it 'can check if a key is valid' do subject.add_valid_key(:v) expect(subject.valid_key?(:v)).to be_truthy end it 'can list all valid keys' do subject.add_valid_key(:v) subject.add_valid_key(:w) expect(subject.valid_keys).to include :v expect(subject.valid_keys).to include :w end end describe 'specifying settings' do it 'fails if a setting application uses an invalid key' do expect { subject[:invalid] = 'fail' }.to raise_error R10K::Settings::Container::InvalidKey end it 'can look up values that it sets' do subject.add_valid_key :v subject[:v] = 'set' expect(subject[:v]).to eq 'set' end end describe 'looking up settings' do before do subject.add_valid_key :v end it 'fails if a setting lookup uses an invalid key' do expect { subject[:invalid] }.to raise_error R10K::Settings::Container::InvalidKey end it 'returns nil if a key is valid but no setting is present' do expect(subject[:v]).to be_nil end describe 'with a parent container' do let(:parent) { described_class.new.tap { |p| p.add_valid_key :v } } subject { described_class.new(parent) } it 'uses its setting over a parent value' do subject[:v] = 'child' parent[:v] = 'parent' expect(subject[:v]).to eq 'child' end it 'duplicates and stores the parent object to avoid modifying the parent object' do parent[:v] = {} subject[:v][:hello] = "world" expect(subject[:v]).to eq({hello: "world"}) expect(parent[:v]).to eq({}) end it 'falls back to the parent value if it does not have a value' do parent[:v] = 'parent' expect(subject[:v]).to eq 'parent' end end end describe "resetting" do before do subject.add_valid_key :v end it "unsets all settings" do subject[:v] = "hi" subject.reset! expect(subject[:v]).to be_nil end it "doesn't remove valid values" do subject.reset! 
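      # reset! clears any assigned values but should leave the registered valid keys in place.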
expect(subject.valid_key?(:v)).to be_truthy end end end r10k-4.0.2/spec/unit/settings/definition_spec.rb000066400000000000000000000047741460033767200215560ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/definition' require 'r10k/settings/collection' require 'r10k/settings/list' describe R10K::Settings::Definition do subject { described_class.new(:setting) } it_behaves_like "a setting with ancestors" describe "#evaluate" do it "assigns a value, validates it, and resolves a final value" do expect(subject).to receive(:assign).with("myvalue") expect(subject).to receive(:validate) expect(subject).to receive(:resolve) subject.evaluate("myvalue") end end describe "#assign" do it 'stores the provided value' do subject.assign("I'm the value") expect(subject.value).to eq "I'm the value" end it "normalizes the stored value when a normalize hook is set" do subject = described_class.new(:setting, :normalize => lambda { |input| input.to_sym }) subject.assign("symbolizeme") expect(subject.value).to eq :symbolizeme end end describe "#validate" do it "does nothing if a value has not been assigned" do subject = described_class.new(:setting, :validate => lambda { |_| raise "Shouldn't be called" }) subject.validate end it "does nothing if a validate hook has not been assigned" do subject.assign("I'm the value") subject.validate end it "raises up errors raised from the validate hook" do subject = described_class.new(:satellite, :validate => lambda { |input| raise ArgumentError, "Invalid value #{input}: that's no moon!" }) subject.assign("Alderaan") expect { subject.validate }.to raise_error(ArgumentError, "Invalid value Alderaan: that's no moon!") end it "returns if the validate hook did not raise an error" do subject = described_class.new(:setting, :validate => lambda { |_| "That's a moon" }) subject.assign("Mun") subject.validate end end describe "#resolve" do it "returns the value when the value has been given" do subject.assign("Mun") expect(subject.resolve).to eq "Mun" end it "resolves the default when the default is a proc" do subject = described_class.new(:setting, :default => lambda { "Minmus" }) expect(subject.resolve).to eq "Minmus" end it "returns the default when the default is not a proc" do subject = described_class.new(:setting, :default => "Ike") expect(subject.resolve).to eq "Ike" end it "returns nil when there is no value nor default" do expect(subject.resolve).to be_nil end end end r10k-4.0.2/spec/unit/settings/enum_definition_spec.rb000066400000000000000000000011131460033767200225620ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/enum_definition' describe R10K::Settings::EnumDefinition do subject { described_class.new(:enum, :enum => %w[one two three]) } describe '#validate' do it "doesn't raise an error when given an expected value" do subject.assign('two') subject.validate end it "raises an error when given a value outside the enum" do subject.assign('dos') expect { subject.validate }.to raise_error(ArgumentError, "Setting enum should be one of #{%w[one two three].inspect}, not 'dos'") end end end r10k-4.0.2/spec/unit/settings/inheritance_spec.rb000066400000000000000000000017421460033767200217070ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/collection' require 'r10k/settings/definition' RSpec.describe 'R10K::Settings inheritance' do subject do R10K::Settings::Collection.new(:parent_settings, [ R10K::Settings::Definition.new(:banana, { :default => 'cavendish', }), R10K::Settings::Collection.new(:child_settings, 
[ R10K::Settings::Definition.new(:banana, { :default => :inherit, }), ]), ]) end describe "child settings" do let(:setting) { subject[:child_settings][:banana] } context "when child value is not set" do it "should resolve to default value from parent" do expect(setting.value).to be_nil expect(setting.resolve).to eq 'cavendish' end end context "when child value is set" do before(:each) { setting.assign('gros michel') } it "should resolve to child value" do expect(setting.resolve).to eq 'gros michel' end end end end r10k-4.0.2/spec/unit/settings/list_spec.rb000066400000000000000000000051731460033767200203730ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/list' require 'r10k/settings/collection' require 'r10k/settings/definition' require 'r10k/settings/uri_definition' describe R10K::Settings::List do let(:item_proc) do lambda { R10K::Settings::URIDefinition.new(nil, { :desc => "A URI in a list" }) } end subject do described_class.new(:test_list, item_proc, { :desc => "A test setting list" }) end it_behaves_like "a setting with ancestors" describe '#assign' do it "calls item_proc for each item assigned" do expect(R10K::Settings::URIDefinition).to receive(:new).and_call_original.exactly(3).times subject.assign([ "uri_1", "uri_2", "uri_3"]) end it "claims ownership of newly added items" do subject.assign([ "uri_1", "uri_2", "uri_3"]) item_parents = subject.instance_variable_get(:@items).collect { |i| i.parent } expect(item_parents).to all(eq subject) end it "assigns value to each item" do new_values = [ "uri_1", "uri_2", "uri_3"] subject.assign(new_values) item_values = subject.instance_variable_get(:@items).collect { |i| i.value } expect(item_values).to eq new_values end it "silently ignores attempts to assign nil" do subject.assign(nil) end end describe '#validate' do it "raises an error containing a list of every item with validation errors" do subject.assign([ "uri 1", "uri 2", "http://www.example.com"]) expect { subject.validate }.to raise_error do |error| expect(error).to be_a_kind_of(R10K::Settings::List::ValidationError) errors = error.errors.collect { |key, val| val } expect(errors.size).to eq 2 expect(errors).to all(be_a_kind_of(ArgumentError)) expect(errors.collect { |e| e.message }).to all(match /requires a URL.*could not be parsed/i) end end it "it does not raise an error if no errors were found" do subject.assign([ "http://www.example.com" ]) expect(subject.validate).to be_nil end end describe '#resolve' do it "returns a frozen list of all items" do subject.assign([ "uri_1", "uri_2" ]) rv = subject.resolve expect(rv).to be_frozen expect(rv).to eq([ "uri_1", "uri_2" ]) end end end describe R10K::Settings::List::ValidationError do subject do described_class.new("Sample List Validation Errors", errors: { 2 => ArgumentError.new("Sample List Item Error"), }) end it "generates a human readable error message for the invalid item" do message = subject.format expect(message).to match /sample list validation errors.*item 2.*sample list item error/im end end r10k-4.0.2/spec/unit/settings/loader_spec.rb000066400000000000000000000104251460033767200206620ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/loader' describe R10K::Settings::Loader do context 'populate_loadpath' do it 'includes /etc/puppetlabs/r10k/r10k.yaml in the loadpath' do expect(subject.loadpath).to include('/etc/puppetlabs/r10k/r10k.yaml') end it 'includes /etc/r10k.yaml in the loadpath' do expect(subject.loadpath).to include('/etc/r10k.yaml') end it 'does include the current 
working directory in the loadpath' do allow(Dir).to receive(:getwd).and_return '/some/random/path/westvletren' expect(subject.loadpath).to include('/some/random/path/westvletren/r10k.yaml') end it 'does not include /some/random/path/atomium/r10k.yaml in the loadpath' do expect(subject.loadpath).not_to include('/some/random/path/atomium/r10k.yaml') end end context 'search' do it 'returns the correct default location' do allow(File).to receive(:file?).and_return false allow(File).to receive(:file?).with('/etc/puppetlabs/r10k/r10k.yaml').and_return true allow(File).to receive(:file?).with('/etc/r10k.yaml').and_return true expect(subject.search).to eq '/etc/puppetlabs/r10k/r10k.yaml' end it 'issues a warning if both default locations are present' do allow(File).to receive(:file?).and_return false allow(File).to receive(:file?).with('/etc/puppetlabs/r10k/r10k.yaml').and_return true allow(File).to receive(:file?).with('/etc/r10k.yaml').and_return true logger_dbl = double('Logging') allow(subject).to receive(:logger).and_return logger_dbl expect(logger_dbl).to receive(:warn).with('Both /etc/puppetlabs/r10k/r10k.yaml and /etc/r10k.yaml configuration files exist.') expect(logger_dbl).to receive(:warn).with('/etc/puppetlabs/r10k/r10k.yaml will be used.') subject.search end it 'issues a warning if the old location is used' do allow(File).to receive(:file?).and_return false allow(File).to receive(:file?).with('/etc/puppetlabs/r10k/r10k.yaml').and_return false allow(File).to receive(:file?).with('/etc/r10k.yaml').and_return true logger_dbl = double('Logging') allow(subject).to receive(:logger).and_return logger_dbl expect(logger_dbl).to receive(:warn).with("The r10k configuration file at /etc/r10k.yaml is deprecated.") expect(logger_dbl).to receive(:warn).with('Please move your r10k configuration to /etc/puppetlabs/r10k/r10k.yaml.') subject.search end describe 'using an override value' do it 'uses the override when set and ignores files in the load path' do expect(File).to_not receive(:file?) 
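        # With an explicit override path, the loader should return it directly and never probe the filesystem load path.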
expect(subject.search('/some/override/r10k.yaml')).to eq '/some/override/r10k.yaml' end it 'ignores a nil override value' do allow(File).to receive(:file?).and_return false allow(File).to receive(:file?).with('/etc/puppetlabs/r10k/r10k.yaml').and_return true allow(File).to receive(:file?).with('/etc/r10k.yaml').and_return true expect(subject.search(nil)).to eq('/etc/puppetlabs/r10k/r10k.yaml') end end end context '#read' do it "raises an error if no config file could be found" do expect(subject).to receive(:search).and_return nil expect { subject.read }.to raise_error(R10K::Settings::Loader::ConfigError, "No configuration file given, no config file found in current directory, and no global config present") end it "raises an error if the YAML file load raises an error" do expect(subject).to receive(:search).and_return '/some/path/r10k.yaml' expect(YAML).to receive(:load_file).and_raise(Errno::ENOENT, "/no/such/file") expect { subject.read }.to raise_error(R10K::Settings::Loader::ConfigError, "Couldn't load config file: No such file or directory - /no/such/file") end it "recursively replaces string keys with symbol keys in the parsed structure" do expect(subject).to receive(:search).and_return '/some/path/r10k.yaml' expect(YAML).to receive(:load_file).and_return({ 'cachedir' => '/var/cache/r10k', 'git' => { 'provider' => 'rugged', } }) expect(subject.read).to eq({ :cachedir => '/var/cache/r10k', :git => { :provider => 'rugged', } }) end end end r10k-4.0.2/spec/unit/settings/uri_definition_spec.rb000066400000000000000000000012021460033767200224140ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings/uri_definition' describe R10K::Settings::URIDefinition do subject { described_class.new(:uri) } it "passes validation if a value has not been set" do expect(subject.validate).to be_nil end it "passes validation when given a valid url" do subject.assign("http://definitely.a/url") expect(subject.validate).to be_nil end it "raises an error when given an invalid URL" do subject.assign("That's no URI!") expect { subject.validate }.to raise_error(ArgumentError, "Setting uri requires a URL but 'That's no URI!' 
could not be parsed as a URL") end end r10k-4.0.2/spec/unit/settings_spec.rb000066400000000000000000000275511460033767200174240ustar00rootroot00000000000000require 'spec_helper' require 'r10k/settings' require 'r10k/util/exec_env' describe R10K::Settings do describe "git settings" do subject { described_class.git_settings } describe "provider" do it "normalizes valid values to a symbol" do output = subject.evaluate("provider" => "rugged") expect(output[:provider]).to eq(:rugged) end end describe "username" do it "defaults to the current user" do expect(Etc).to receive(:getlogin).and_return("puppet") output = subject.evaluate({}) expect(output[:username]).to eq("puppet") end it "passes values through unchanged" do output = subject.evaluate("username" => "git") expect(output[:username]).to eq("git") end end describe "private_key" do it "passes values through unchanged" do output = subject.evaluate("private_key" => "/etc/puppetlabs/r10k/id_rsa") expect(output[:private_key]).to eq("/etc/puppetlabs/r10k/id_rsa") end end describe "proxy" do it "accepts valid URIs" do output = subject.evaluate("proxy" => "http://proxy.tessier-ashpool.freeside:3128") expect(output[:proxy]).to eq "http://proxy.tessier-ashpool.freeside:3128" end it "rejects invalid URIs" do expect { subject.evaluate("proxy" => "that's no proxy!") }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'git' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:proxy]).to be_a_kind_of(ArgumentError) expect(err.errors[:proxy].message).to match(/could not be parsed as a URL/) end end end end describe "forge settings" do subject { described_class.forge_settings } describe "proxy" do it "accepts valid URIs" do output = subject.evaluate("proxy" => "http://proxy.tessier-ashpool.freeside:3128") expect(output[:proxy]).to eq "http://proxy.tessier-ashpool.freeside:3128" end it "rejects invalid URIs" do expect { subject.evaluate("proxy" => "that's no proxy!") }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'forge' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:proxy]).to be_a_kind_of(ArgumentError) expect(err.errors[:proxy].message).to match(/could not be parsed as a URL/) end end end describe "baseurl" do it "accepts valid URIs" do output = subject.evaluate("baseurl" => "https://forge.tessier-ashpool.freeside") expect(output[:baseurl]).to eq "https://forge.tessier-ashpool.freeside" end it "rejects invalid URIs" do expect { subject.evaluate("baseurl" => "that's no forge!") }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'forge' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:baseurl]).to be_a_kind_of(ArgumentError) expect(err.errors[:baseurl].message).to match(/could not be parsed as a URL/) end end end describe "allow_puppetfile_override" do it 'is false by default' do expect(subject.evaluate({})[:allow_puppetfile_override]).to eq(false) end it 'can be set to true' do expect(subject.evaluate({"allow_puppetfile_override" => true})[:allow_puppetfile_override]).to eq(true) end it "raises an error for non-boolean values" do expect { subject.evaluate({"allow_puppetfile_override" => 'invalid_string'}) }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'forge' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:allow_puppetfile_override]).to be_a_kind_of(ArgumentError) expect(err.errors[:allow_puppetfile_override].message).to match(/`allow_puppetfile_override` can only be a 
boolean value, not 'invalid_string'/) end end end end describe "deploy settings" do subject { described_class.deploy_settings } describe 'exclude_spec' do it 'is true by default' do expect(subject.evaluate({})[:exclude_spec]).to eq(true) end it 'can be set to false' do expect(subject.evaluate({"exclude_spec" => false})[:exclude_spec]).to eq(false) end it "raises an error for non-boolean values" do expect { subject.evaluate({"exclude_spec" => 'invalid_string'}) }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'deploy' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:exclude_spec]).to be_a_kind_of(ArgumentError) expect(err.errors[:exclude_spec].message).to match(/`exclude_spec` can only be a boolean value, not 'invalid_string'/) end end end describe "write_lock" do it "accepts a string with a reason for the write lock" do output = subject.evaluate("write_lock" => "No maintenance window active, code freeze till 2038-01-19") expect(output[:write_lock]).to eq("No maintenance window active, code freeze till 2038-01-19") end it "accepts false and null values for the write lock" do output = subject.evaluate("write_lock" => false) expect(output[:write_lock]).to eq false end it "rejects non-string truthy values for the write lock" do expect { subject.evaluate("write_lock" => %w[list of reasons why deploys are locked]) }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'deploy' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:write_lock]).to be_a_kind_of(ArgumentError) expect(err.errors[:write_lock].message).to match(/should be a string containing the reason/) end end end describe 'puppet_path' do it 'when executable raises no error' do expect(File).to receive(:executable?).with('/nonexistent').and_return(true) expect { subject.evaluate('puppet_path' => '/nonexistent') }.not_to raise_error end it 'when not executable raises error' do expect(File).to receive(:executable?).with('/nonexistent') expect { subject.evaluate('puppet_path' => '/nonexistent') }.to raise_error(R10K::Settings::Collection::ValidationError) end end describe 'puppet_conf' do it 'when file raises no error' do allow(File).to receive(:readable?).with('/nonexistent').and_return(true) expect { subject.evaluate('puppet_conf' => '/nonexistent') }.not_to raise_error end it 'when not file raises error' do allow(File).to receive(:readable?).with('/nonexistent').and_return(false) expect { subject.evaluate('puppet_conf' => '/nonexistent') }.to raise_error(R10K::Settings::Collection::ValidationError) end end end describe "global settings" do subject { described_class.global_settings } describe "sources" do it "passes values through unchanged" do output = subject.evaluate("sources" => {"puppet" => {"remote" => "https://git.tessier-ashpool.freeside"}}) expect(output[:sources]).to eq({"puppet" => {"remote" => "https://git.tessier-ashpool.freeside"}}) end end describe "cachedir" do it "passes values through unchanged" do output = subject.evaluate("cachedir" => "/srv/r10k/git") expect(output[:cachedir]).to eq("/srv/r10k/git") end end describe "postrun" do it "accepts an argument vector" do output = subject.evaluate("postrun" => ["curl", "-F", "deploy=done", "http://reporting.tessier-ashpool.freeside/r10k"]) expect(output[:postrun]).to eq(["curl", "-F", "deploy=done", "http://reporting.tessier-ashpool.freeside/r10k"]) end it "rejects a string command" do expect { subject.evaluate("postrun" => "curl -F 'deploy=done' 
https://reporting.tessier-ashpool.freeside/r10k") }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'global' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:postrun]).to be_a_kind_of(ArgumentError) expect(err.errors[:postrun].message).to eq("The postrun setting should be an array of strings, not a String") end end end describe "pool_size" do it "accepts integers greater than zero" do output = subject.evaluate("pool_size" => 5) expect(output[:pool_size]).to eq 5 end it "rejects non integer values" do expect { subject.evaluate("pool_size" => "5") }.to raise_error do |err| expect(err.errors.size).to eq 1 expect(err.errors[:pool_size]).to be_a_kind_of(ArgumentError) expect(err.errors[:pool_size].message).to match(/The pool_size setting should be an integer/) end end it "rejects integers smaller than one" do expect { subject.evaluate("pool_size" => 0) }.to raise_error do |err| expect(err.errors.size).to eq 1 expect(err.errors[:pool_size]).to be_a_kind_of(ArgumentError) expect(err.errors[:pool_size].message).to match(/The pool_size setting should be greater than zero/) end expect { subject.evaluate("pool_size" => -3) }.to raise_error do |err| expect(err.errors.size).to eq 1 expect(err.errors[:pool_size]).to be_a_kind_of(ArgumentError) expect(err.errors[:pool_size].message).to match(/The pool_size setting should be greater than zero/) end end end describe "proxy" do it "accepts valid URIs" do output = subject.evaluate("proxy" => "http://proxy.tessier-ashpool.freeside:3128") expect(output[:proxy]).to eq "http://proxy.tessier-ashpool.freeside:3128" end it "rejects invalid URIs" do expect { subject.evaluate("proxy" => "that's no proxy!") }.to raise_error do |err| expect(err.message).to match(/Validation failed for 'global' settings group/) expect(err.errors.size).to eq 1 expect(err.errors[:proxy]).to be_a_kind_of(ArgumentError) expect(err.errors[:proxy].message).to match(/could not be parsed as a URL/) end end describe "setting a default value" do %w[HTTPS_PROXY https_proxy HTTP_PROXY http_proxy].each do |env_var| it "respects the #{env_var} environment variable" do R10K::Util::ExecEnv.withenv(env_var => "http://proxy.value/#{env_var}") do output = subject.evaluate({}) expect(output[:proxy]).to eq("http://proxy.value/#{env_var}") end end end end end describe "git settings" do it "passes settings through to the git settings" do output = subject.evaluate("git" => {"provider" => "shellgit", "username" => "git"}) expect(output[:git]).to include(:provider => :shellgit, :username => "git") end end describe "forge settings" do it "passes settings through to the forge settings" do output = subject.evaluate("forge" => {"baseurl" => "https://forge.tessier-ashpool.freeside", "proxy" => "https://proxy.tessier-ashpool.freesize:3128", "authorization_token" => "faketoken", "allow_puppetfile_override" => true}) expect(output[:forge]).to eq(:baseurl => "https://forge.tessier-ashpool.freeside", :proxy => "https://proxy.tessier-ashpool.freesize:3128", :authorization_token => "faketoken", :allow_puppetfile_override => true) end end end end r10k-4.0.2/spec/unit/source/000077500000000000000000000000001460033767200155135ustar00rootroot00000000000000r10k-4.0.2/spec/unit/source/base_spec.rb000066400000000000000000000016271460033767200177720ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' describe R10K::Source::Base do subject { described_class.new('base', '/some/nonexistent/path') } describe "accepting a visitor" do it "passes itself to the visitor" do 
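      # The source is expected to call visit on the visitor with the :source type and itself.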
visitor = spy('visitor') expect(visitor).to receive(:visit).with(:source, subject) subject.accept(visitor) end it "passes the visitor to each environment if the visitor yields" do visitor = spy('visitor') expect(visitor).to receive(:visit) do |type, other, &block| expect(type).to eq :source expect(other).to eq subject block.call end env1 = spy('environment') expect(env1).to receive(:accept).with(visitor) env2 = spy('environment') expect(env2).to receive(:accept).with(visitor) expect(subject).to receive(:environments).and_return([env1, env2]) subject.accept(visitor) end end end r10k-4.0.2/spec/unit/source/exec_spec.rb000066400000000000000000000055251460033767200200050ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' require 'json' require 'yaml' describe R10K::Source::Exec do let(:environments_hash) do { 'production' => { 'remote' => 'https://git.example.com/puppet/control-repo.git', 'ref' => 'release-141', 'modules' => { 'puppetlabs-stdlib' => '6.1.0', 'puppetlabs-ntp' => '8.1.0', 'example-myapp1' => { 'git' => 'https://git.example.com/puppet/example-myapp1.git', 'ref' => 'v1.3.0' } } }, 'development' => { 'remote' => 'https://git.example.com/puppet/control-repo.git', 'ref' => 'master', 'modules' => { 'puppetlabs-stdlib' => '6.1.0', 'puppetlabs-ntp' => '8.1.0', 'example-myapp1' => { 'git' => 'https://git.example.com/puppet/example-myapp1.git', 'ref' => 'v1.3.1' } } } } end describe 'initialize' do context 'with a valid command' do context 'that produces valid output' do it 'accepts json' do allow_any_instance_of(R10K::Util::Subprocess) .to receive(:execute) .and_return(double('result', stdout: environments_hash.to_json)) source = described_class.new('execsource', '/some/nonexistent/dir', command: '/path/to/command') expect(source.environments.map(&:name)).to contain_exactly('production', 'development') end it 'accepts yaml' do allow_any_instance_of(R10K::Util::Subprocess) .to receive(:execute) .and_return(double('result', stdout: environments_hash.to_yaml)) source = described_class.new('execsource', '/some/nonexistent/dir', command: '/path/to/command') expect(source.environments.map(&:name)).to contain_exactly('production', 'development') end end context 'that produces invalid output' do it 'raises an error for non-json, non-yaml data' do allow_any_instance_of(R10K::Util::Subprocess) .to receive(:execute) .and_return(double('result', stdout: "one:\ntwo\n")) source = described_class.new('execsource', '/some/nonexistent/dir', command: '/path/to/command') expect { source.environments }.to raise_error(/Error parsing command output/) end it 'raises an error for yaml data that is not a hash' do allow_any_instance_of(R10K::Util::Subprocess) .to receive(:execute) .and_return(double('result', stdout: "[one, two]")) source = described_class.new('execsource', '/some/nonexistent/dir', command: '/path/to/command') expect { source.environments }.to raise_error(R10K::Error, /Environment source execsource.*did not return valid environment data.*one.*two.*/m) end end end end end r10k-4.0.2/spec/unit/source/git_spec.rb000066400000000000000000000173401460033767200176420ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' describe R10K::Source::Git do subject do described_class.new('mysource', '/some/nonexistent/dir', {:remote => 'https://git-server/repo.git'}) end it "stores the name" do expect(subject.name).to eq 'mysource' end it "stores the basedir" do expect(subject.basedir).to eq '/some/nonexistent/dir' end describe "preloading" do it "fetches the git cache" do 
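      # preload! is expected to trigger a sync of the backing git cache.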
expect(subject.cache).to receive(:sync) subject.preload! end end describe "lazily generating environments" do it "returns an empty list of environments when the cache has not been created" do allow(subject.cache).to receive(:cached?).and_return false expect(subject.environments).to be_empty end it "generates environments when the cache is present and environments have not been loaded" do allow(subject.cache).to receive(:cached?).and_return true allow(subject).to receive(:generate_environments).and_return %w[hi] expect(subject.environments.size).to eq(1) end it "doesn't recreate environments if they have already been loaded" do allow(subject.cache).to receive(:cached?).and_return true allow(subject).to receive(:generate_environments).once.and_return %w[hi] expect(subject.environments.size).to eq(1) expect(subject.environments.size).to eq(1) end end describe "eagerly generating environments" do before do allow(subject.cache).to receive(:branches).and_return %w[master] end let(:master_env) { subject.generate_environments.first } it "creates an environment for each branch" do expect(subject.generate_environments.size).to eq(1) end it "copies the source remote to the environment" do expect(master_env.remote).to eq subject.remote end it "uses the branch name as the directory by default" do expect(master_env.dirname).to eq 'master' end end describe "generate_environments respects ignore_branch_prefixes setting" do before do allow(subject.cache).to receive(:branches).and_return ['master', 'development', 'production', 'not_dev_test_me', 'dev_test', 'dev', 'test_2'] subject.instance_variable_set(:@ignore_branch_prefixes, ['dev', 'test']) end let(:environments) { subject.generate_environments } it "creates an environment for each branch not in ignore_branch_prefixes" do expect(subject.generate_environments.size).to eq(3) end it "copies the source remote to the environment" do expect(environments[0].remote).to eq subject.remote expect(environments[1].remote).to eq subject.remote expect(environments[2].remote).to eq subject.remote end it "uses the branch name as the directory by default" do expect(environments[0].dirname).to eq 'master' expect(environments[1].dirname).to eq 'production' expect(environments[2].dirname).to eq 'not_dev_test_me' end end describe "filtering branches with ignore prefixes" do let(:branches) { ['master', 'development', 'production', 'not_dev_test_me', 'dev_test', 'dev', 'test_2'] } let(:ignore_prefixes) { ['dev', 'test'] } it "filters branches" do expect(subject.filter_branches_by_regexp(branches, ignore_prefixes)).to eq(['master', 'production', 'not_dev_test_me']) end end describe "filtering branches with command" do let(:branches) { ['master', 'development', 'production'] } if R10K::Util::Platform.windows? let(:filter_command) { 'powershell.exe if ($env:R10K_BRANCH.equals(\"development\")) {exit 1} else {exit 0}' } else let(:filter_command) { 'sh -c "[ $R10K_BRANCH != development ]"' } end it "filters branches" do expect(subject.filter_branches_by_command(branches, filter_command)).to eq(['master', 'production']) end end describe "generate_environments respects filter_command setting" do before do allow(subject.cache).to receive(:branches).and_return ['master', 'development', 'production'] if R10K::Util::Platform.windows? 
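        # On Windows the filter command is written in PowerShell; on other platforms a POSIX shell test is used instead.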
subject.instance_variable_set(:@filter_command, 'powershell.exe if ($env:R10K_BRANCH.equals(\"master\")) {exit 1} else {exit 0}') else subject.instance_variable_set(:@filter_command, '[ $R10K_BRANCH != master ]') end end let(:environments) { subject.generate_environments } it "creates an environment for each branch not filtered by filter_command" do expect(subject.generate_environments.size).to eq(2) end end describe "generate_environments respects filter_command setting and name" do before do allow(subject.cache).to receive(:branches).and_return ['master', 'development', 'production'] if R10K::Util::Platform.windows? subject.instance_variable_set(:@filter_command, 'powershell.exe if ($env:R10K_NAME.equals(\"mysource\")) {exit 0} else {exit 1}') else subject.instance_variable_set(:@filter_command, '[ $R10K_NAME = mysource ]') end end let(:environments) { subject.generate_environments } it "creates an environment for each branch not filtered by filter_command" do expect(subject.generate_environments.size).to eq(3) end end end describe R10K::Source::Git, "handling invalid branch names" do %w[correct_and_warn correct].each do |setting| describe "when invalid is #{setting}" do subject do described_class.new('/some/nonexistent/dir', 'mysource', { :remote => 'https://git-server/repo.git', :invalid_branches => setting }) end before do allow(subject.cache).to receive(:branches).and_return ['master', 'invalid-branch'] end it "creates an environment for each branch" do expect(subject.generate_environments.size).to eq(2) end it "removes invalid characters from branch names" do invalid_env = subject.generate_environments.last expect(invalid_env.dirname).to eq 'invalid_branch' end end end describe "when invalid is 'error'" do subject do described_class.new('/some/nonexistent/dir', 'mysource', { :remote => 'https://git-server/repo.git', :invalid_branches => 'error', }) end before do allow(subject.cache).to receive(:branches).and_return ['master', 'invalid-branch'] end it "only creates an environment for valid branches" do expect(subject.generate_environments.size).to eq(1) end end end # Since prefixing is an immutable property of a source, it's easier to create # a new context and duplicate stubs in a single location rather than packing a # single test with all the stubs that entirely recreate the source. describe R10K::Source::Git, 'when prefixing is enabled' do subject do described_class.new( 'prefixed', '/some/nonexistent/dir', { :prefix => true, :remote => 'https://git-server/repo.git', }) end describe "generating prefixed environments" do before do allow(subject.cache).to receive(:cached?).and_return true allow(subject.cache).to receive(:branches).and_return %w[master other] end let(:environments) { subject.environments } it "creates an environment for each branch" do expect(subject.environments.size).to eq(2) end it "prefixes the source name to environments when prefixing is enabled" do expect(environments[0].dirname).to eq 'prefixed_master' expect(environments[1].dirname).to eq 'prefixed_other' end end end describe R10K::Source::Git, 'registering as a source' do it "registers with the :git key" do expect(R10K::Source.retrieve(:git)).to eq described_class end it "registers with the nil key" do expect(R10K::Source.retrieve(nil)).to eq described_class end end r10k-4.0.2/spec/unit/source/hash_spec.rb000066400000000000000000000030101460033767200177670ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' describe R10K::Source::Hash do describe '.valid_environments_hash?' 
do it "rejects strings" do expect(R10K::Source::Hash.valid_environments_hash?('200 OK')) .to eq false end end let(:environments_hash) do { 'production' => { 'remote' => 'https://git.example.com/puppet/control-repo.git', 'ref' => 'release-141', 'modules' => { 'puppetlabs-stdlib' => '6.1.0', 'puppetlabs-ntp' => '8.1.0', 'example-myapp1' => { 'git' => 'https://git.example.com/puppet/example-myapp1.git', 'ref' => 'v1.3.0' } } }, 'development' => { 'remote' => 'https://git.example.com/puppet/control-repo.git', 'ref' => 'master', 'modules' => { 'puppetlabs-stdlib' => '6.1.0', 'puppetlabs-ntp' => '8.1.0', 'example-myapp1' => { 'git' => 'https://git.example.com/puppet/example-myapp1.git', 'ref' => 'v1.3.1' } } } } end describe "with a prefix" do subject do described_class.new('hashsource', '/some/nonexistent/dir', prefix: 'prefixed', environments: environments_hash) end it "prepends environment names with a prefix" do environments = subject.environments expect(environments[0].dirname).to eq 'prefixed_production' expect(environments[1].dirname).to eq 'prefixed_development' end end end r10k-4.0.2/spec/unit/source/svn_spec.rb000066400000000000000000000137611460033767200176700ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' describe R10K::Source::SVN do subject do described_class.new('mysource', '/some/nonexistent/dir', {:remote => 'https://svn-server.site/repo'}) end it "stores the name" do expect(subject.name).to eq 'mysource' end it "stores the basedir" do expect(subject.basedir).to eq '/some/nonexistent/dir' end describe "lazily generating environments" do it "generates environments when they have not been loaded" do expect(subject).to receive(:generate_environments).and_return %w[hi] expect(subject.environments).to eq %w[hi] end it "doesn't recreate environments if they have already been loaded" do expect(subject).to receive(:generate_environments).once.and_return %w[hi] subject.environments subject.environments end end describe "eagerly generating environments" do before do allow(subject.svn_remote).to receive(:branches).and_return %w[apache dns robobutler] end let(:environments) { subject.generate_environments } it "creates an environment for each branch and the trunk" do expect(environments[0].name).to eq 'production' expect(environments[1].name).to eq 'apache' expect(environments[2].name).to eq 'dns' expect(environments[3].name).to eq 'robobutler' end it "maps trunk to production" do expect(environments[0].remote).to eq 'https://svn-server.site/repo/trunk' end it "sets the remote for branch environments to subdirectories of the branches/ directory" do expect(environments[1].remote).to eq 'https://svn-server.site/repo/branches/apache' expect(environments[2].remote).to eq 'https://svn-server.site/repo/branches/dns' expect(environments[3].remote).to eq 'https://svn-server.site/repo/branches/robobutler' end it "uses the branch name as the directory by default" do expect(environments[0].dirname).to eq 'production' expect(environments[1].dirname).to eq 'apache' expect(environments[2].dirname).to eq 'dns' expect(environments[3].dirname).to eq 'robobutler' end end describe "generate_environments respects ignore_branch_prefixes setting" do before do allow(subject.svn_remote).to receive(:branches).and_return ['master', 'development', 'not_dev_test_me', 'dev_test', 'dev', 'test_2'] subject.instance_variable_set(:@ignore_branch_prefixes, ['dev', 'test']) end let(:environments) { subject.generate_environments } it "creates an environment for each branch not in 
ignore_branch_prefixes" do expect(subject.generate_environments.size).to eq(3) end it "uses the branch name as the directory by default" do expect(environments[0].name).to eq 'production' expect(environments[1].name).to eq 'master' expect(environments[2].name).to eq 'not_dev_test_me' end end describe "filtering branches with ignore prefixes" do let(:branches) { ['master', 'development', 'production', 'not_dev_test_me', 'dev_test', 'dev', 'test_2'] } let(:ignore_prefixes) { ['dev', 'test'] } it "filters branches" do expect(subject.filter_branches(branches, ignore_prefixes)).to eq(['master', 'production', 'not_dev_test_me']) end end end describe R10K::Source::SVN, 'when prefixing is enabled' do subject do described_class.new( 'mysource', '/some/nonexistent/dir', { :remote => 'https://svn-server.site/repo', :prefix => true } ) end describe "generating prefixed environments" do before do allow(subject.svn_remote).to receive(:branches).and_return %w[apache dns robobutler] end let(:environments) { subject.generate_environments } it "creates an environment for each branch and the trunk" do expect(environments.size).to eq(4) end it "prefixes the source name to environments" do expect(environments[0].dirname).to eq 'mysource_production' expect(environments[1].dirname).to eq 'mysource_apache' expect(environments[2].dirname).to eq 'mysource_dns' expect(environments[3].dirname).to eq 'mysource_robobutler' end end end describe R10K::Source::SVN, 'when prefixing is disabled' do subject do described_class.new( 'mysource', '/some/nonexistent/dir', { :remote => 'https://svn-server.site/repo', :prefix => false } ) end describe "generating environments" do before do allow(subject.svn_remote).to receive(:branches).and_return %w[apache dns robobutler] end let(:environments) { subject.generate_environments } it "creates an environment for each branch and the trunk" do expect(environments.size).to eq(4) end it "does not prefix environments" do expect(environments[0].dirname).to eq 'production' expect(environments[1].dirname).to eq 'apache' expect(environments[2].dirname).to eq 'dns' expect(environments[3].dirname).to eq 'robobutler' end end end describe R10K::Source::SVN, 'when prefixing is overridden' do subject do described_class.new( 'mysource', '/some/nonexistent/dir', { :remote => 'https://svn-server.site/repo', :prefix => "tenant1" } ) end describe "generating prefixed environments" do before do allow(subject.svn_remote).to receive(:branches).and_return %w[apache dns robobutler] end let(:environments) { subject.generate_environments } it "creates an environment for each branch and the trunk" do expect(environments.size).to eq(4) end it "prefixes the prefix name to environments" do expect(environments[0].dirname).to eq 'tenant1_production' expect(environments[1].dirname).to eq 'tenant1_apache' expect(environments[2].dirname).to eq 'tenant1_dns' expect(environments[3].dirname).to eq 'tenant1_robobutler' end end end describe R10K::Source::SVN, 'registering as a source' do it "registers with the :svn key" do expect(R10K::Source.retrieve(:svn)).to eq described_class end end r10k-4.0.2/spec/unit/source/yaml_spec.rb000066400000000000000000000024171460033767200200200ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' describe R10K::Source::Yaml do let(:environments_hash) do { 'production' => { 'remote' => 'https://git.example.com/puppet/control-repo.git', 'ref' => 'release-141', 'modules' => { 'puppetlabs-stdlib' => '6.1.0', 'puppetlabs-ntp' => '8.1.0', 'example-myapp1' => { 'git' => 
'https://git.example.com/puppet/example-myapp1.git', 'ref' => 'v1.3.0' } } }, 'development' => { 'remote' => 'https://git.example.com/puppet/control-repo.git', 'ref' => 'master', 'modules' => { 'puppetlabs-stdlib' => '6.1.0', 'puppetlabs-ntp' => '8.1.0', 'example-myapp1' => { 'git' => 'https://git.example.com/puppet/example-myapp1.git', 'ref' => 'v1.3.1' } } } } end describe "with valid yaml file" do it "produces environments" do allow(YAML).to receive(:load_file).with('/envs.yaml').and_return(environments_hash) source = described_class.new('yamlsource', '/some/nonexistent/dir', config: '/envs.yaml') expect(source.environments.map(&:name)).to contain_exactly('production', 'development') end end end r10k-4.0.2/spec/unit/source_spec.rb000066400000000000000000000004361460033767200170550ustar00rootroot00000000000000require 'spec_helper' require 'r10k/source' describe R10K::Source do it "implementds methods for a keyed factory" do expect(described_class).to respond_to :register expect(described_class).to respond_to :retrieve expect(described_class).to respond_to :generate end end r10k-4.0.2/spec/unit/svn/000077500000000000000000000000001460033767200150215ustar00rootroot00000000000000r10k-4.0.2/spec/unit/svn/remote_spec.rb000066400000000000000000000012251460033767200176530ustar00rootroot00000000000000require 'r10k' require 'r10k/svn' describe R10K::SVN::Remote do subject { described_class.new('https://svn-server.site/repo') } it "generates the trunk URL by appending '/trunk' to the base URL" do expect(subject.trunk).to eq 'https://svn-server.site/repo/trunk' end describe "retrieving branches" do let(:branches) do %[apache/\ndns/\nrobobutler/\nstaging/\n] end it "enumerates the /branches directory of the base URL" do allow(subject).to receive(:svn).with(['ls', 'https://svn-server.site/repo/branches']).and_return(branches) expect(subject.branches).to eq(%w[apache dns robobutler staging]) end end end r10k-4.0.2/spec/unit/svn/working_dir_spec.rb000066400000000000000000000035161460033767200207030ustar00rootroot00000000000000require 'spec_helper' require 'r10k/svn/working_dir' describe R10K::SVN::WorkingDir, "initializing" do let(:pathname) { Pathname.new("/some/imaginary/path") } it "stores the provided path" do subject = described_class.new(pathname) expect(subject.path).to eq Pathname.new("/some/imaginary/path") end describe "when auth is provided" do it "raises an error when only the username is provided" do expect { described_class.new(pathname, :username => "root") }.to raise_error(ArgumentError, "Both username and password must be specified") end it "raises an error when only the password is provided" do expect { described_class.new(pathname, :password => "hunter2") }.to raise_error(ArgumentError, "Both username and password must be specified") end it "does not raise an error when both username and password are provided" do o = described_class.new(pathname, :username => "root", :password => "hunter2") expect(o.username).to eq("root") expect(o.password).to eq("hunter2") end end end describe R10K::SVN::WorkingDir, "when authentication credentials are given" do let(:pathname) { Pathname.new("/some/imaginary/path") } subject { described_class.new(pathname, :username => "root", :password => "hunter2") } def check_args(args) expect(args).to include("--username") expect(args).to include("root") expect(args).to include("--password") expect(args).to include("hunter2") end it "invokes 'svn checkout' with the given credentials" do expect(subject).to receive(:svn) do |args, _| check_args(args) end 
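    # Performing a checkout should pass the configured --username and --password flags through to the svn invocation.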
subject.checkout('https://some.svn.url/trunk') end it "invokes 'svn update' with the given credentials" do expect(subject).to receive(:svn) do |args, _| check_args(args) end subject.update end end r10k-4.0.2/spec/unit/tarball_spec.rb000066400000000000000000000024371460033767200172010ustar00rootroot00000000000000require 'spec_helper' require 'r10k/tarball' describe R10K::Tarball do include_context 'Tarball' subject { described_class.new('fixture-tarball', fixture_tarball, checksum: fixture_checksum) } describe 'initialization' do it 'initializes' do expect(subject).to be_kind_of(described_class) end end describe 'downloading and caching' do it 'downloads the source to the cache' do # No cache present initially expect(File.exist?(subject.cache_path)).to be(false) expect(subject.cache_valid?).to be(false) subject.get expect(subject.cache_valid?).to be(true) expect(File.exist?(subject.cache_path)).to be(true) end let(:raw_content) {[ './', './Puppetfile', './metadata.json', './spec/', './environment.conf', './spec/1', ]} let(:clean_content) {[ 'Puppetfile', 'metadata.json', 'spec', 'environment.conf', 'spec/1', ]} it 'returns clean paths when listing cached tarball content' do iterator = allow(subject).to receive(:each_tarball_entry) raw_content.each { |entry| iterator.and_yield(entry) } expect(subject.paths).to eq(clean_content) end end describe 'http sources' describe 'file sources' describe 'syncing' end r10k-4.0.2/spec/unit/util/000077500000000000000000000000001460033767200151705ustar00rootroot00000000000000r10k-4.0.2/spec/unit/util/attempt_spec.rb000066400000000000000000000043061460033767200202100ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/attempt' describe R10K::Util::Attempt do describe "with a single truthy value" do subject(:attempt) { described_class.new("hello") } it "invokes the next action with the value" do value = nil attempt.try { |inner| value = inner } attempt.run expect(attempt).to be_ok expect(value).to eq "hello" end it "returns the resulting value from the block" do attempt.try { |inner| inner + " world" } result = attempt.run expect(attempt).to be_ok expect(result).to eq "hello world" end end describe "with a false value" do subject(:attempt) { described_class.new(nil) } it "does not evaluate the block" do value = "outside of block" attempt.try { |inner| value = "ran block" } attempt.run expect(attempt).to be_ok expect(value).to eq "outside of block" end it "does not continue execution" do attempt.try { |_| "something" }.try { raise } expect(attempt.run).to be_nil end end describe "with an array" do subject(:attempt) { described_class.new([1, 2, 3, 4, 5]) } it "runs the block for each element in the array" do sum = 0 attempt.try { |inner| sum += inner } attempt.run expect(attempt).to be_ok expect(sum).to eq 15 end it "returns the result of the operation on each array member" do sum = 0 attempt.try { |inner| sum += inner } result = attempt.run expect(result).to eq([1, 3, 6, 10, 15]) end end describe "when an exception is raised" do subject(:attempt) { described_class.new("initial") } it "returns the exception" do attempt.try { |_| raise RuntimeError } result = attempt.run expect(attempt).to_not be_ok expect(result).to be_a_kind_of RuntimeError end it "does not continue execution" do attempt.try { |_| raise RuntimeError }.try { |_| "This should not be run" } result = attempt.run expect(result).to be_a_kind_of RuntimeError end it "only rescues descendants of StandardError" do attempt.try { |_| raise Exception } expect { attempt.run }.to 
raise_error(Exception) end end end r10k-4.0.2/spec/unit/util/cacheable_spec.rb000066400000000000000000000012301460033767200204120ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/cacheable' RSpec.describe R10K::Util::Cacheable do subject { Object.new.extend(R10K::Util::Cacheable) } describe "dirname sanitization" do let(:input) { 'https://some/git/remote' } it 'sanitizes URL to directory name' do expect(subject.sanitized_dirname(input)).to eq('https---some-git-remote') end context 'with username and password' do let(:input) { 'https://"user:pa$$w0rd:@authenticated/git/remote' } it 'sanitizes authenticated URL to directory name' do expect(subject.sanitized_dirname(input)).to eq('https---authenticated-git-remote') end end end end r10k-4.0.2/spec/unit/util/commands_spec.rb000066400000000000000000000031671460033767200203370ustar00rootroot00000000000000require 'r10k/util/commands' require 'r10k/util/exec_env' require 'tmpdir' describe R10K::Util::Commands do describe "#which" do before do allow(File).to receive(:executable?).and_return false allow(File).to receive(:file?).and_return false end def stub_executable(exe) allow(File).to receive(:executable?).with(exe).and_return true allow(File).to receive(:file?).with(exe).and_return true end describe "when ENV['PATHEXT'] is unset" do let(:path) { Dir.mktmpdir } around(:each) do |example| R10K::Util::ExecEnv.withenv('PATHEXT' => nil, 'PATH' => path) do example.run end end it "returns the first matching command in PATH" do exe = File.join(path, 'git') stub_executable(exe) expect(described_class.which("git")).to eq exe end it "returns nil if the command could not be found" do exe = File.join(path, 'git') expect(described_class.which("git")).to be_nil end end describe "when ENV['PATHEXT'] is set" do let(:path) { Dir.mktmpdir } around(:each) do |example| R10K::Util::ExecEnv.withenv('PATHEXT' => '.bat;.exe;.cmd', 'PATH' => path) do example.run end end it "returns the first matching command in PATH" do exe = File.join(path, 'git.exe') stub_executable(exe) expect(described_class.which("git")).to eq exe end it "returns nil if the command could not be found" do exe = File.join(path, 'git.exe') expect(described_class.which("git")).to be_nil end end end end r10k-4.0.2/spec/unit/util/downloader_spec.rb000066400000000000000000000077071460033767200207000ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/downloader' describe R10K::Util::Downloader do subject(:downloader) do subj = Object.new subj.extend(R10K::Util::Downloader) subj.singleton_class.class_eval { public :download } subj.singleton_class.class_eval { public :http_get } subj.singleton_class.class_eval { public :file_digest } subj end let(:tmpdir) { Dir.mktmpdir } after(:each) { FileUtils.remove_entry_secure(tmpdir) } describe 'http_get' do let(:src_url) { 'https://example.com' } let(:dst_file) { File.join(tmpdir, 'test.out') } let(:tarball_uri) { URI('http://tarball.example.com/tarball.tar.gz') } let(:redirect_uri) { URI('http://redirect.example.com/redirect') } let(:proxy_uri) { URI('http://user:password@proxy.example.com') } it 'downloads a simple file' do mock_session = instance_double('Net::HTTP', active?: true) tarball_response = instance_double('Net::HTTPSuccess') expect(Net::HTTP).to receive(:new).with(tarball_uri.host, any_args).and_return(mock_session) expect(Net::HTTPSuccess).to receive(:===).with(tarball_response).and_return(true) expect(mock_session).to receive(:request_get).and_yield(tarball_response) expect(mock_session).to receive(:start).once 
expect(mock_session).to receive(:finish).once expect { |b| downloader.http_get(tarball_uri, &b) }.to yield_with_args(tarball_response) end it 'follows redirects' do mock_session_1 = instance_double('Net::HTTP', active?: false) mock_session_2 = instance_double('Net::HTTP', active?: true) redirect_response = instance_double('Net::HTTPRedirection') tarball_response = instance_double('Net::HTTPSuccess') expect(Net::HTTP).to receive(:new).with(redirect_uri.host, any_args).and_return(mock_session_1).once expect(Net::HTTP).to receive(:new).with(tarball_uri.host, any_args).and_return(mock_session_2).once expect(Net::HTTPRedirection).to receive(:===).with(redirect_response).and_return(true) expect(Net::HTTPSuccess).to receive(:===).with(tarball_response).and_return(true) allow(Net::HTTPRedirection).to receive(:===).and_call_original expect(mock_session_1).to receive(:request_get).and_yield(redirect_response) expect(mock_session_2).to receive(:request_get).and_yield(tarball_response) # The redirect response should be queried for the redirect location expect(redirect_response).to receive(:[]).with('location').and_return(tarball_uri.to_s) # Both sessions should start and finish cleanly expect(mock_session_1).to receive(:start).once expect(mock_session_1).to receive(:finish).once expect(mock_session_2).to receive(:start).once expect(mock_session_2).to receive(:finish).once expect { |b| downloader.http_get(redirect_uri, &b) }.to yield_with_args(tarball_response) end it 'can use a proxy' do mock_session = instance_double('Net::HTTP', active?: true) expect(Net::HTTP).to receive(:new) .with(tarball_uri.host, tarball_uri.port, proxy_uri.host, proxy_uri.port, proxy_uri.user, proxy_uri.password, any_args) .and_return(mock_session) expect(mock_session).to receive(:request_get).and_return(:not_yielded) expect(mock_session).to receive(:start).once expect(mock_session).to receive(:finish).once downloader.http_get(tarball_uri, proxy: proxy_uri) end end describe 'checksums' do let(:fixture_checksum) { '0bcea17aa0c5e868c18f0fa042feda770e47c1a4223229f82116ccb3ca33c6e3' } let(:fixture_tarball) do File.expand_path('spec/fixtures/integration/git/puppet-boolean-bare.tar', PROJECT_ROOT) end it 'checksums files' do expect(downloader.file_digest(fixture_tarball)).to eql(fixture_checksum) end end end r10k-4.0.2/spec/unit/util/exec_env_spec.rb000066400000000000000000000025271460033767200203310ustar00rootroot00000000000000require 'r10k/util/exec_env' describe R10K::Util::ExecEnv do describe "withenv" do it "adds the keys to the environment during the block" do val = nil described_class.withenv('VAL' => 'something') do val = ENV['VAL'] end expect(val).to eq 'something' end it "doesn't modify values that were not modified by the passed hash" do origpath = ENV['PATH'] path = nil described_class.withenv('VAL' => 'something') do path = ENV['PATH'] end expect(path).to eq origpath end it "removes new values after the block" do val = nil described_class.withenv('VAL' => 'something') { } expect(ENV['VAL']).to be_nil end it "restores old values after the block" do path = ENV['PATH'] described_class.withenv('PATH' => '/usr/bin') { } expect(ENV['PATH']).to eq path end end describe "reset" do after { ENV.delete('VAL') } it "replaces environment keys with the specified keys" do ENV['VAL'] = 'hi' newenv = ENV.to_hash newenv['VAL'] = 'bye' described_class.reset(newenv) expect(ENV['VAL']).to eq 'bye' end it "removes any keys that were not provided" do env = ENV.to_hash ENV['VAL'] = 'hi' described_class.reset(env) expect(ENV['VAL']).to be_nil 
end end end r10k-4.0.2/spec/unit/util/purgeable_spec.rb000066400000000000000000000243601460033767200205020ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/purgeable' require 'r10k/util/cleaner' RSpec.describe R10K::Util::Purgeable do let(:managed_directories) do [ 'spec/fixtures/unit/util/purgeable/managed_one', 'spec/fixtures/unit/util/purgeable/managed_two', ] end let(:desired_contents) do [ 'spec/fixtures/unit/util/purgeable/managed_one/expected_1', 'spec/fixtures/unit/util/purgeable/managed_one/new_1', 'spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1', 'spec/fixtures/unit/util/purgeable/managed_one/managed_symlink_dir', 'spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/subdir_expected_1', 'spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/subdir_new_1', 'spec/fixtures/unit/util/purgeable/managed_one/managed_subdir_1/managed_symlink_file', 'spec/fixtures/unit/util/purgeable/managed_two/expected_2', 'spec/fixtures/unit/util/purgeable/managed_two/new_2', 'spec/fixtures/unit/util/purgeable/managed_two/.hidden', ] end subject { R10K::Util::Cleaner.new(managed_directories, desired_contents) } context 'without recurse option' do let(:recurse) { false } describe '#current_contents' do it 'collects direct contents of all managed directories' do expect(subject.current_contents(recurse)).to contain_exactly(/\/expected_1/, /\/expected_2/, /\/unmanaged_1/, /\/unmanaged_2/, /\/managed_subdir_1/, /\/managed_symlink_dir/, /\/unmanaged_symlink_file/) end end describe '#pending_contents' do it 'collects desired_contents that do not yet exist' do expect(subject.pending_contents(recurse)).to include(/\/new_1/, /\/new_2/) end end describe '#stale_contents' do context 'with no whitelist or exclusions' do let(:exclusions) { [] } let(:whitelist) { [] } it 'collects current_contents that should not exist' do expect(subject.stale_contents(recurse, exclusions, whitelist)).to contain_exactly(/\/unmanaged_1/, /\/unmanaged_2/, /\/unmanaged_symlink_file/) end end context 'with whitelisted items' do let(:exclusions) { [] } let(:whitelist) { ['**/unmanaged_1'] } it 'collects current_contents that should not exist except whitelisted items' do expect(subject.logger).to receive(:debug).with(/unmanaged_1.*whitelist match/i) expect(subject.stale_contents(recurse, exclusions, whitelist)).to contain_exactly(/\/unmanaged_2/, /\/unmanaged_symlink_file/) end end context 'with excluded items' do let(:exclusions) { ['**/unmanaged_2'] } let(:whitelist) { [] } it 'collects current_contents that should not exist except excluded items' do expect(subject.logger).to receive(:debug2).with(/unmanaged_2.*internal exclusion match/i) expect(subject.stale_contents(recurse, exclusions, whitelist)).to contain_exactly(/\/unmanaged_1/, /\/unmanaged_symlink_file/) end end end describe '#purge!' 
do let(:exclusions) { [] } let(:whitelist) { [] } let(:purge_opts) { { recurse: recurse, whitelist: whitelist } } it 'does nothing when there is no stale_contents' do allow(subject).to receive(:stale_contents).and_return([]) expect(FileUtils).to_not receive(:rm_rf) subject.purge!(purge_opts) end it 'recursively deletes all stale_contents' do subject.stale_contents(recurse, exclusions, whitelist).each do |stale| expect(FileUtils).to receive(:rm_r).with(stale, hash_including(secure: true)) end subject.purge!(purge_opts) end end end context 'with recurse option' do let(:recurse) { true } describe '#current_contents' do it 'collects contents of all managed directories recursively' do expect(subject.current_contents(recurse)). to contain_exactly(/\/expected_1/, /\/expected_2/, /\/unmanaged_1/, /\/unmanaged_2/, /\/managed_symlink_dir/, /\/unmanaged_symlink_file/, /\/managed_subdir_1/, /\/subdir_expected_1/, /\/subdir_unmanaged_1/, /\/managed_symlink_file/, /\/unmanaged_symlink_dir/, /\/subdir_allowlisted_2/, /\/ignored_1/, /\/\.hidden/) end end describe '#pending_contents' do it 'collects desired_contents that do not yet exist recursively' do expect(subject.pending_contents(recurse)).to include(/\/new_1/, /\/new_2/, /\/subdir_new_1/) end end describe '#stale_contents' do context 'with no whitelist or exclusions' do let(:exclusions) { [] } let(:whitelist) { [] } it 'collects current_contents that should not exist recursively' do expect(subject.stale_contents(recurse, exclusions, whitelist)). to contain_exactly(/\/unmanaged_1/, /\/unmanaged_2/, /\/unmanaged_symlink_file/, /\/subdir_unmanaged_1/, /\/ignored_1/, /\/subdir_allowlisted_2/, /\/unmanaged_symlink_dir/) end end context 'with whitelisted items' do let(:exclusions) { [] } let(:whitelist) { ['**/unmanaged_1'] } it 'collects current_contents that should not exist except whitelisted items' do expect(subject.logger).to receive(:debug).with(/unmanaged_1.*whitelist match/i) expect(subject.stale_contents(recurse, exclusions, whitelist)). to contain_exactly(/\/unmanaged_2/, /\/subdir_unmanaged_1/, /\/unmanaged_symlink_file/, /\/ignored_1/, /\/subdir_allowlisted_2/, /\/unmanaged_symlink_dir/) end it 'does not collect contents that match recursive globbed whitelist items as intermediate values' do recursive_whitelist = ['**/managed_subdir_1/**/*'] expect(subject.logger).not_to receive(:debug).with(/ignored_1/) expect(subject.stale_contents(recurse, exclusions, recursive_whitelist)). to contain_exactly(/\/unmanaged_2/, /\/managed_one\/unmanaged_1/, /\/managed_one\/unmanaged_symlink_file/) end end context 'with excluded items' do let(:exclusions) { ['**/unmanaged_2'] } let(:whitelist) { [] } it 'collects current_contents that should not exist except excluded items' do expect(subject.logger).to receive(:debug2).with(/unmanaged_2.*internal exclusion match/i) expect(subject.stale_contents(recurse, exclusions, whitelist)). to contain_exactly(/\/unmanaged_1/, /\/unmanaged_symlink_file/, /\/subdir_unmanaged_1/, /\/ignored_1/, /\/subdir_allowlisted_2/, /\/unmanaged_symlink_dir/) end it 'does not collect contents that match recursive globbed exclusion items as intermediate values' do recursive_exclusions = ['**/managed_subdir_1/**/*'] expect(subject.logger).not_to receive(:debug).with(/ignored_1/) expect(subject.stale_contents(recurse, recursive_exclusions, whitelist)). to contain_exactly(/\/unmanaged_2/, /\/unmanaged_symlink_file/, /\/managed_one\/unmanaged_1/) end end end describe '#purge!' 
do let(:exclusions) { [] } let(:whitelist) { [] } let(:purge_opts) { { recurse: recurse, whitelist: whitelist } } it 'does nothing when there is no stale_contents' do allow(subject).to receive(:stale_contents).and_return([]) expect(FileUtils).to_not receive(:rm_r) subject.purge!(purge_opts) end it 'recursively deletes all stale_contents' do subject.stale_contents(recurse, exclusions, whitelist).each do |stale| expect(FileUtils).to receive(:rm_r).with(stale, hash_including(secure: true)) end subject.purge!(purge_opts) end end end describe "user whitelist functionality" do context "non-recursive whitelist glob" do let(:whitelist) { managed_directories.collect { |dir| File.join(dir, "*unmanaged*") } } let(:purge_opts) { { recurse: true, whitelist: whitelist } } describe '#purge!' do it 'does not purge items matching glob at root level' do allow(FileUtils).to receive(:rm_r) expect(FileUtils).to_not receive(:rm_r).with(/\/unmanaged_[12]/, anything) expect(FileUtils).to_not receive(:rm_r).with(/\/unmanaged_symlink_file/, anything) expect(subject.logger).to receive(:debug).with(/whitelist match/i).at_least(:once) subject.purge!(purge_opts) end end end context "recursive whitelist glob" do let(:whitelist) do managed_directories.flat_map do |dir| [File.join(dir, "**", "*unmanaged*"), File.join(dir, "**", "subdir_allowlisted_2")] end end let(:purge_opts) { { recurse: true, whitelist: whitelist } } describe '#purge!' do it 'does not purge items matching glob at any level' do expect(FileUtils).to_not receive(:rm_r) expect(subject.logger).to receive(:debug).with(/whitelist match/i).at_least(:once) subject.purge!(purge_opts) end end end end describe "internal exclusions functionality" do let(:purge_opts) { { recurse: true, whitelist: [] } } let(:exclusions) { [ File.join('**', 'unmanaged_1') ] } context "when class implements #purge_exclusions" do describe '#purge!' do it 'does not purge items matching exclusion glob' do expect(subject).to receive(:purge_exclusions).and_return(exclusions) allow(FileUtils).to receive(:rm_r) expect(FileUtils).to_not receive(:rm_r).with(/\/unmanaged_1/, anything) expect(subject.logger).to receive(:debug2).with(/unmanaged_1.*internal exclusion match/i) subject.purge!(purge_opts) end end end context "when class does not implement #purge_exclusions" do describe '#purge!' 
do it 'purges normally' do expect(FileUtils).to receive(:rm_r).at_least(4).times subject.purge!(purge_opts) end end end end end r10k-4.0.2/spec/unit/util/setopts_spec.rb000066400000000000000000000044361460033767200202370ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/setopts' describe R10K::Util::Setopts do let(:klass) do Class.new do include R10K::Util::Setopts attr_reader :valid, :alsovalid, :truthyvalid def initialize(opts = {}) setopts(opts, { :valid => :self, :duplicate => :valid, :alsovalid => :self, :truthyvalid => true, :validalias => :valid, :ignoreme => nil }) end end end it "can handle an empty hash of options" do o = klass.new() expect(o.valid).to be_nil expect(o.alsovalid).to be_nil end it "can handle a single valid option" do o = klass.new(:valid => 'yep') expect(o.valid).to eq 'yep' expect(o.alsovalid).to be_nil end it "can handle multiple valid options" do o = klass.new(:valid => 'yep', :alsovalid => 'yarp') expect(o.valid).to eq 'yep' expect(o.alsovalid).to eq 'yarp' end it "can handle options marked with TrueClass" do o = klass.new(:truthyvalid => 'so truthy') expect(o.truthyvalid).to eq 'so truthy' end it "can handle aliases marked with :self" do o = klass.new(:validalias => 'yuuup') expect(o.valid).to eq 'yuuup' end it "raises an error when given an unhandled option" do expect { klass.new(:valid => 'yep', :notvalid => 'newp') }.to raise_error(ArgumentError, /cannot handle option 'notvalid'/) end it "warns when given an unhandled option and raise_on_unhandled=false" do test = Class.new { include R10K::Util::Setopts }.new allow(test).to receive(:logger).and_return(spy) test.send(:setopts, {valid: :value, invalid: :value}, {valid: :self}, raise_on_unhandled: false) expect(test.logger).to have_received(:warn).with(%r{cannot handle option 'invalid'}) end it "ignores values that are marked as unhandled" do klass.new(:ignoreme => "IGNORE ME!") end it "warns when given conflicting options" do test = Class.new { include R10K::Util::Setopts }.new allow(test).to receive(:logger).and_return(spy) test.send(:setopts, {valid: :one, duplicate: :two}, {valid: :arg, duplicate: :arg}) expect(test.logger).to have_received(:warn).with(%r{valid.*duplicate.*conflict.*not both}) end end r10k-4.0.2/spec/unit/util/subprocess/000077500000000000000000000000001460033767200173605ustar00rootroot00000000000000r10k-4.0.2/spec/unit/util/subprocess/result_spec.rb000066400000000000000000000022721460033767200222400ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/subprocess' describe R10K::Util::Subprocess::Result do describe "formatting" do it "includes the exit code" do result = described_class.new(%w[/usr/bin/gti --zoom], '', '', 42) expect(result.format).to match(%r[Exit code: 42]) end describe "stdout" do it "is omitted when empty" do result = described_class.new(%w[/usr/bin/gti --zoom], '', '', 42) expect(result.format).to_not match(%r[Stdout]) end it "is included when non-empty" do result = described_class.new(%w[/usr/bin/gti --zoom], 'stuff here', '', 42) expect(result.format).to match(%r[Stdout:]) expect(result.format).to match(%r[stuff here]) end end describe "stderr" do it "is omitted when empty" do result = described_class.new(%w[/usr/bin/gti --zoom], '', '', 42) expect(result.format).to_not match(%r[Stderr]) end it "is included when non-empty" do result = described_class.new(%w[/usr/bin/gti --zoom], '', 'other stuff', 42) expect(result.format).to match(%r[Stderr:]) expect(result.format).to match(%r[other stuff]) end end end end 
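# The expectations above pin down the behaviour of Result#format without showing an
# implementation. A minimal sketch that would satisfy them (hypothetical, not the
# method shipped in r10k; the instance variable names @argv, @stdout, @stderr and
# @exit_code are assumptions): the command and exit code are always reported, while
# stdout and stderr sections are emitted only when non-empty.
#
#   def format
#     parts = ["Command: #{@argv.join(' ')}"]
#     parts.concat(["Stdout:", @stdout]) unless @stdout.empty?
#     parts.concat(["Stderr:", @stderr]) unless @stderr.empty?
#     parts << "Exit code: #{@exit_code}"
#     parts.join("\n")
#   end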
r10k-4.0.2/spec/unit/util/subprocess/runner/000077500000000000000000000000001460033767200206715ustar00rootroot00000000000000r10k-4.0.2/spec/unit/util/subprocess/runner/posix_spec.rb000066400000000000000000000004521460033767200233730ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/subprocess/runner' describe R10K::Util::Subprocess::Runner::POSIX, :if => R10K::Util::Platform.posix? do fixture_root = File.expand_path('spec/fixtures/unit/util/subprocess/runner', PROJECT_ROOT) it_behaves_like 'a subprocess runner', fixture_root end r10k-4.0.2/spec/unit/util/subprocess/runner/pump_spec.rb000066400000000000000000000041241460033767200232120ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/subprocess/runner/pump' describe R10K::Util::Subprocess::Runner::Pump do let(:pair) { IO.pipe } let(:r) { pair.first } let(:w) { pair.last } after do pair.each { |io| io.close unless io.closed? } end subject { described_class.new(r) } it "returns an empty string if nothing has been read" do expect(subject.string).to eq('') end describe "reading all data in the stream" do it "reads data until the stream reaches EOF" do subject.start w << "hello" w << " " w << "world!" w.close subject.wait expect(subject.string).to eq("hello world!") end end describe "halting" do it "does not read any more information read off the pipe" do subject.min_delay = 0.01 subject.start w << "hello" # This should ensure that we yield to the pumping thread. If this test # sporadically fails then we may need to increase the timeout. sleep 0.1 subject.halt! w << " world!" expect(subject.string).to eq("hello") end end describe "backing off" do it "does not back off more than the max delay time" do max_delay = subject.min_delay * 2 subject.max_delay = max_delay subject.start sleep max_delay * 2 Timeout.timeout(max_delay * 1.5) do subject.halt! end end end # Linux 2.6.11+ has a maximum pipe capacity of 64 KiB, and writing to the # pipe when the pipe is at capacity will block. To make sure the pump is # actively removing contents from the pipe we need to attempt to fill up # the entire pipe. # # See man pipe(7) it "does not block if more than 64 kilobytes are fed into the pipe" do # The maximum pipe buffer size is 2 ** 16 bytes, so that's the minimum # amount of data needed to cause further writes to block. We then double # this value to make sure that we are continuously emptying the pipe. pipe_buffer_size = 2 ** 17 blob = "buffalo!" * pipe_buffer_size subject.start Timeout.timeout(60) { w << blob } w.close subject.wait end end r10k-4.0.2/spec/unit/util/subprocess/runner/windows_spec.rb000066400000000000000000000004561460033767200237270ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/subprocess/runner' describe R10K::Util::Subprocess::Runner::Windows, :if => R10K::Util::Platform.windows? 
do fixture_root = File.expand_path('spec/fixtures/unit/util/subprocess/runner', PROJECT_ROOT) it_behaves_like 'a subprocess runner', fixture_root end r10k-4.0.2/spec/unit/util/subprocess/subprocess_error_spec.rb000066400000000000000000000013301460033767200243170ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/subprocess' describe R10K::Util::Subprocess::SubprocessError do let(:result) do R10K::Util::Subprocess::Result.new(%w[/usr/bin/gti --zoom], "zooming on stdout", "zooming on stderr", 42) end describe "formatting the message" do subject(:message) { described_class.new("Execution failed", :result => result).message } it "includes the exception message and formatted result" do expect(message).to eq( [ "Execution failed:", "Command: /usr/bin/gti --zoom", "Stdout:", "zooming on stdout", "Stderr:", "zooming on stderr", "Exit code: 42", ].join("\n") ) end end end r10k-4.0.2/spec/unit/util/subprocess_spec.rb000066400000000000000000000040071460033767200207200ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/subprocess' describe R10K::Util::Subprocess do describe "selecting the runner implementation" do it "uses the windows implementation on Windows platforms" do expect(R10K::Util::Platform).to receive(:windows?).and_return true expect(described_class.runner).to eq R10K::Util::Subprocess::Runner::Windows end it "uses the posix implementation when not on windows" do expect(R10K::Util::Platform).to receive(:windows?).and_return false expect(described_class.runner).to eq R10K::Util::Subprocess::Runner::POSIX end end describe "running commands" do subject { described_class.new(['/bin/echo', 'hello', 'world']) } let(:runner) do double('R10K::Util::Subprocess::Runner').tap do |i| allow(i).to receive(:run).and_return(result) end end let(:result) { double('R10K::Util::Subprocess::Result').as_null_object } before do allow(described_class).to receive(:runner).and_return(double(:new => runner)) end it "copies the cwd to the runner if a cwd is given" do expect(runner).to receive(:cwd=).with('/tmp') subject.cwd = '/tmp' subject.execute end it "returns the result from the execution" do expect(subject.execute).to eq result end describe "when the command returned with a non-zero exit status" do before do allow(runner).to receive(:crashed?).and_return true end it "raises an exception if raise_on_fail is true" do subject.raise_on_fail = true allow(result).to receive(:exit_code).and_return(255) allow(result).to receive(:stderr).and_return('Command not found') expect { subject.execute }.to raise_error(R10K::Util::Subprocess::SubprocessError, /Command exited with non-zero exit code/) end it "doesn't raise an exception if raise_on_fail is false" do subject.raise_on_fail = false expect { subject.execute }.to_not raise_error end end end end r10k-4.0.2/spec/unit/util/symbolize_keys_spec.rb000066400000000000000000000041241460033767200216000ustar00rootroot00000000000000require 'spec_helper' require 'r10k/util/symbolize_keys' describe R10K::Util::SymbolizeKeys do it "deletes all keys that are strings" do hash = {'foo' => 'bar', :baz => 'quux'} described_class.symbolize_keys!(hash) expect(hash).to_not have_key('foo') end it "replaces the deleted keys with interned strings" do hash = {'foo' => 'bar', :baz => 'quux'} described_class.symbolize_keys!(hash) expect(hash[:foo]).to eq 'bar' end it "raises an error if there is an existing symbol for a given string key" do hash = {'foo' => 'bar', :foo => 'quux'} expect { described_class.symbolize_keys!(hash) }.to raise_error(TypeError, /An 
existing interned key/) end it "does not modify existing symbol entries" do hash = {'foo' => 'bar', :baz => 'quux'} described_class.symbolize_keys!(hash) expect(hash[:baz]).to eq 'quux' end it "does not modify keys that are not strings or symbols" do key = %w[foo] hash = {key => 'bar', :baz => 'quux'} described_class.symbolize_keys!(hash) expect(hash[key]).to eq 'bar' end context "when symbolizing recursively" do it "can recursively symbolize keys in nested hash values" do hash = {'foo' => {'bar' => 'baz'}} described_class.symbolize_keys!(hash, true) expect(hash).to eq({:foo => {:bar => 'baz'}}) end it "recurses into hash values that had symbol keys" do hash = {:foo => {'bar' => {'baz' => 'quux'}}} described_class.symbolize_keys!(hash, true) expect(hash).to eq({:foo => {:bar => {:baz => 'quux'}}}) end it "recurses into array values whose items are hashes" do hash = {'foo' => [ {'item1_key' => 'val'}, {'item2_key' => 'val'} ]} described_class.symbolize_keys!(hash, true) expect(hash).to eq({:foo => [ {:item1_key => 'val'}, {:item2_key => 'val'} ]}) end it "ignores nested array items that are not hashes" do hash = {'foo' => [ {'item1_key' => 'val'}, 'banana' ]} described_class.symbolize_keys!(hash, true) expect(hash).to eq({:foo => [ {:item1_key => 'val'}, 'banana' ]}) end end end
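# Taken together, these examples describe the contract of symbolize_keys!: string
# keys are deleted and re-inserted in place as symbols, existing symbol and
# non-string keys are left untouched, a string/symbol collision raises TypeError,
# and recursion into nested hashes (and arrays of hashes) is opt-in via the second
# argument. An illustrative usage sketch (the hash contents below are made up for
# this example, not fixtures from this repository):
#
#   require 'r10k/util/symbolize_keys'
#
#   opts = {'remote' => 'https://git.example.com/repo.git', 'settings' => {'ref' => 'main'}}
#   R10K::Util::SymbolizeKeys.symbolize_keys!(opts, true)
#   opts #=> {:remote=>"https://git.example.com/repo.git", :settings=>{:ref=>"main"}}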