# usethis 2.1.5

pkgdown-related functions no longer automatically strip a trailing slash from the pkgdown site URL, in order to play more nicely with CRAN's URL checks (#1526).

`edit_pkgdown_config()` is a new function that opens the pkgdown YAML configuration file for the current Project, if such a file exists.

The error thrown when reporting an unsupported GitHub configuration has been fixed for forward compatibility with a future version of rlang, i.e. what is anticipated to be rlang v1.0.0.

Version 2.1.4 was never released. The version was advanced from 2.1.4 to 2.1.5 strictly for CRAN (re-)submission purposes.

# usethis 2.1.3

Modified a test to ensure that intermittent GitHub rate limiting does not lead to ungraceful failure on CRAN.

# usethis 2.1.2

`git_default_branch_rename()` no longer errors on repos where a README exists but has no badge block.

`git_default_branch_rediscover()` prunes the defunct remote ref to the old default branch, e.g. `origin/master`.

# usethis 2.1.0

## Git default branch support

usethis has a more sophisticated understanding of the default branch and gains several functions to support default branch renaming.

* `git_branch_default()` has been renamed to `git_default_branch()`, to place it logically in the new family of functions. The old name still works, but that won't be true forever.

* `git_default_branch()` is much more diligent about figuring out the default branch. Instead of only consulting the local repo, now we integrate local info with the default branch reported by the `upstream` or `origin` remote, if applicable.
  - This is intended to surface the case where a project has renamed its default branch and the local repo needs to sync up with that.

* `git_default_branch_rediscover()` is a new function that helps contributors update their local repo (and personal fork, if applicable) when a project/repo renames its default branch.

* `git_default_branch_rename()` is a new function that helps a repo owner rename the default branch (both on GitHub and locally).

* `git_default_branch_configure()` is a new function to set the new Git configuration option `init.defaultBranch`, which controls the name of the initial branch of new local repos.

* `git_sitrep()` exposes `init.defaultBranch` and surfaces the more sophisticated analysis of `git_default_branch()`.
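Taken together, the new functions cover the typical renaming workflow: the repo owner renames, contributors re-sync, and the config option covers repos created in the future. A minimal sketch, assuming the default arguments (renaming to `main`) and an active project:

```r
# Maintainer: rename the default branch on GitHub and locally
# (default arguments assumed, e.g. master -> main)
usethis::git_default_branch_rename()

# Contributor: update an existing local repo (and personal fork, if any)
# after the upstream project has renamed its default branch
usethis::git_default_branch_rediscover()

# Anyone: make new local repos start on the preferred branch name
usethis::git_default_branch_configure()

# Check what usethis now considers the default branch
usethis::git_default_branch()
```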
## Other GitHub-related changes

* `git_sitrep()` and `gh_token_help()` try even harder to help people get on the happy path with respect to their GitHub PAT (#1400, #1413, #1488, #1489, #1497).

* The minimum version of gh has been bumped to help / force more people to upgrade to the gh version that supports current GitHub PAT formats (@ijlyttle, #1454).

* `use_github_file()` is a new function related to `use_template()`. Instead of starting from a local file, `use_github_file()` grabs the contents of an arbitrary file on GitHub that the user has permission to read. It supports targeting a specific branch, tag, or commit and can follow a symlink (#1407). `use_github_file()` now powers `use_github_action()` and friends.

* `use_github_release()` is much more diligent about using any information left behind by `devtools::submit_cran()` or `devtools::release()`. Specifically, this applies to determining which SHA is to be tagged in the release. And this SHA, in turn, determines the consulted versions of DESCRIPTION (for package version) and NEWS.md (for release notes) (#1380).

* `use_release_issue()` also takes bullets from `release_questions()`, for compatibility with `devtools::release()`.

* `git_vaccinate()`, `edit_git_ignore()`, and `git_sitrep()` are more careful to consult, reveal, and set the `core.excludesFile` setting in the user's Git configuration (#1461).

* `use_github_action_check_full()` has been removed. It's overkill for the majority of R packages, which are better off with `use_github_actions()` or `use_github_action_check_standard()` (#1490).

* `use_github_pages()` and `use_pkgdown_github_pages()` use a new method for creating an empty, orphan `gh-pages` branch. This is necessary due to new GitHub behaviour, where it has become essentially impossible to refer to the empty tree (#1472).

* `use_github()` can create repositories with `"internal"` visibility, a feature that exists within GitHub Enterprise products (#1505).

## Package development

* `use_readme_rmd()` and `use_readme_md()` no longer include CRAN installation instructions in the initial template; instead, we only include GitHub-based install instructions or otherwise prompt the user to update the instructions (#1507).

* `use_import_from()` is a new function that puts `@importFrom pkg fun` directives into a package in a consistent location (@malcolmbarrett, #1377); see the sketch after this list.

* `DESCRIPTION` files generated by usethis no longer include `LazyData` by default, as per new CRAN checks; instead, `LazyData` is now added the first time you use `use_data()` (@malcolmbarrett, #1404).

* `use_tidy_eval()` has been updated to reflect current recommendations for using (and therefore exposing) tidy eval in other packages (@lionel-, #1445).

* `use_pkgdown()` automatically uses Bootstrap 5 if the pkgdown version supports it (anticipated for pkgdown 2.0.0).

* `use_lifecycle()` now imports `lifecycle::deprecated()` (#1419).

* `use_code_of_conduct()` now requires a `contact` argument to supply contact details for reporting CoC violations (#1269).

* `use_package()` no longer guides the user on how to use a dependency when no change was made (@malcolmbarrett, #1384).
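A minimal sketch of the two items above that change how you call usethis; the package, function, and email address are placeholders, not recommendations:

```r
# Record an @importFrom directive in the package's central location
# (placeholder package/function names)
usethis::use_import_from("rlang", "check_installed")

# use_code_of_conduct() now needs contact details for reporting violations
# (hypothetical address)
usethis::use_code_of_conduct(contact = "jane.doe@example.com")
```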
### Aimed at the tidyverse team

These functions are exported for anyone to use, but are aimed primarily at the maintainers of tidyverse, r-lib, and tidymodels packages.

* `use_tidy_dependencies()` is a new function that sets up standard dependencies used by all tidyverse packages, except those that are designed to be dependency free (#1423).

* `use_tidy_upkeep_issue()` is a new function similar to `use_release_issue()` that creates a checklist-style issue to prompt various updates (#1416).

* `use_tidy_release_test_env()` has been deleted since we no longer recommend including test environments in `cran-comments.md`. There's no evidence that CRAN finds it useful, and it's annoying to keep up-to-date (#1365).

* `use_tidy_github_labels()` is the new name for `use_tidy_labels()` (#1430).

* `use_tidy_github_actions()` takes over for `use_tidy_ci()`, which is now deprecated.

## User-level configuration

* `"usethis.overwrite"` is a new option. When set to `TRUE`, usethis overwrites an existing file without asking for user confirmation if the file is inside a Git repo. The normal Git workflow makes it easy to see and selectively accept/discard any proposed changes. This behaviour is strictly opt-in (#1424).

* Functions that provide code to load packages in your `.Rprofile` now use `rlang::check_installed()` to make sure the package is installed locally (@malcolmbarrett, #1398).

* `edit_rstudio_prefs()` and `edit_rstudio_snippets()` should now work on case-sensitive OSes, due to a path fix re: the location of RStudio's config files (@charliejhadley, #1420).

# usethis 2.0.1

* All functions that require a package now ask you if you'd like to install it.

* Added `edit_template()` for opening and creating files in `inst/templates` (for use with `use_template()`) (@malcolmbarrett, #1319).

* `use_article()` now creates the file in `vignettes/articles/` (#548).

* `use_lifecycle()` has been updated for changes in our lifecycle workflow (#1323).

* `use_tidy_pkgdown()` has been renamed to `use_pkgdown_github_pages()` since the function is useful for anyone who wants to automatically publish to GitHub Pages, not just the tidyverse team (#1308).

* `use_release_issue()` includes a bunch of minor improvements. Most importantly, for an initial CRAN release we now include a number of common things that CRAN checks for that aren't in `R CMD check`.

* `use_readme_rmd()`, `use_readme_md()`, `use_tidy_contributing()`, and `use_tidy_support()` use updated logic for determining the `OWNER/REPO` spec of the target repo (#1312).

# usethis 2.0.0

## Adoption of gert and changes to Git/GitHub credential handling

Usethis has various functions that help with Git-related tasks, which break down into two categories:

1. Git tasks, such as clone, push, and pull. These are things you could do with command line Git.
1. GitHub tasks, such as fork, release, and open an issue or pull request. These are things you could do in the browser or with the GitHub API.

We've switched from git2r to the gert package for Git operations. We continue to use the gh package for GitHub API work.

The big news in this area is that these lower-level dependencies are getting better at finding Git credentials, finding the same credentials as command line Git (and, therefore, the same as RStudio), and finding the same credentials as each other. This allows usethis to shed some of the workarounds we have needed in the past, to serve as a remedial "credential valet".

Under the hood, both gert and gh are now consulting your local Git credential store when they need credentials. At the time of writing, they are using two different even-lower-level packages to do this:

* gert uses the credentials package
* gh uses the gitcreds package

Even now, gert and gh should discover the same credentials, at least for github.com. In the future, these two packages may merge into one.
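If you want to see what this stack discovers on your machine, a few interactive calls are enough. A hedged sketch using usethis and the packages named above:

```r
# Overall Git/GitHub situation report, including credential findings
usethis::git_sitrep()

# Which GitHub account the discovered PAT authenticates as
gh::gh_whoami()

# The credential that gh/gitcreds finds in the Git credential store
gitcreds::gitcreds_get()
```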
Git/GitHub credential management is covered in a new article: [Managing Git(Hub) Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html)

The main user-facing changes in usethis are:

* usethis should discover and use the same credentials as command line Git.
* usethis should be able to work with any GitHub deployment. While github.com is the default, GitHub Enterprise deployments are fully supported. The target GitHub host is determined from the current project's configured GitHub remotes, whenever possible.

As a result, several functions are deprecated and several other functions have some deprecated arguments.

* Deprecated functions:
  - `use_git_credentials()`
  - `git_credentials()`
  - `github_token()`
* Functions with (deprecated arguments):
  - `create_from_github()` (`auth_token`, `credentials`)
  - `use_github()` (`auth_token`, `credentials`)
  - `use_github_links()` (`host`, `auth_token`)
  - `use_github_labels()` (`repo_spec`, `host`, `auth_token`)
  - `use_tidy_labels()` (`repo_spec`, `host`, `auth_token`)
  - `use_github_release()` (`host`, `auth_token`)

The switch to gert + credentials should eliminate most credential-finding fiascos.

Gert also takes a different approach to wrapping libgit2, the underlying C library that does Git operations. The result is more consistent support for SSH and TLS, across all operating systems, without requiring special effort at install time. More users should enjoy Git remote operations that "just work", for both SSH and HTTPS remotes. There should be fewer "unsupported protocol" errors.

## GitHub remote configuration

Usethis gains a more formal framework for characterizing a GitHub remote configuration. We look at:

* Which GitHub repositories `origin` and `upstream` point to
* Whether you can push to them
* How they relate to each other, e.g. fork-parent relationship

This is an internal matter, but users will notice that usethis is more clear about which configurations are supported by various functions and which are not. The most common configurations are reviewed in a [section of Happy Git](https://happygitwithr.com/common-remote-setups.html).

When working in a fork, there is sometimes a question whether to target the fork or its parent repository. For example, `use_github_links()` adds GitHub links to the URL and BugReports fields of DESCRIPTION. If someone calls `use_github_links()` when working in a fork, they probably want those links to refer to the *parent* or *source* repo, not to their fork, because the user is probably preparing a pull request. Usethis should now have better default behaviour in these situations and, in some cases, will present an interactive choice.

## Default branch

There is increasing interest in making the name of a repo's default branch configurable. Specifically, `main` is emerging as a popular alternative to `master`. Usethis now discovers the current repo's default branch and uses that everywhere that, previously, we had hard-wired `master`.

`git_branch_default()` is a newly exported function that is also what's used internally.

`use_course()`, `use_zip()`, and `create_download_url()` all have some support for forming the URL to download a `.zip` archive of a repo, based on a repo specification (e.g. `OWNER/REPO`) or a browser URL. These helpers now form a URL that targets `HEAD` of the repo, i.e. the default branch.
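A small sketch of that default-branch awareness in practice; the repo URL and destination directory are just examples, and the exact download URL that gets built depends on the host:

```r
# Report the default branch of the current repo (e.g. "main" or "master")
usethis::git_branch_default()

# Build a ZIP download URL from a browser URL; it now targets HEAD,
# i.e. the default branch, rather than a hard-wired "master"
url <- usethis::create_download_url("https://github.com/r-lib/usethis")

# Download and unpack that archive
usethis::use_zip(url, destdir = tempdir())
```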
## Changes to Git/GitHub functionality

The default Git protocol is now "https" and we no longer provide an interactive choice, by default, in interactive sessions. As always, a user can express a preference for "ssh" in individual function calls, for an R session via `use_git_protocol()`, and for all R sessions via the `usethis.protocol` option (#1262).

`pr_resume()` is a new function for resuming work on an existing local PR branch. It can be called argument-less, to select a branch interactively.

`pr_fetch()` can also be called with no arguments, to select a PR interactively. The `owner` argument is replaced by `target`, with a choice of the source (default) or primary repo.

`pr_forget()` is a new function for abandoning a PR you initiated locally or fetched from GitHub. It only does local clean up and, for example, doesn't delete a remote branch or close a PR (#1263).

`pr_view()` can now be called with no arguments. If the current branch is associated with an open PR, we target that and, otherwise, we offer an interactive selection.

`pr_finish()` deletes the remote PR branch if the PR has been merged and the current user has the power to do so, i.e. an external contributor deleting their own branch or a maintainer deleting a branch associated with an internal PR (#1150). It no longer errors if the PR branch has already been deleted (#1196).

`pr_pull_upstream()` is renamed to `pr_merge_main()` to emphasize that it merges the **main** line of development into the current branch, where the main line of development is taken to mean the default branch, as reported by `git_branch_default()`, of the source repo, which could be either `upstream` or `origin`, depending on the situation.

`create_from_github()` will only create a read-only clone, due to lack of a GitHub personal access token, if explicitly directed to do so via `fork = FALSE`.

`create_from_github()` and `use_tidy_thanks()` accept browser and Git URLs as the `repo_spec` argument, to be friendlier to copy/paste. When a URL is passed, the `host` is also extracted from it.

`create_github_token()` is a new name for the function previously known as `browse_github_token()` and `browse_github_pat()`.

`issue_close_community()` and `issue_reprex_needed()` are two new functions for maintainers who process lots of GitHub issues. They automate canned replies and actions, e.g. labelling or closing (#940).

GitHub Actions is the preferred platform for continuous integration, because that is what the tidyverse team currently uses and maintains. Functions related to Travis-CI and AppVeyor are soft-deprecated to raise awareness about this change and to make it clear that, if substantial maintenance becomes necessary, we may elect to retire the functions (#1169).

`browse_github_actions()` is a new function to open the Actions page of the respective repo on GitHub, similar to existing `browse_*()` functions (@pat-s, #1102).

`use_github_pages()` is a new function to activate or reconfigure the GitHub Pages site associated with a repository (#224).

`use_tidy_pkgdown()` implements the complete pkgdown configuration used by the tidyverse team (#224).

`pr_sync()` is defunct and can be replicated by calling `pr_pull()`, `pr_merge_main()`, then `pr_push()`.
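The `pr_*()` helpers compose into a short, repeatable session. A hedged sketch from the maintainer's side; the PR number is hypothetical, and most steps can also be run interactively without arguments, as noted above:

```r
# Check out the branch behind an open PR (number is a placeholder)
usethis::pr_fetch(123)

# Merge the default branch of the source repo into the PR branch
usethis::pr_merge_main()

# Push the updated branch back to the PR
usethis::pr_push()

# Once the PR is merged on GitHub, clean up the local and remote branches
usethis::pr_finish()
```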
## Licensing improvements

All `use_*_license()` functions now work for projects, not just packages.

`use_apl2_license()` (not `use_apache_license()`) and `use_gpl3_license()` no longer modify the license text (#1198).

`use_mit_license()` now sets the default copyright holder to "{package} authors". This makes it more clear that the copyright holders are the contributors to the package; unless you are using a CLA, there is no one copyright holder of a package (#1207).

New `use_gpl_license()` and `use_agpl_license()` make it easier to pick specific versions of the GPL and AGPL licenses, and to choose whether or not you include future versions of the license. Both default to version 3 (and above).

New `use_proprietary_license()` allows your package to pass R CMD check while making it clear that your code is not open source (#1163). Thanks to @atheriel for the blog post suggesting the wording: https://unconj.ca/blog/copyright-in-closed-source-r-packages-the-right-way.html

`use_lgpl_license()` now uses version 3 (and above), and gains new `version` and `include_future` arguments to control which version is used.

`use_gpl3_license()`, `use_agpl3_license()`, and `use_apl2_license()` have been deprecated in favour of the new `version` argument to `use_gpl_license()`, `use_agpl_license()`, and `use_apache_license()`.

The `name` argument to `use_mit_license()` has been changed to `copyright_holder` to make the purpose more clear. The `name` argument has been removed from all other license functions because it is not needed; no other license makes an assertion about who the copyright holder is.

## RStudio preferences

usethis is now fully cognizant of the [changes to RStudio preferences](https://www.rstudio.com/blog/rstudio-1-3-preview-configuration/) in RStudio 1.3:

`edit_rstudio_snippets()` looks in the new location and, if you have snippets in the old location, will automatically copy them to the new location (#1204).

New `edit_rstudio_prefs()` opens the RStudio preferences file for editing (#1148).

`use_blank_slate()` can now configure your global, i.e. user-level, RStudio preference, in addition to project-level (#1018).

## Other changes

`browse_package()` and `browse_project()` are new functions that let the user choose from a list of URLs derived from local Git remotes and DESCRIPTION (local or possibly on CRAN) (#1113).

The legacy `"devtools.desc"` option is no longer consulted when populating a new DESCRIPTION file. You must use `"usethis.description"` now (#1069).

`use_dev_package()` gains a `remote` parameter to allow you to specify the remote. The existing behaviour, which adds an `OWNER/REPO` GitHub remote, remains the default (#918, @ijlyttle).

`use_cpp11()` is a new function to set up an R package to use cpp11.

`create_package(roxygen = FALSE)` once again writes a valid NAMESPACE file (and also has no Roxygen* fields in DESCRIPTION) (#1120).

`create_package()`, `create_project()`, `create_from_github()`, and `proj_activate()` work better with relative paths, inside and outside of RStudio (#1122, #954).

`use_testthat()` gains an `edition` argument to support testthat v3.0.0 (#1185); see the sketch below.

`use_version()` now updates `src/version.c` if it exists and contains a line matching `PKG_version = "x.y.z";`.

usethis has been re-licensed as MIT (#1252, #1253).
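For example, opting a package into testthat's third edition now happens at setup time; a minimal sketch of the new argument:

```r
# Set up testthat and declare the 3rd edition in DESCRIPTION
usethis::use_testthat(edition = 3)
```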
## Dependency changes

New Imports: gert, jsonlite (was already an indirect dependency), lifecycle, rappdirs.

No longer in Imports: git2r, rematch2.

# usethis 1.6.3

Patch release to refactor usage of withr in the tests for forward compatibility with an upcoming withr release. All changes are within the usethis tests.

# usethis 1.6.1

Patch release to align some path handling internals with an update coming in the fs package.

* `use_github_links()` is a bit more clever about remotes (e.g. `origin` vs. `upstream`), which makes it easier to make a PR that adds GitHub links for a package you've forked.

* `use_pkgdown()` now `.gitignore`s the destination directory and only adds the destination directory to the config file if it departs from the default (which is `docs/`).

* `use_tidy_ci()` is now deprecated in favour of `use_tidy_github_actions()` (#1098).

* `use_github_action_check_standard()` is a new intermediate workflow that checks on more platforms than `_release`, but is less exhaustive than `_full` (@jimhester).

* `create_tidy_package()` now uses an MIT license (@topepo, #1096).

# usethis 1.6.0

## GitHub actions

* New `use_github_actions()`, `use_github_action_check_release()`, `use_github_action_check_full()`, and `use_github_action_pr_commands()` set up GitHub Actions for a package (@jimhester).

* We now recommend GitHub Actions instead of Travis-CI or AppVeyor, and strongly recommend upgrading your packages.

* Fix the `use_github_action()` URL parameter to ensure custom URLs are allowed (@coatless, #1065).

## Package creation

* `create_package()` gains a `roxygen` argument. If `TRUE` (the default), it adds a `RoxygenNote` field to the `DESCRIPTION` (which means the first run of `devtools::check()` will re-document the package, #963), and creates an empty `NAMESPACE` (which means you'll always need an explicit `@export` if you want to export functions, #927). It also turns markdown processing on by default (#911).

* `use_rstudio()` now sets the `LineEndingConversion` to `Posix` so that packages created using usethis always use LF line endings, regardless of who contributes to them (#1002).

* In the `usethis.description` option, you can now set `Authors@R = person()` directly, without having to wrap it in an additional layer of quotes. If setting this in your `.Rprofile`, you'll need to use `utils::person()` since the utils package isn't loaded until after your profile is executed.

## PR helpers

* A new article, [Pull request helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html), demonstrates how to use the `pr_*()` functions (@mine-cetinkaya-rundel, #802).

* `pr_finish()` checks that you don't have any local changes (#805), and can optionally finish any PR, not just the current one (#1040).

* `pr_pause()` and `pr_fetch()` now automatically pull to get the latest changes (#959, #960) and refresh RStudio's git pane (#706).

* `pr_push()` now works for a repository with no open pull requests (@maurolepore, #990).

* `pr_pull()` gives more information about which files have merge conflicts and automatically opens conflicted files for editing (#1056).

## Other new features

* New `rename_files()` makes it easy to rename paired `R/` and `test/` files (#784).

* New `ui_silence()` makes it easier to selectively silence some UI output.

* New `use_agpl3_license()` (@pachamaltese, #870).

* New `use_data_table()` to set up a package for Import-ing `data.table` (@michaelchirico, #897).

* `use_latest_dependencies()` replaces `use_tidy_version()` as the new name better reflects its usage (#771).

* New `use_lifecycle()` helper to import the lifecycle badges for functions and arguments in your package. Learn more on the lifecycle package website.

* `use_release_issue()` will include additional bullets if your package includes a `release_bullets()` function which returns a character vector (and the package has been loaded with `load_all()`) (#941); see the sketch below.
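A hedged sketch of that hook: define `release_bullets()` somewhere under `R/` and, after `devtools::load_all()`, `use_release_issue()` appends its output to the checklist. The bullet text here is purely illustrative:

```r
# In a file under R/: extra, package-specific release reminders
release_bullets <- function() {
  c(
    "Re-run the extended benchmark suite",
    "Regenerate precomputed vignette data"
  )
}
```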
## Minor improvements and bug fixes

* When writing files, usethis now respects line endings. Default line endings are taken from the `.Rproj` file (if available), otherwise the `DESCRIPTION`, otherwise the first file found in `R/`, and, all else failing, your platform default (#767). It should do a better job of preserving UTF-8 files on Windows (#969).

* `browse_github()` now always goes to the canonical GitHub site: `https://github.com/user/repo`. This is slightly worse than the current behaviour but makes the function more consistent across packages, and considerably simplifies the implementation.

* `browse_circle()` opens the project dashboard on Circle CI.

* `create_download_url()` is a new helper for making "ZIP file download" URLs suitable for use with `use_course()` and `use_zip()`, starting with the URLs that mere mortals can usually get their hands on in a browser (@fmichonneau, #406).

* `create_package()` no longer fails partway through if you have a malformed `usethis.description` option (#961).

* `create_package()` will now create a package in a symlink to a directory (#794).

* `create_package()` and `use_description()` gain a `check_name` argument to control whether to check for package names invalid for CRAN (@noamross, #883).

* `edit_file()` and `use_test()` gain an `open` parameter that allows you to control whether or not the file is opened for editing by the user (#817).

* `edit_rstudio_snippets()` makes it more clear which snippet types are allowed and that user's snippets mask the built-in snippets (@GegznaV, #885).

* `git_sitrep()` now reports project-specific user name and email, if set (#837), and email(s) associated with your GitHub account (@dragosmg, #724).

* `ui_yeah()` and `ui_nope()` allow you to override the default "yes" and "no" strings and to opt out of shuffling (@rundel, #796).

* `use_circleci()` uses correct delimiters in its template (@jdblischak, #835).

* `use_circleci_badge()` is now exported (@pat-s, #920).

* `use_code_of_conduct()` now generates an absolute link to the code of conduct on the pkgdown website or original source, to avoid R CMD check issues (#772).

* `use_course()` and `use_zip()` are now equipped with some retry capability, to cope with intermittent failure or the need for a longer connect timeout (#988).

* `use_data()` automatically bumps the R dependency to 2.10 (#962).

* `use_data_raw()` template quotes the dataset name correctly (#736, @mitchelloharawild).

* `use_description_defaults()` now shows the default fields combined with any options that you have set.

* `use_dev_package()` now supports packages installed from any remote type, not just GitHub (@antoine-sachet, #1071).

* `use_git()` will now create an initial commit if needed (#852).

* `use_github_release()` no longer fails if you have no news bullets (#1048).

* `use_github_release()` now tags the latest local commit instead of the latest remote commit on the default branch (@davidchall, #1029).

* `use_gpl3_license()` now completes the license by providing additional information in a file named LICENSE, just like `use_mit_license()` and friends (@Cervangirard, #683).

* `use_logo()` now generates the correct href if the pkgdown `url` is set (@mitchelloharawild, #986).

* `use_make()` gains a missing closing parenthesis (@ryapric, #804).

* `use_markdown_template()` no longer uses an unexported function in its default arguments (@fmichonneau, #761).

* `use_testthat()` and `use_test()` now work in projects, not just packages (#1017).

* `use_test()` works on Windows when called without arguments (#901).

* `use_tidy_issue_template()` uses the current GitHub format (@Maschette, #756).

* `use_travis()`, `use_travis_badge()`, and `browse_travis()` now default to `ext = "com"`, since travis-ci.com is now recommended over travis-ci.org (@riccardoporreca, #1038).
* `use_release_issue()` reminds you to re-generate `README.md`, if needed (#767).

* `use_r()` and `use_test()` throw a clear error if multiple names are provided (@strboul, #862).

* `use_rcpp()` and `use_c()` now ensure `src/` contains at least one `.cpp` or `.c` placeholder file, so that the package can be built (@coatless, #720).

* `usethis.destdir` is a new option that is consulted when deciding where to put a new folder created by `use_course()` or `create_from_github()` (@malcolmbarrett, #1015).

* `use_lifecycle()` no longer adds the lifecycle package to the DESCRIPTION file. With the new roxygen markdown syntax for including badges, lifecycle has become a build-time dependency.

## Dependency changes

New Imports: cli, rematch2, rlang.

gh minimum version is bumped to v1.1.0, due to changed behaviour around requests that return nothing.

clisymbols is removed from Imports.

# usethis 1.5.1

This is a patch release with various small features and bug fixes.

## Using the pipe `%>%` or the tidy eval toolkit in your package

* The templates used by `use_pipe()` and `use_tidy_eval()` use a more robust form of cross-reference links, linking to files rather than topics. This should silence some warnings seen on Windows at install time (#730, #731, @jmgirard).

* `use_pipe()` gains a logical `export` argument, so it can do the setup necessary to use the pipe operator when it is re-exported (`export = TRUE`, which is the default and preserves the previous behaviour) and when it is not (`export = FALSE`) (#783). See the sketch below.
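A minimal sketch of the two modes of the new `export` argument:

```r
# Re-export %>% so it becomes part of your package's public API (the default)
usethis::use_pipe(export = TRUE)

# Import %>% for internal use only, without re-exporting it
usethis::use_pipe(export = FALSE)
```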
## Git, GitHub, and pull requests

* `use_github()` removes newline `\n` characters from the description that can cause the initial push to fail (#493, @muschellij2).

* `git_sitrep()` gives better feedback if we can't validate the GitHub PAT (#725, @ijlyttle).

* `create_from_github()` sets the remote tracking branch of `master` to `upstream/master` when it creates (and clones) a fork (#792).

* `pr_pause()` can switch back to master even if there is no remote tracking branch (#715, @cderv).

## Build tools and continuous integration

* `use_tidy_ci()` is updated for R 3.6, meaning that R 3.2 is the oldest version of R supported through proactive testing.

* `use_make()` and `use_jenkins()` add a Makefile and Jenkinsfile, respectively (#501, @ryapric).

* `use_circleci()` creates a `.circleci/config.yaml` config file for CircleCI (#703, @jdblischak).

## Other

* `use_zip()` is a new variant of `use_course()` that downloads and unpacks a ZIP file, with less pedantic behaviour re: the destination directory. Both functions now also work for ZIP files with MIME type `"application/x-zip-compressed"` (#573).

* `use_version()` can detect `"(development version)"` in a NEWS header and update it with an actual version (#768, @DavisVaughan).

## Dependency changes

R 3.1 is no longer explicitly supported or tested. Our general practice is to support the current release (3.6, at time of writing), devel, and the 4 previous versions of R (3.5, 3.4, 3.3, 3.2).

fs minimum version is stated to be v1.3.0. glue minimum version is stated to be v1.3.0.

# usethis 1.5.0

## Git, GitHub (and GitLab)

usethis gains several functions to inspect and manipulate the Git situation for the current project = repository. We also provide more control and visibility into git2r's workings, especially around credentials (usethis uses git2r for all Git operations).

* `git_sitrep()` lets you know what's up with your Git, git2r, and GitHub config (#328).

* `git_vaccinate()` vaccinates your global (i.e. user-level) git ignore file. It adds standard entries for R users, such as `.Rhistory` and `.Rdata`. This decreases the chance that you commit and push files containing confidential information (#469).

* `git_remotes()` and `use_git_remote()` are new helpers to inspect or modify Git remote URLs for the repo associated with the active project (#649).

* `git_protocol()` + `use_git_protocol()` and `git_credentials()` + `use_git_credentials()` are new helpers to summon or set the Git transport protocol (SSH or HTTPS) or git2r credentials, respectively. These functions are primarily for internal use. Most users can rely on default behaviour. Use these helpers to intervene if git2r isn't discovering the right credentials (#653). usethis honors the `usethis.protocol` option, which allows you to express a general preference for SSH vs. HTTPS.

Other improvements and bug fixes:

* `use_github()` tries harder but also fails earlier, with more informative messages, making it less likely to leave the repo partially configured (#221).

* `use_github()` and `create_from_github()` gain a `protocol` argument (#494, @cderv).

* `create_from_github()` pulls from upstream master in a fork (#695, @ijlyttle).

* `use_release_issue()` creates a GitHub issue containing a release checklist, reflecting the standard practices of the tidyverse team (#338).

* `use_github_release()` creates a draft GitHub release using the entries in `NEWS.md` (#137).

* `use_gitlab_ci()` creates a `gitlab-ci.yaml` config file for GitLab CI (#565, @overmar).

* `use_git_config()` now invisibly returns the previous values of the settings.

* `use_github_labels()` has been rewritten to be more flexible. You can now supply a repo name, and `descriptions`, and you can set colours/descriptions independently of creating labels. You can also `rename` existing labels (#290).

## GitHub pull requests

We've added **experimental** functions to work with GitHub pull requests. They are aimed at both a maintainer (who may make, review, and modify pull requests) and a contributor (who may make or explore pull requests).

* `git_sitrep()` includes a section at the end aimed at describing "pull request readiness". Expect that to develop and expand.

* `pr_init()`, `pr_fetch()`, `pr_push()`, `pr_pull()`, `pr_finish()`, and `pr_view()` constitute the new family of helpers. They are designed to be smart about the significance of remotes with the standard names of `origin` and `upstream` and to facilitate both internal and external pull requests.

## Partial file management

usethis gains tooling to manage part of a file. This is currently used for managing badges in your README and roxygen import tags:

* `use_badge()` and friends now automatically add badges if your README contains a specially formatted badge block (#497):

    ```
    <!-- badges: start -->
    <!-- badges: end -->
    ```

* `use_tibble()` and `use_rcpp()` automatically add roxygen tags to `{package}-package.R` if it contains a specially formatted namespace block (#517):

    ```R
    ## usethis namespace: start
    ## usethis namespace: end
    NULL
    ```

Unfortunately this means that `use_rcpp()` no longer supports non-roxygen2 workflows, but I suspect the set of people who use usethis and Rcpp but not roxygen2 is very small.
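For instance, a hedged sketch of dropping a badge into the block shown above; the badge name and URLs are illustrative placeholders, not values usethis generates for you:

```r
# Insert a custom badge between the <!-- badges: start/end --> markers
usethis::use_badge(
  "Lifecycle: experimental",
  href = "https://lifecycle.r-lib.org/articles/stages.html",
  src  = "https://img.shields.io/badge/lifecycle-experimental-orange.svg"
)
```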
## Extending and wrapping usethis

* New `proj_activate()` lets you activate a project, either opening a new RStudio session (if you use RStudio) or changing the working directory (#511).

* `proj_get()` and `proj_set()` no longer have a `quiet` argument. The user-facing message about setting a project is now under the same control as other messages, i.e. `getOption("usethis.quiet", default = FALSE)` (#441).

* A new set of `ui_*()` functions makes it possible to give your own code the same user interface as usethis (#308). All use the glue and crayon packages to power easy interpolation and formatting. There are four families of functions (see the sketch after this list):

  * block styles: `ui_line()`, `ui_done()`, `ui_todo()`, `ui_oops()`, `ui_info()`.
  * conditions: `ui_stop()`, `ui_warn()`.
  * questions: `ui_yeah()`, `ui_nope()`.
  * inline styles: `ui_field()`, `ui_value()`, `ui_path()`, `ui_code()`.

* `with_project()` and `local_project()` are new withr-style functions to temporarily set an active usethis project. They make usethis functions easier to use in an *ad hoc* fashion or from another package (#441).
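A hedged sketch of what these families look like from a wrapper package or script; the file names and messages are invented for illustration:

```r
# Block styles: progress-style messages in the usethis voice,
# with inline styles interpolated via glue
usethis::ui_todo("Edit {usethis::ui_path('R/data.R')} before continuing")
usethis::ui_done("Set {usethis::ui_field('Depends')} in DESCRIPTION")

# Questions: returns TRUE/FALSE in an interactive session
if (usethis::ui_yeah("Overwrite the existing draft?")) {
  usethis::ui_info("Overwriting...")
}
```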
## Tidyverse standards

These standards are (aspirationally) used by all tidyverse packages; you are welcome to use them if you find them helpful.

* Call `use_tidy_labels()` to update GitHub labels. Colours are less saturated, docs is now documentation, we use some emoji, and performance is no longer automatically added to all repos (#519). Repo-specific issues should be given colour `#eeeeee` and have an emoji.

* Call `use_logo()` to update the package logo to the latest specifications: `man/figures/logo.png` should be 240 x 278, and README should reference it with an HTML `<img>` tag. This gives a nicer display on retina displays. The logo is also linked to the pkgdown site if available (#536).

* When creating a new package, use `create_tidy_package()` to start with a package following the tidyverse standards (#461).

* `NEWS.md` for the development version should use "(development version)" rather than the specific version (#440).

* pkgdown sites should now be built by travis and deployed automatically to GitHub pages. `use_pkgdown_travis()` will help you set that up.

* When starting the release process, call `use_release_issue()` to create a release checklist issue (#338).

* Prior to CRAN submission, call `use_tidy_release_test_env()` to update the test environment section in `cran-comments.md` (#496).

* After acceptance, try `use_github_release()` to automatically create a release. It's created as a draft so you have a chance to look it over before publishing.

* `use_vignette()` includes a standard initialisation chunk with `knitr::opts_chunk$set(comment = "#>", collapse = TRUE)` which should be used for all Rmds.

## New functions not already mentioned

* `use_devtools()` (#624), `use_conflicted()` (#362), and `use_reprex()` (#465) help add useful packages to your `.Rprofile`.

* `use_partial_warnings()` helps the user add a standard warning block to `.Rprofile` (#64).

* `edit_r_buildignore()` opens `.Rbuildignore` for manual editing (#462, @bfgray3).

* `use_lgpl_license()` automates setup of the LGPL license (#448, @krlmlr).

* `use_ccby_license()` adds a CC BY 4.0 license (#547, @njtierney).

* `use_rcpp_armadillo()` and `use_rcpp_eigen()` set up a package to use RcppArmadillo or RcppEigen, respectively (#421, @coatless, @duckmayr).

* `use_c("foo")` sets up `src/` and creates `src/foo.c` (#117).

* `use_covr_ignore()` makes it easy to ignore files in test coverage (#434).

* `use_pkgdown_travis()` helps you set up pkgdown for automatic build-and-deploy from Travis-CI to GitHub Pages (#524).

* `use_addin()` does setup for RStudio addins (#353, @haozhu233).

* `use_tutorial()` creates a new interactive R Markdown tutorial, as implemented by the [`learnr` package](https://rstudio.github.io/learnr/index.html) (@angela-li, #645).

* `use_article()` creates articles, vignettes that are automatically added to `.Rbuildignore`. These appear on pkgdown sites, but are not included with the package itself (#281).

* `use_citation()` creates a basic `CITATION` template and puts it in the right place (#100).

## Other minor bug fixes and improvements

* `write_union()` appends the novel `lines`, but does not remove duplicates from existing lines (#583, @khailper).

* `use_rcpp("foo")` now creates `src/foo.cpp` (#117).

* `use_data()` gains a `version` argument and defaults to serialization format version 2 (#675).

* `use_data_raw()` accepts a name for the to-be-prepared dataset and opens a templated R script (#646).

* `browse_github()` now falls back to the CRAN organisation (with a warning) if the package doesn't have its own GitHub repo (#186).

* `create_*()` functions restore the active project if they error part way through, and use `proj_activate()` (#453, #511).

* `edit_r_profile()` and `edit_r_environ()` now respect the environment variables `R_PROFILE_USER` and `R_ENVIRON_USER`, respectively (#480).

* `use_description()` once again prints the generated description (#287).

* `use_description_field()` is no longer sensitive to whitespace, which allows `use_vignette()` to work even if the `VignetteBuilder` field is spread over multiple lines (#439).

* `use_logo()` can override an existing logo if the user gives permission (#454). It also produces retina-appropriate logos by default, and matches the aspect ratio to the specification (#499).

* `use_news_md()` will optionally commit.

* `use_package()` gains a `min_version` argument to specify a minimum version requirement (#498). Set it to `TRUE` to use the currently installed version (#386). This is used by `use_tidy_eval()` in order to require version 0.1.2 or greater of rlang (#484). See the sketch after this list.

* `use_pkgdown()` is now configurable with site options (@jayhesselberth, #467), and no longer creates the `docs/` directory (#495).

* `use_test()` no longer forces the filename to be lowercase (#613, @stufield).

* `use_test()` will not include a `context()` in the generated file if used with testthat 2.1.0 and above (the future release of testthat) (#325).

* `use_tidy_description()` sets the `Encoding` field in `DESCRIPTION` (#502, @krlmlr).

* `use_tidy_eval()` re-exports `:=` (#595, @jonthegeek).

* `use_tidy_versions()` has a `source` argument so that you can choose to use local or CRAN versions (#309).

* `use_travis()` gains an `ext` argument, defaulting to `"org"`. Use `ext = "com"` for `https://travis-ci.com` (@cderv, #500).

* `use_version()` asks before committing.

* `use_vignette()` now has a `title` argument which is used in the YAML header (in the two places where it is needed). The vignettes also lose the default author and date fields (@rorynolan, #445), and the R Markdown starter material. They gain a standard setup chunk.

* `use_version("dev")` now creates a standard "(development version)" heading in `NEWS.md` (#440).

* `use_vignette()` now checks if the vignette name is valid (starts with a letter and consists of letters, numbers, hyphens, and underscores) and throws an error if not (@akgold, #555).

* `restart_rstudio()` now returns `FALSE` in RStudio if no project is open, fixing an issue that caused errors in helpers that suggest restarting RStudio (@gadenbuie, #571).
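A minimal sketch of the `min_version` argument mentioned in the list above; the packages named are just examples:

```r
# Require at least a specific version of a dependency
usethis::use_package("rlang", min_version = "0.1.2")

# Use whatever version is currently installed locally as the minimum
usethis::use_package("glue", min_version = TRUE)
```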
* purrr and yaml are new in Imports. # usethis 1.4.0 ## File system All usethis file system operations now use the [fs](https://fs.r-lib.org) package (#177). This should not change how usethis functions, but users may notice these features of fs-mediated paths: - Paths are "tidy", meaning `/` is the path separator and there are never multiple or trailing `/`. - Paths are UTF-8 encoded. - A Windows user's home directory is interpreted as `C:\Users\username` (typical of Unix-oriented tools, like Git and ssh; also matches Python), as opposed to `C:\Users\username\Documents` (R's default on Windows). Read more in [`fs::path_expand()`](https://fs.r-lib.org/reference/path_expand.html). ## Extending or wrapping usethis These changes make it easier for others to extend usethis, i.e. to create workflow packages specific to their organization, or to use usethis in other packages. * `proj_path()` is newly exported. Use it to build paths within the active project. Like `proj_get()` and `proj_set()`, it is not aimed at end users, but rather for use in extension packages. End users should use [rprojroot](https://rprojroot.r-lib.org) or its simpler companion, [here](https://here.r-lib.org), to programmatically detect a project and build paths within it (#415, #425). * `edit_file()`, `write_over()`, and `write_union()` are newly exported helpers. They are mostly for internal use, but can also be useful in packages that extend or customize usethis (#344, #366, #389). * `use_template()` no longer errors when a user chooses not to overwrite an existing file and simply exits with confirmation that the file is unchanged (#348, #350, @boshek). * `getOption("usethis.quiet", default = FALSE)` is consulted when printing user-facing messages. Set this option to `TRUE` to suppress output, e.g., to use usethis functions quietly in another package. For example, use `withr::local_options(list(usethis.quiet = TRUE))` in the calling function (#416, #424). ## New functions * `proj_sitrep()` reports current working directory, the active usethis project, and the active RStudio Project. Call this function if things seem weird and you're not sure what's wrong or how to fix it. Designed for interactive use and debugging, not for programmatic use (#426). * `use_tibble()` does minimum setup necessary for a package that returns or exports a tibble. For example, this guarantees a tibble will print as a tibble (#324 @martinjhnhadley). * `use_logo()` resizes and adds a logo to a package (#358, @jimhester). * `use_spell_check()` adds a whitelist of words and a unit test to spell check package documentation during `R CMD check` (#285 @jeroen). ## Other small changes and bug fixes * usethis has a new logo! (#429) * `use_course()` reports progress during download (#276, #380). * `use_git()` only makes an initial commit of all files if user gives explicit consent (#378). * `create_from_github()`: the `repo` argument is renamed to `repo_spec`, since it takes input of the form "OWNER/REPO" (#376). * `use_depsy_badge()` is defunct. The Depsy project has officially concluded and is no longer being maintained (#354). * `use_github()` fails earlier, with a more informative message, in the absence of a GitHub personal access token (PAT). Also looks for the PAT more proactively in the usual environment variables (i.e., GITHUB_PAT, GITHUB_TOKEN) (#320, #340, @cderv). * The logic for setting DESCRIPTION fields in `create_package()` and `use_description()` got a Spring Cleaning. 
Fields directly specified by the user take precedence, then the named list in `getOption("usethis.description")` is consulted, and finally defaults built into usethis. `use_description_defaults()` is a new function that reveals fields found in options and built into usethis. Options specific to one DESCRIPTION field, e.g. `devtools.desc.license`, are no longer supported. Instead, use a single named list for all fields, preferably stored in an option named `"usethis.description"` (however, `"devtools.desc"` is still consulted for backwards compatibility). (#159, #233, #367) ## Dependency changes New Imports: fs, glue, utils. No longer in Imports: backports, httr, rematch2, rmarkdown (moved to Suggests), styler (moved to Suggests). # usethis 1.3.0 * usethis has a website: <https://usethis.r-lib.org> (#217). It includes an article with advice on system setup, for usethis and for R development more generally. * `edit_*()` functions now return the target path, invisibly (#255). * `edit_git_ignore(scope = "user")` prefers `~/.gitignore`, but detects an existing `~/.gitignore_global`, if it exists. If a new global gitignore file is created, it is created as `~/.gitignore` and recorded in the user's git config as the `core.excludesfile` (#255). * `create_from_github()` gains several arguments and new functionality. The `protocol` argument lets the user convey whether remote URLs should be ssh or https. In the case of "fork and clone", the original repo is added as the `upstream` remote. It is now possible -- although rarely necessary -- to directly specify the GitHub PAT, credentials (in git2r form), and GitHub host (#214, #214, #253). * `use_github_labels()` can create or update the colour of arbitrary GitHub issue labels, defaulting to a set of labels and colours used by the tidyverse packages, which are now exposed via `tidy_labels()`. That set now includes the labels "good first issue" and "help wanted" (#168, #249). * `appveyor_info()` no longer reverses the repo's URL and image link. Corrects the markdown produced by `use_appveyor_badge()` (#240, @llrs). * `use_cran_badge()` uses an HTTPS URL for the CRAN badge image (#235, @jdblischak). * `create_package()` and `create_project()` return a normalized path, even if the target directory does not pre-exist (#227, #228). ## New functions * `use_git_config()` can set the user's Git name or email, globally or locally in a project/repo (#267). * `browse_github_pat()` goes to the webpage where a GitHub user can create a personal access token (PAT) for the GitHub API. If the user configures a PAT, they can use functions like `create_from_github()` and `use_github()` to easily create and connect GitHub repos to local projects (#248, #257, @jeroen, via @jennybc). * `use_version()` increments the version of the active package, including an interactive chooser. `use_dev_version()` is now a special-case wrapper around this (#188, #223, @EmilHvitfeldt). * `use_tidy_github()` creates a standard set of files that make a GitHub repository more navigable for users and contributors: an issue template, contributing guidelines, support documentation, and a code of conduct. All are now placed in a `.github/` subdirectory (#165, @batpigandme). * `use_bioc_badge()` creates a Bioconductor badge that links to the build report (#271, @LiNk-NY). * `use_binder_badge()` creates a badge indicating the repository can be launched in an executable environment via [Binder](https://mybinder.org/) (#242, @uribo). 
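Taken together, the Git/GitHub helpers introduced in this release support a simple first-time setup. The following sketch is illustrative only: the name, email, repo spec, and destination directory are placeholders, and it assumes an interactive session with network access.

``` r
library(usethis)

# Record your name and email in the global Git configuration
use_git_config(user.name = "Jane Doe", user.email = "jane@example.org")

# Open the GitHub page where a personal access token (PAT) can be created
browse_github_pat()

# With a PAT configured, create a local project from an existing GitHub repo,
# forking it first if you don't have write access (repo spec and destdir are
# placeholders)
create_from_github("r-lib/usethis", destdir = "~/projects")
```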
# usethis 1.2.0 ## New functions * `use_course()` downloads a folder's worth of materials from a ZIP file, with deliberate choices around the default folder name and location. Developed for use at the start of a workshop. Helps participants obtain materials from, e.g., a DropBox folder or GitHub repo (#196). * `use_blank_slate()` provides a way to opt in to an RStudio workflow where the user's workspace is neither saved nor reloaded between R sessions. Automated for `scope = "project"`. Provides UI instructions for `scope = "user"`, for now (#139). * `use_tidy_style()` styles an entire project according to the tidyverse style guide (#72, #197 @lorenzwalthert). * GitHub conventions common to tidyverse packages are enacted by `use_tidy_contributing()`, `use_tidy_issue_template()`, and `use_tidy_support()` (@batpigandme, #143, #166). ## Other changes * New projects that don't exhibit other obvious criteria for being a "project" will include a sentinel, empty file named `.here`, so they can be recognized as a project. * Project launching and switching works on RStudio Server (#115, #129). * `use_template()` is newly exported, so that other packages can provide templating functions using this framework (@ijlyttle #120). * `use_readme_rmd()` and `use_readme_md()` work, in a similar fashion, for projects that are and are not a package (#131, #135). * `use_readme_rmd()` once again creates a pre-commit git hook, to help keep `README.Rmd` and `README.md` in sync (@PeteHaitch #41). * Substantial increase in unit test coverage. # usethis 1.1.0 ## New helpers * `browse_github()`, `browse_github_issues()`, `browse_github_pulls()`, `browse_cran()` and `browse_travis()` open useful websites related to the current project or a named package (#96, #103). * `create_from_github()` creates a project from an existing GitHub repository, forking if needed (#109). * `use_cc0_license()` applies a CC0 license, particularly appropriate for data packages (#94). * `use_lifecycle_badge()` creates a badge describing the current stage in the project lifecycle (#48). * `use_pkgdown()` creates the basics needed for a [pkgdown](https://github.com/r-lib/pkgdown) website (#88). * `use_r("foo")` creates and edits the file `R/foo.R`. If you have a test file open, `use_r()` will open the corresponding `.R` file (#105). * `use_tidy_versions()` sets a minimum version requirement for all dependencies. ## Bug fixes and improvements * `use_dev_version()` now correctly updates the `Version` field in a package description file (@tjmahr, #104). * `use_revdep()` now also git-ignores the SQLite database (#107). * `use_tidy_eval()` has been tweaked to reflect current guidance (#106). # usethis 1.0.0 This is a new package that extracts many functions that previously lived in devtools, as well as providing more building blocks so you can create your own helpers. As well as the many new helpers listed below, there are three main improvements to the package: * More support for general R projects, other than packages. * A notion of an "active" project that all commands operate on. * Refined output. usethis is gradually evolving towards supporting more general R "projects", not just packages. This is still a work in progress, so please let me know if you use a function that you think should work with projects but doesn't. You can also try out the new `create_project()` which creates a basic RStudio project. The concept of the working directory and the "base path" have been refined. 
Rather than using an argument to specify the active project, all `use_` functions now use a global active project setting, as returned by `proj_get()`. This is cached throughout a session, although it will be updated by `create_package()` and `create_project()`. You'll now get a clear error if you attempt to `use_something()` outside of a project, and `create_something()` will warn if you're trying to create inside an existing project. The output from all usethis commands has been reviewed to be informative but not overwhelming. usethis takes advantage of colour (using crayon and RStudio 1.1) to help chunk the output and clearly differentiate what you need to do vs. what has been done for you. ## New functions * `use_apl2_license()` applies the Apache 2.0 license. * `use_depsy_badge()` allows including a Depsy badge (@gvegayon, #68). * `use_dev_package()` works like `use_package()` but also adds the repo to the `Remotes` field (#32). * `use_github_labels()` will automatically set up a standard set of labels, optionally removing the default labels (#1). * `use_pipe()` creates a template to use magrittr's `%>%` in your package (#15). * `use_tidy_ci()` sets up Travis and Codecov using the tidyverse conventions (#14). * `use_tidy_description()` puts description fields in a standard order and alphabetises dependencies. * `use_tidy_eval()` imports and re-exports the recommended set of tidy eval helpers if your package uses tidy eval (#46). * `use_usethis()` opens your `.Rprofile` and gives you the code to copy and paste in. ## New edit functions A new class of functions makes it easy to edit common config files: * `edit_r_profile_user()` opens `.Rprofile` * `edit_r_environ_user()` opens `.Renviron` * `edit_r_makevars_user()` opens `.R/Makevars` * `edit_git_config_user()` opens `.gitconfig` * `edit_git_ignore_user()` opens `.gitignore` * `edit_rstudio_snippets(type)` opens `~/R/snippets/{type}.snippets` ## Updates * `use_coverage("codecov")` now sets a default threshold of 1% to try and reduce false positives (#8). * `use_description()` now sets `ByteCompile: true` so you can benefit from the byte compiler (#29). * The license functions (`use_mit_license()`, `use_apl2_license()`, and `use_gpl3_license()`) save a copy of the standard license text in `LICENSE.md`, which is then added to `.Rbuildignore`. This allows you to follow standard licensing best practices while adhering to CRAN's requirements (#10). * `use_package_doc()` uses a more modern roxygen2 template that requires less duplication. * `use_test()` will use the name of the currently open file in RStudio if you don't supply an explicit name (#89). * `use_readme_rmd()` now puts images in `man/figures/` and no longer adds to `.Rbuildignore`. This ensures that the rendered `README.md` will also work on CRAN (#16, #19). The first chunk now uses `include = FALSE` and is named setup (#19). * `use_revdep()` creates structure for use with the revdepcheck package, the preferred way to run revdep checks (#33). ## Building blocks * New `use_badge()` for adding any badge to a README. Now only prints a todo message if the badge does not already exist. * `use_directory()` is now exported (#27). ## Bug fixes and minor improvements * Functions which require code to be copied now automatically put the code on the clipboard if it is available (#52). * `create_package()` no longer creates a dependency on the current version of R. * `use_build_ignore()` now strips trailing `/`. * `use_git()` will restart RStudio if needed (and possible) (#42). 
* `use_github()` now has an organisation parameter so you can create repos in organisations (#4). * `use_template()` and `use_test()` now convert the title to a slug that only contains lowercase letters, numbers, and `-`. * `use_vignette()` now adds `*.html` and `*.R` to your `.gitignore` so you don't accidentally add in compiled vignette products (#35). * `use_travis_badge()` and `use_appveyor_badge()` are now exported functions, so they can be used even if CI was separately set up (#765, @smwindecker). usethis/DESCRIPTION0000644000175000017500000000415714154505162013610 0ustar nileshnileshPackage: usethis Title: Automate Package and Project Setup Version: 2.1.5 Authors@R: c(person(given = "Hadley", family = "Wickham", role = "aut", email = "hadley@rstudio.com", comment = c(ORCID = "0000-0003-4757-117X")), person(given = "Jennifer", family = "Bryan", role = c("aut", "cre"), email = "jenny@rstudio.com", comment = c(ORCID = "0000-0002-6983-2759")), person(given = "Malcolm", family = "Barrett", role = "aut", email = "malcolmbarrett@gmail.com", comment = c(ORCID = "0000-0003-0299-5825")), person(given = "RStudio", role = c("cph", "fnd"))) Description: Automate package and project setup tasks that are otherwise performed manually. This includes setting up unit testing, test coverage, continuous integration, Git, 'GitHub', licenses, 'Rcpp', 'RStudio' projects, and more. License: MIT + file LICENSE URL: https://usethis.r-lib.org, https://github.com/r-lib/usethis BugReports: https://github.com/r-lib/usethis/issues Depends: R (>= 3.4) Imports: cli (>= 3.0.1), clipr (>= 0.3.0), crayon, curl (>= 2.7), desc (>= 1.4.0), fs (>= 1.3.0), gert (>= 1.4.1), gh (>= 1.2.1), glue (>= 1.3.0), jsonlite, lifecycle (>= 1.0.0), purrr, rappdirs, rlang (>= 0.4.10), rprojroot (>= 1.2), rstudioapi, stats, utils, whisker, withr (>= 2.3.0), yaml Suggests: covr, knitr, magick, mockr, pkgload, rmarkdown, roxygen2 (>= 7.1.2), spelling (>= 1.2), styler (>= 1.2.0), testthat (>= 3.1.0) Config/Needs/website: tidyverse/tidytemplate Config/testthat/edition: 3 Encoding: UTF-8 Language: en-US RoxygenNote: 7.1.2 NeedsCompilation: no Packaged: 2021-12-09 18:41:36 UTC; jenny Author: Hadley Wickham [aut] (), Jennifer Bryan [aut, cre] (), Malcolm Barrett [aut] (), RStudio [cph, fnd] Maintainer: Jennifer Bryan Repository: CRAN Date/Publication: 2021-12-09 23:00:02 UTC usethis/README.md0000644000175000017500000001132514153722476013364 0ustar nileshnilesh # usethis [![R-CMD-check](https://github.com/r-lib/usethis/workflows/R-CMD-check/badge.svg)](https://github.com/r-lib/usethis/actions) [![Codecov test coverage](https://codecov.io/gh/r-lib/usethis/branch/main/graph/badge.svg)](https://app.codecov.io/gh/r-lib/usethis?branch=main) [![CRAN status](https://www.r-pkg.org/badges/version/usethis)](https://CRAN.R-project.org/package=usethis) [![Lifecycle: stable](https://img.shields.io/badge/lifecycle-stable-brightgreen.svg)](https://lifecycle.r-lib.org/articles/stages.html#stable) usethis is a workflow package: it automates repetitive tasks that arise during project setup and development, both for R packages and non-package projects. ## Installation Install the released version of usethis from CRAN: ``` r install.packages("usethis") ``` Or install the development version from GitHub with: ``` r # install.packages("devtools") devtools::install_github("r-lib/usethis") ``` ## Usage Most `use_*()` functions operate on the *active project*: literally, a directory on your computer. 
If you’ve just used usethis to create a new package or project, that will be the active project. Otherwise, usethis verifies that current working directory is or is below a valid project directory and that becomes the active project. Use `proj_get()` or `proj_sitrep()` to manually query the project and [read more in the docs](https://usethis.r-lib.org/reference/proj_utils.html). A few usethis functions have no strong connections to projects and will expect you to provide a path. usethis is quite chatty, explaining what it’s doing and assigning you tasks. `✔` indicates something usethis has done for you. `●` indicates that you’ll need to do some work yourself. Below is a quick look at how usethis can help to set up a package. But remember, many usethis functions are also applicable to analytical projects that are not packages. ``` r library(usethis) # Create a new package ------------------------------------------------- path <- file.path(tempdir(), "mypkg") create_package(path) #> ✓ Creating '/tmp/Rtmp86lEpD/mypkg/' #> ✓ Setting active project to '/private/tmp/Rtmp86lEpD/mypkg' #> ✓ Creating 'R/' #> ✓ Writing 'DESCRIPTION' #> Package: mypkg #> Title: What the Package Does (One Line, Title Case) #> Version: 0.0.0.9000 #> Authors@R (parsed): #> * First Last [aut, cre] (YOUR-ORCID-ID) #> Description: What the package does (one paragraph). #> License: `use_mit_license()`, `use_gpl3_license()` or friends to pick a #> license #> Encoding: UTF-8 #> Roxygen: list(markdown = TRUE) #> RoxygenNote: 7.1.2 #> ✓ Writing 'NAMESPACE' #> ✓ Setting active project to '' # only needed since this session isn't interactive proj_activate(path) #> ✓ Setting active project to '/private/tmp/Rtmp86lEpD/mypkg' #> ✓ Changing working directory to '/tmp/Rtmp86lEpD/mypkg/' # Modify the description ---------------------------------------------- use_mit_license("My Name") #> ✓ Setting License field in DESCRIPTION to 'MIT + file LICENSE' #> ✓ Writing 'LICENSE' #> ✓ Writing 'LICENSE.md' #> ✓ Adding '^LICENSE\\.md$' to '.Rbuildignore' use_package("ggplot2", "Suggests") #> ✓ Adding 'ggplot2' to Suggests field in DESCRIPTION #> • Use `requireNamespace("ggplot2", quietly = TRUE)` to test if package is installed #> • Then directly refer to functions with `ggplot2::fun()` # Set up other files ------------------------------------------------- use_readme_md() #> ✓ Writing 'README.md' #> • Update 'README.md' to include installation instructions. use_news_md() #> ✓ Writing 'NEWS.md' use_test("my-test") #> ✓ Adding 'testthat' to Suggests field in DESCRIPTION #> ✓ Setting Config/testthat/edition field in DESCRIPTION to '3' #> ✓ Creating 'tests/testthat/' #> ✓ Writing 'tests/testthat.R' #> ✓ Writing 'tests/testthat/test-my-test.R' #> • Edit 'tests/testthat/test-my-test.R' x <- 1 y <- 2 use_data(x, y) #> ✓ Adding 'R' to Depends field in DESCRIPTION #> ✓ Creating 'data/' #> ✓ Setting LazyData to 'true' in 'DESCRIPTION' #> ✓ Saving 'x', 'y' to 'data/x.rda', 'data/y.rda' #> • Document your data (see 'https://r-pkgs.org/data.html') # Use git ------------------------------------------------------------ use_git() #> ✓ Initialising Git repo #> ✓ Adding '.Rproj.user', '.Rhistory', '.Rdata', '.httr-oauth', '.DS_Store' to '.gitignore' ``` ## Code of Conduct Please note that the usethis project is released with a [Contributor Code of Conduct](https://usethis.r-lib.org/CODE_OF_CONDUCT.html). By contributing to this project, you agree to abide by its terms. 
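Building on the Usage section above, the active project can also be queried and temporarily overridden from code. The following is a minimal, illustrative sketch only: the project path and the wrapper function are placeholders, and it assumes the helpers and options described in the release notes above (`proj_sitrep()`, `with_project()`, `usethis.quiet`) are available in your installed version.

``` r
library(usethis)

# Report on the current working directory, the active usethis project,
# and (if relevant) the active RStudio Project
proj_sitrep()

# Temporarily treat another directory as the active project
# (the path is a placeholder)
with_project("~/projects/mypkg", use_news_md())

# Inside a package that wraps usethis, silence its messages for one call
quiet_news <- function() {
  withr::local_options(list(usethis.quiet = TRUE))
  use_news_md()
}
```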
usethis/man/0000755000175000017500000000000014154446735012660 5ustar nileshnileshusethis/man/use_pkgdown.Rd0000644000175000017500000000335414132400710015454 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pkgdown.R \name{use_pkgdown} \alias{use_pkgdown} \alias{use_pkgdown_github_pages} \alias{use_pkgdown_travis} \title{Use pkgdown} \usage{ use_pkgdown(config_file = "_pkgdown.yml", destdir = "docs") use_pkgdown_github_pages() use_pkgdown_travis() } \arguments{ \item{config_file}{Path to the pkgdown yaml config file} \item{destdir}{Target directory for pkgdown docs} } \description{ \href{https://pkgdown.r-lib.org}{pkgdown} makes it easy to turn your package into a beautiful website. usethis provides two functions to help you use pkgdown: \itemize{ \item \code{use_pkgdown()}: creates a pkgdown config file and adds relevant files or directories to \code{.Rbuildignore} and \code{.gitignore}. \item \code{use_pkgdown_github_pages()}: implements the GitHub setup needed to automatically publish your pkgdown site to GitHub pages: \itemize{ \item (first, it calls \code{use_pkgdown()}) \item \code{\link[=use_github_pages]{use_github_pages()}} prepares to publish the pkgdown site from the \code{github-pages} branch \item \code{\link[=use_github_action]{use_github_action("pkgdown")}} configures a GitHub Action to automatically build the pkgdown site and deploy it via GitHub Pages \item The pkgdown site's URL is added to the pkgdown configuration file, to the URL field of DESCRIPTION, and to the GitHub repo. \item Packages owned by certain GitHub organizations (tidyverse, r-lib, and tidymodels) get some special treatment, in terms of anticipating the (eventual) site URL and the use of a pkgdown template. } } \code{use_pkgdown_travis()} is deprecated; we no longer recommend that you use Travis-CI. } \seealso{ \url{https://pkgdown.r-lib.org/articles/pkgdown.html#configuration} } usethis/man/git-default-branch.Rd0000644000175000017500000001175414133333111016573 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git-default-branch.R \name{git-default-branch} \alias{git-default-branch} \alias{git_default_branch} \alias{git_default_branch_configure} \alias{git_default_branch_rediscover} \alias{git_default_branch_rename} \title{Get or set the default Git branch} \usage{ git_default_branch() git_default_branch_configure(name = "main") git_default_branch_rediscover(current_local_default = NULL) git_default_branch_rename(from = NULL, to = "main") } \arguments{ \item{name}{Default name for the initial branch in new Git repositories.} \item{current_local_default}{Name of the local branch that is currently functioning as the default branch. If unspecified, this can often be inferred.} \item{from}{Name of the branch that is currently functioning as the default branch.} \item{to}{New name for the default branch.} } \value{ Name of the default branch. } \description{ The \verb{git_default_branch*()} functions put some structure around the somewhat fuzzy (but definitely real) concept of the default branch. In particular, they support new conventions around the Git default branch name, globally or in a specific project / Git repository. } \section{Background on the default branch}{ Technically, Git has no official concept of the default branch. But in reality, almost all Git repos have an \emph{effective default branch}. If there's only one branch, this is it! It is the branch that most bug fixes and features get merged in to. 
It is the branch you see when you first visit a repo on a site such as GitHub. On a Git remote, it is the branch that \code{HEAD} points to. Historically, \code{master} has been the most common name for the default branch, but \code{main} is an increasingly popular choice. } \section{\code{git_default_branch_configure()}}{ This configures \code{init.defaultBranch} at the global (a.k.a user) level. This setting determines the name of the branch that gets created when you make the first commit in a new Git repo. \code{init.defaultBranch} only affects the local Git repos you create in the future. } \section{\code{git_default_branch()}}{ This figures out the default branch of the current Git repo, integrating information from the local repo and, if applicable, the \code{upstream} or \code{origin} remote. If there is a local vs. remote mismatch, \code{git_default_branch()} throws an error with advice to call \code{git_default_branch_rediscover()} to repair the situation. For a remote repo, the default branch is the branch that \code{HEAD} points to. For the local repo, if there is only one branch, that must be the default! Otherwise we try to identify the relevant local branch by looking for specific branch names, in this order: \itemize{ \item whatever the default branch of \code{upstream} or \code{origin} is, if applicable \item \code{main} \item \code{master} \item the value of the Git option \code{init.defaultBranch}, with the usual deal where a local value, if present, takes precedence over a global (a.k.a. user-level) value } } \section{\code{git_default_branch_rediscover()}}{ This consults an external authority -- specifically, the remote \strong{source repo} on GitHub -- to learn the default branch of the current project / repo. If that doesn't match the apparent local default branch (for example, the project switched from \code{master} to \code{main}), we do the corresponding branch renaming in your local repo and, if relevant, in your fork. See \url{https://happygitwithr.com/common-remote-setups.html} for more about GitHub remote configurations and, e.g., what we mean by the source repo. This function works for the configurations \code{"ours"}, \code{"fork"}, and \code{"theirs"}. } \section{\code{git_default_branch_rename()}}{ Note: this only works for a repo that you effectively own. In terms of GitHub, you must own the \strong{source repo} personally or, if organization-owned, you must have \code{admin} permission on the \strong{source repo}. This renames the default branch in the \strong{source repo} on GitHub and then calls \code{git_default_branch_rediscover()}, to make any necessary changes in the local repo and, if relevant, in your personal fork. See \url{https://happygitwithr.com/common-remote-setups.html} for more about GitHub remote configurations and, e.g., what we mean by the source repo. This function works for the configurations \code{"ours"}, \code{"fork"}, and \code{"no_github"}. Regarding \code{"no_github"}: Of course, this function does what you expect for a local repo with no GitHub remotes, but that is not the primary use case. 
} \examples{ \dontrun{ git_default_branch() } \dontrun{ git_default_branch_configure() } \dontrun{ git_default_branch_rediscover() # you can always explicitly specify the local branch that's been playing the # role of the default git_default_branch_rediscover("unconventional_default_branch_name") } \dontrun{ git_default_branch_rename() # you can always explicitly specify one or both branch names git_default_branch_rename(from = "this", to = "that") } } usethis/man/use_tutorial.Rd0000644000175000017500000000251413737204645015666 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tutorial.R \name{use_tutorial} \alias{use_tutorial} \title{Create a learnr tutorial} \usage{ use_tutorial(name, title, open = rlang::is_interactive()) } \arguments{ \item{name}{Base for file name to use for new \code{.Rmd} tutorial. Should consist only of numbers, letters, \verb{_} and \code{-}. We recommend using lower case.} \item{title}{The human-facing title of the tutorial.} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Creates a new tutorial below \verb{inst/tutorials/}. Tutorials are interactive R Markdown documents built with the \href{https://rstudio.github.io/learnr/index.html}{\code{learnr} package}. \code{use_tutorial()} does this setup: \itemize{ \item Adds learnr to Suggests in \code{DESCRIPTION}. \item Gitignores \verb{inst/tutorials/*.html} so you don't accidentally track rendered tutorials. \item Creates a new \code{.Rmd} tutorial from a template and, optionally, opens it for editing. \item Adds new \code{.Rmd} to \code{.Rbuildignore}. } } \examples{ \dontrun{ use_tutorial("learn-to-do-stuff", "Learn to do stuff") } } \seealso{ The \href{https://rstudio.github.io/learnr/index.html}{learnr package documentation}. } usethis/man/ui-questions.Rd0000644000175000017500000000437013737204645015616 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ui.R \name{ui-questions} \alias{ui-questions} \alias{ui_yeah} \alias{ui_nope} \title{User interface - Questions} \usage{ ui_yeah( x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame() ) ui_nope( x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame() ) } \arguments{ \item{x}{A character vector. For block styles, conditions, and questions, each element of the vector becomes a line, and the result is processed by \code{\link[glue:glue]{glue::glue()}}. For inline styles, each element of the vector becomes an entry in a comma separated list.} \item{yes}{A character vector of "yes" strings, which are randomly sampled to populate the menu.} \item{no}{A character vector of "no" strings, which are randomly sampled to populate the menu.} \item{n_yes}{An integer. The number of "yes" strings to include.} \item{n_no}{An integer. The number of "no" strings to include.} \item{shuffle}{A logical. Should the order of the menu options be randomly shuffled?} \item{.envir}{Used to ensure that \code{\link[glue:glue]{glue::glue()}} gets the correct environment. For expert use only.} } \value{ A logical. 
\code{ui_yeah()} returns \code{TRUE} when the user selects a "yes" option and \code{FALSE} otherwise, i.e. when the user selects a "no" option or refuses to make a selection (cancels). \code{ui_nope()} is the logical opposite of \code{ui_yeah()}. } \description{ These functions are used to interact with the user by posing a simple yes or no question. For details on the other \verb{ui_*()} functions, see the \link{ui} help page. } \examples{ \dontrun{ ui_yeah("Do you like R?") ui_nope("Have you tried turning it off and on again?", n_yes = 1, n_no = 1) ui_yeah("Are you sure it's plugged in?", yes = "Yes", no = "No", shuffle = FALSE) } } \seealso{ Other user interface functions: \code{\link{ui}} } \concept{user interface functions} \keyword{internal} usethis/man/use_rmarkdown_template.Rd0000644000175000017500000000223413737204645017721 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rmarkdown.R \name{use_rmarkdown_template} \alias{use_rmarkdown_template} \title{Add an RMarkdown Template} \usage{ use_rmarkdown_template( template_name = "Template Name", template_dir = NULL, template_description = "A description of the template", template_create_dir = FALSE ) } \arguments{ \item{template_name}{The name as printed in the template menu.} \item{template_dir}{Name of the directory the template will live in within \code{inst/rmarkdown/templates}. If none is provided by the user, it will be created from \code{template_name}.} \item{template_description}{Sets the value of \code{description} in \code{template.yml}.} \item{template_create_dir}{Sets the value of \code{create_dir} in \code{template.yml}.} } \description{ Adds files and directories necessary to add a custom rmarkdown template to RStudio. It creates: \itemize{ \item \code{inst/rmarkdown/templates/{{template_dir}}}. Main directory. \item \code{skeleton/skeleton.Rmd}. Your template Rmd file. \item \code{template.yml} with basic information filled in. } } \examples{ \dontrun{ use_rmarkdown_template() } } usethis/man/write-this.Rd0000644000175000017500000000361514117743363015247 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/write.R \name{write-this} \alias{write-this} \alias{write_union} \alias{write_over} \title{Write into or over a file} \usage{ write_union(path, lines, quiet = FALSE) write_over(path, lines, quiet = FALSE) } \arguments{ \item{path}{Path to target file. It is created if it does not exist, but the parent directory must exist.} \item{lines}{Character vector of lines. For \code{write_union()}, these are lines to add to the target file, if not already present. For \code{write_over()}, these are the exact lines desired in the target file.} \item{quiet}{Logical. Whether to message about what is happening.} \item{contents}{Character vector of lines.} } \value{ Logical indicating whether a write occurred, invisibly. } \description{ Helpers to write into or over a new or pre-existing file. Designed mostly for internal use. File is written with UTF-8 encoding. } \section{Functions}{ \itemize{ \item \code{write_union}: writes lines to a file, taking the union of what's already there, if anything, and some new lines. Note, there is no explicit promise about the line order. Designed to modify simple config files like \code{.Rbuildignore} and \code{.gitignore}. 
\item \code{write_over}: writes a file with specific lines, creating it if necessary or overwriting existing, if proposed contents are not identical and user is available to give permission. }} \examples{ \dontshow{ .old_wd <- setwd(tempdir()) } write_union("a_file", letters[1:3]) readLines("a_file") write_union("a_file", letters[1:5]) readLines("a_file") write_over("another_file", letters[1:3]) readLines("another_file") write_over("another_file", letters[1:3]) \dontrun{ ## will error if user isn't present to approve the overwrite write_over("another_file", letters[3:1]) } ## clean up file.remove("a_file", "another_file") \dontshow{ setwd(.old_wd) } } \keyword{internal} usethis/man/use_git_ignore.Rd0000644000175000017500000000104514117743363016145 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_ignore} \alias{use_git_ignore} \title{Tell Git to ignore files} \usage{ use_git_ignore(ignores, directory = ".") } \arguments{ \item{ignores}{Character vector of ignores, specified as file globs.} \item{directory}{Directory relative to active project to set ignores} } \description{ Tell Git to ignore files } \seealso{ Other git helpers: \code{\link{use_git_config}()}, \code{\link{use_git_hook}()}, \code{\link{use_git}()} } \concept{git helpers} usethis/man/use_package.Rd0000644000175000017500000000300614117743363015411 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/package.R \name{use_package} \alias{use_package} \alias{use_dev_package} \title{Depend on another package} \usage{ use_package(package, type = "Imports", min_version = NULL) use_dev_package(package, type = "Imports", remote = NULL) } \arguments{ \item{package}{Name of package to depend on.} \item{type}{Type of dependency: must be one of "Imports", "Depends", "Suggests", "Enhances", or "LinkingTo" (or unique abbreviation). Matching is case insensitive.} \item{min_version}{Optionally, supply a minimum version for the package. Set to \code{TRUE} to use the currently installed version.} \item{remote}{By default, an \code{OWNER/REPO} GitHub remote is inserted. Optionally, you can supply a character string to specify the remote, e.g. \code{"gitlab::jimhester/covr"}, using any syntax supported by the \href{https://remotes.r-lib.org/articles/dependencies.html#other-sources}{remotes package}.} } \description{ \code{use_package()} adds a CRAN package dependency to \code{DESCRIPTION} and offers a little advice about how to best use it. \code{use_dev_package()} adds a dependency on an in-development package, adding the dev repo to \code{Remotes} so it will be automatically installed from the correct location. } \examples{ \dontrun{ use_package("ggplot2") use_package("dplyr", "suggests") use_dev_package("glue") } } \seealso{ The \href{https://r-pkgs.org/description.html#dependencies}{dependencies section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_readme_rmd.Rd0000644000175000017500000000276614117743363016131 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/readme.R \name{use_readme_rmd} \alias{use_readme_rmd} \alias{use_readme_md} \title{Create README files} \usage{ use_readme_rmd(open = rlang::is_interactive()) use_readme_md(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? 
Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Creates skeleton README files with sections for \itemize{ \item a high-level description of the package and its goals \item R code to install from GitHub, if GitHub usage detected \item a basic example } Use \code{Rmd} if you want a rich intermingling of code and output. Use \code{md} for a basic README. \code{README.Rmd} will be automatically added to \code{.Rbuildignore}. The resulting README is populated with default YAML frontmatter and R fenced code blocks (\code{md}) or chunks (\code{Rmd}). If you use \code{Rmd}, you'll still need to render it regularly, to keep \code{README.md} up-to-date. \code{devtools::build_readme()} is handy for this. You could also use GitHub Actions to re-render \code{README.Rmd} every time you push. An example workflow can be found in the \verb{examples/} directory here: \url{https://github.com/r-lib/actions/}. } \examples{ \dontrun{ use_readme_rmd() use_readme_md() } } \seealso{ The \href{https://r-pkgs.org/release.html#important-files}{important files section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_github_pages.Rd0000644000175000017500000000511514131645451016455 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-pages.R \name{use_github_pages} \alias{use_github_pages} \title{Configure a GitHub Pages site} \usage{ use_github_pages(branch = "gh-pages", path = "/", cname = NA) } \arguments{ \item{branch, path}{Branch and path for the site source. The default of \code{branch = "gh-pages"} and \code{path = "/"} reflects strong GitHub support for this configuration: when a \code{gh-pages} branch is first created, it is \emph{automatically} published to Pages, using the source found in \code{"/"}. If a \code{gh-pages} branch does not yet exist on the host, \code{use_github_pages()} creates an empty, orphan remote branch. The most common alternative is to use the repo's default branch, coupled with \code{path = "/docs"}. It is the user's responsibility to ensure that this \code{branch} pre-exists on the host. Note that GitHub does not support an arbitrary \code{path} and, at the time of writing, only \code{"/"} or \code{"/docs"} are accepted.} \item{cname}{Optional, custom domain name. The \code{NA} default means "don't set or change this", whereas a value of \code{NULL} removes any previously configured custom domain. Note that this \emph{can} add or modify a CNAME file in your repository. If you are using Pages to host a pkgdown site, it is better to specify its URL in the pkgdown config file and let pkgdown manage CNAME.} } \value{ Site metadata returned by the GitHub API, invisibly } \description{ Activates or reconfigures a GitHub Pages site for a project hosted on GitHub. This function anticipates two specific usage modes: \itemize{ \item Publish from the root directory of a \code{gh-pages} branch, which is assumed to be only (or at least primarily) a remote branch. Typically the \code{gh-pages} branch is managed by an automatic "build and deploy" job, such as the one configured by \code{\link[=use_github_action]{use_github_action("pkgdown")}}. \item Publish from the \code{"/docs"} directory of a "regular" branch, probably the repo's default branch. The user is assumed to have a plan for how they will manage the content below \code{"/docs"}. 
} } \examples{ \dontrun{ use_github_pages() use_github_pages(branch = git_default_branch(), path = "/docs") } } \seealso{ \itemize{ \item \code{\link[=use_pkgdown_github_pages]{use_pkgdown_github_pages()}} combines \code{use_github_pages()} with other functions to fully configure a pkgdown site \item \url{https://docs.github.com/en/free-pro-team@latest/github/working-with-github-pages} \item \url{https://docs.github.com/en/free-pro-team@latest/rest/reference/repos#pages} } } usethis/man/use_rstudio.Rd0000644000175000017500000000167413737204645015522 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rstudio.R \name{use_rstudio} \alias{use_rstudio} \title{Add RStudio Project infrastructure} \usage{ use_rstudio(line_ending = c("posix", "windows")) } \arguments{ \item{line_ending}{Line ending} } \description{ It is likely that you want to use \code{\link[=create_project]{create_project()}} or \code{\link[=create_package]{create_package()}} instead of \code{use_rstudio()}! Both \verb{create_*()} functions can add RStudio Project infrastructure to a pre-existing project or package. \code{use_rstudio()} is mostly for internal use or for those creating a usethis-like package for their organization. It does the following in the current project, often after executing \code{proj_set(..., force = TRUE)}: \itemize{ \item Creates an \code{.Rproj} file \item Adds RStudio files to \code{.gitignore} \item Adds RStudio files to \code{.Rbuildignore}, if project is a package } } usethis/man/zip-utils.Rd0000644000175000017500000000636314153502006015076 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/course.R \name{zip-utils} \alias{zip-utils} \alias{use_course} \alias{use_zip} \title{Download and unpack a ZIP file} \usage{ use_course(url, destdir = getOption("usethis.destdir")) use_zip( url, destdir = getwd(), cleanup = if (rlang::is_interactive()) NA else FALSE ) } \arguments{ \item{url}{Link to a ZIP file containing the materials. To reduce the chance of typos in live settings, these shorter forms are accepted:\preformatted{* GitHub repo spec: "OWNER/REPO". Equivalent to `https://github.com/OWNER/REPO/DEFAULT_BRANCH.zip`. * bit.ly or rstd.io shortlinks: "bit.ly/xxx-yyy-zzz" or "rstd.io/foofy". The instructor must then arrange for the shortlink to point to a valid download URL for the target ZIP file. The helper [create_download_url()] helps to create such URLs for GitHub, DropBox, and Google Drive. }} \item{destdir}{The new folder is stored here. If \code{NULL}, defaults to user's Desktop or some other conspicuous place. You can also set a default location using the option \code{usethis.destdir}, e.g. \code{options(usethis.destdir = "a/good/dir")}, perhaps saved to your \code{.Rprofile} with \code{\link[=edit_r_profile]{edit_r_profile()}}} \item{cleanup}{Whether to delete the original ZIP file after unpacking its contents. In an interactive setting, \code{NA} leads to a menu where user can approve the deletion (or decline).} } \value{ Path to the new directory holding the unpacked ZIP file, invisibly. } \description{ Functions to download and unpack a ZIP file into a local folder of files, with very intentional default behaviour. Useful in pedagogical settings or anytime you need a large audience to download a set of files quickly and actually be able to find them. The underlying helpers are documented in \link{use_course_details}. 
} \section{Functions}{ \itemize{ \item \code{use_course}: Designed with live workshops in mind. Includes intentional friction to highlight the download destination. Workflow: \itemize{ \item User executes, e.g., \code{use_course("bit.ly/xxx-yyy-zzz")}. \item User is asked to notice and confirm the location of the new folder. Specify \code{destdir} or configure the \code{"usethis.destdir"} option to prevent this. \item User is asked if they'd like to delete the ZIP file. \item If new folder contains an \code{.Rproj} file, a new instance of RStudio is launched. Otherwise, the folder is opened in the file manager, e.g. Finder or File Explorer. } \item \code{use_zip}: More useful in day-to-day work. Downloads in current working directory, by default, and allows \code{cleanup} behaviour to be specified. }} \examples{ \dontrun{ # download the source of usethis from GitHub, behind a bit.ly shortlink use_course("bit.ly/usethis-shortlink-example") use_course("http://bit.ly/usethis-shortlink-example") # download the source of rematch2 package from CRAN use_course("https://cran.r-project.org/bin/windows/contrib/3.4/rematch2_2.0.1.zip") # download the source of rematch2 package from GitHub, 4 ways use_course("r-lib/rematch2") use_course("https://api.github.com/repos/r-lib/rematch2/zipball/HEAD") use_course("https://api.github.com/repos/r-lib/rematch2/zipball/main") use_course("https://github.com/r-lib/rematch2/archive/main.zip") } } usethis/man/rename_files.Rd0000644000175000017500000000147413737204645015604 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/r.R \name{rename_files} \alias{rename_files} \title{Automatically rename paired \verb{R/} and \verb{test/} files} \usage{ rename_files(old, new) } \arguments{ \item{old, new}{Old and new file names (with or without extensions).} } \description{ \itemize{ \item Moves \verb{R/\{old\}.R} to \verb{R/\{new\}.R} \item Moves \verb{tests/testthat/test-\{old\}.R} to \verb{tests/testthat/test-\{new\}.R} \item Moves \verb{tests/testthat/test-\{old\}-*.*} to \verb{tests/testthat/test-\{new\}-*.*} and updates paths in the test file. \item Removes \code{context()} calls from the test file, which are unnecessary (and discouraged) as of testthat v2.1.0. } This is a potentially dangerous operation, so you must be using Git in order to use this function. } usethis/man/github_actions.Rd0000644000175000017500000001161314131645451016142 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-actions.R \name{github_actions} \alias{github_actions} \alias{use_github_actions} \alias{use_github_actions_badge} \alias{use_github_action} \alias{use_github_action_check_release} \alias{use_github_action_check_standard} \alias{use_github_action_pr_commands} \title{GitHub Actions setup} \usage{ use_github_actions() use_github_actions_badge(name = "R-CMD-check", repo_spec = NULL) use_github_action( name, url = NULL, save_as = NULL, readme = NULL, ignore = TRUE, open = FALSE ) use_github_action_check_release( save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE ) use_github_action_check_standard( save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE ) use_github_action_pr_commands( save_as = "pr-commands.yaml", ignore = TRUE, open = FALSE ) } \arguments{ \item{name}{For \code{use_github_action()}: Name of one of the example workflows from \url{https://github.com/r-lib/actions/tree/v1/examples}, with or without a \code{.yaml} extension, e.g. "pkgdown" or "test-coverage.yaml". 
For \code{use_github_actions_badge()}: Specifies the workflow whose status the badge will report. Usually, this is the \code{name} keyword that appears in the workflow \code{.yaml} file.} \item{repo_spec}{Optional GitHub repo specification in this form: \code{owner/repo}. This can usually be inferred from the GitHub remotes of active project.} \item{url}{The full URL to a \code{.yaml} file on GitHub.} \item{save_as}{Name of the local workflow file. Defaults to \code{name} or \code{fs::path_file(url)} for \code{use_github_action()}. Do not specify any other part of the path; the parent directory will always be \code{.github/workflows}, within the active project.} \item{readme}{The full URL to a \code{README} file that provides more details about the workflow. Ignored when \code{url} is \code{NULL}.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Sets up continuous integration (CI) for an R package that is developed on GitHub using \href{https://github.com/features/actions}{GitHub Actions}. CI can be used to trigger various operations for each push or pull request, such as: \itemize{ \item Run \verb{R CMD check} on various operating systems and R versions \item Build and deploy a pkgdown site \item Determine test coverage } This family of functions \itemize{ \item Adds the necessary configuration files and lists them in \code{.Rbuildignore} \item Provides the markdown to insert a badge into your README } } \section{\code{use_github_actions()}}{ Configures a basic \verb{R CMD check} workflow on GitHub Actions by adding a standard \code{R-CMD-check.yaml} file to the \code{.github/workflows} directory of the active project. This is actually just an alias for \code{use_github_action_check_release()}. } \section{\code{use_github_actions_badge()}}{ Generates a GitHub Actions badge and that's all. It does not configure a workflow. This exists mostly for internal use in the other functions documented here. } \section{\code{use_github_action()}}{ Configures an individual, specific \href{https://github.com/features/actions}{GitHub Actions} workflow, either one of the examples from \href{https://github.com/r-lib/actions/tree/v1/examples}{r-lib/actions/examples} or a custom workflow given by the \code{url} parameter. Used internally to power all the other GitHub Actions functions, but it can also be called directly by the user. } \section{\code{use_github_action_check_release()}}{ This entry-level, bare-minimum workflow installs the latest release of R (on a current distribution of Linux) and runs \verb{R CMD check} via the \href{https://github.com/r-lib/rcmdcheck}{rcmdcheck} package. } \section{\code{use_github_action_check_standard()}}{ This workflow runs \verb{R CMD check} via the \href{https://github.com/r-lib/rcmdcheck}{rcmdcheck} package on the three major operating systems (Linux, macOS, and Windows) on the latest release of R and on R-devel. This workflow is appropriate for a package that is (or aspires to be) on CRAN or Bioconductor. 
} \section{\code{use_github_action_pr_commands()}}{ This workflow enables the use of two R-specific commands in pull request issue comments: \itemize{ \item \verb{/document} to run \code{roxygen2::roxygenise()} and update the PR \item \verb{/style} to run \code{styler::style_pkg()} and update the PR } } \examples{ \dontrun{ use_github_actions() use_github_action_check_standard() use_github_action("pkgdown") } } \seealso{ \itemize{ \item \code{\link[=use_github_file]{use_github_file()}} for more about \code{url} format and parsing. \item \code{\link[=use_tidy_github_actions]{use_tidy_github_actions()}} for the standard GitHub Actions used for tidyverse packages. } } usethis/man/use_make.Rd0000644000175000017500000000054213764577255014751 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/make.R \name{use_make} \alias{use_make} \title{Create Makefile} \usage{ use_make() } \description{ \code{use_make()} adds a basic Makefile to the project root directory. } \seealso{ The \href{https://www.gnu.org/software/make/manual/html_node/}{documentation for GNU Make}. } usethis/man/use_github_links.Rd0000644000175000017500000000273314117743363016506 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github.R \name{use_github_links} \alias{use_github_links} \title{Use GitHub links in URL and BugReports} \usage{ use_github_links( auth_token = deprecated(), host = deprecated(), overwrite = FALSE ) } \arguments{ \item{host, auth_token}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: No longer consulted now that usethis consults the current project's GitHub remotes to get the \code{host} and then relies on gh to discover an appropriate token.} \item{overwrite}{By default, \code{use_github_links()} will not overwrite existing fields. Set to \code{TRUE} to overwrite existing links.} } \description{ Populates the \code{URL} and \code{BugReports} fields of a GitHub-using R package with appropriate links. The GitHub repo to link to is determined from the current project's GitHub remotes: \itemize{ \item If we are not working with a fork, this function expects \code{origin} to be a GitHub remote and the links target that repo. \item If we are working in a fork, this function expects to find two GitHub remotes: \code{origin} (the fork) and \code{upstream} (the fork's parent) remote. In an interactive session, the user can confirm which repo to use for the links. In a noninteractive session, links are formed using \code{upstream}. } } \examples{ \dontrun{ use_github_links() } } usethis/man/rprofile-helper.Rd0000644000175000017500000000157614117743363016253 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rprofile.R \name{rprofile-helper} \alias{rprofile-helper} \alias{use_conflicted} \alias{use_reprex} \alias{use_usethis} \alias{use_devtools} \alias{use_partial_warnings} \title{Helpers to make useful changes to \code{.Rprofile}} \usage{ use_conflicted() use_reprex() use_usethis() use_devtools() use_partial_warnings() } \description{ All functions open your \code{.Rprofile} and give you the code you need to paste in. \itemize{ \item \code{use_devtools()}: makes devtools available in interactive sessions. \item \code{use_usethis()}: makes usethis available in interactive sessions. \item \code{use_reprex()}: makes reprex available in interactive sessions. 
\item \code{use_conflicted()}: makes conflicted available in interactive sessions. \item \code{use_partial_warnings()}: warns on partial matches. } } usethis/man/git_credentials.Rd0000644000175000017500000000301714117743363016304 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-defunct.R \name{git_credentials} \alias{git_credentials} \alias{use_git_credentials} \title{Produce or register credentials for git2r} \usage{ git_credentials(protocol = deprecated(), auth_token = deprecated()) use_git_credentials(credentials = deprecated()) } \arguments{ \item{protocol}{Deprecated.} \item{auth_token}{Deprecated.} \item{credentials}{Deprecated.} } \value{ These functions raise a warning and return an invisible \code{NULL}. } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} In usethis v2.0.0, usethis switched from git2r to gert (+ credentials) for all Git operations. This pair of packages (gert + credentials) is designed to discover and use the same credentials as command line Git. As a result, a great deal of credential-handling assistance has been removed from usethis, primarily around SSH keys. If you have credential problems, focus your troubleshooting on getting the credentials package to find your credentials. The \href{https://docs.ropensci.org/credentials/articles/intro.html}{introductory vignette} is a good place to start. If you use the HTTPS protocol (which we recommend), a GitHub personal access token will satisfy all auth needs, for both Git and the GitHub API, and is therefore the easiest approach to get working. See \code{\link[=gh_token_help]{gh_token_help()}} for more. } usethis/man/use_git.Rd0000644000175000017500000000113213737204645014601 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git} \alias{use_git} \title{Initialise a git repository} \usage{ use_git(message = "Initial commit") } \arguments{ \item{message}{Message to use for first commit.} } \description{ \code{use_git()} initialises a Git repository and adds important files to \code{.gitignore}. If user consents, it also makes an initial commit. } \examples{ \dontrun{ use_git() } } \seealso{ Other git helpers: \code{\link{use_git_config}()}, \code{\link{use_git_hook}()}, \code{\link{use_git_ignore}()} } \concept{git helpers} usethis/man/use_pipe.Rd0000644000175000017500000000164213737204645014761 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pipe.R \name{use_pipe} \alias{use_pipe} \title{Use magrittr's pipe in your package} \usage{ use_pipe(export = TRUE) } \arguments{ \item{export}{If \code{TRUE}, the file \code{R/utils-pipe.R} is added, which provides the roxygen template to import and re-export \verb{\%>\%}. If \code{FALSE}, the necessary roxygen directive is added, if possible, or otherwise instructions are given.} } \description{ Does setup necessary to use magrittr's pipe operator, \verb{\%>\%} in your package. This function requires the use roxygen. \itemize{ \item Adds magrittr to "Imports" in \code{DESCRIPTION}. \item Imports the pipe operator specifically, which is necessary for internal use. \item Exports the pipe operator, if \code{export = TRUE}, which is necessary to make \verb{\%>\%} available to the users of your package. 
} } \examples{ \dontrun{ use_pipe() } } usethis/man/use_cpp11.Rd0000644000175000017500000000103114117743363014736 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/cpp11.R \name{use_cpp11} \alias{use_cpp11} \title{Use C++ via the cpp11 package} \usage{ use_cpp11() } \description{ Adds infrastructure needed to use the \href{https://cpp11.r-lib.org}{cpp11} package, a header-only R package that helps R package developers handle R objects with C++ code. Specifically, it: \itemize{ \item Creates \verb{src/} \item Adds cpp11 to \code{DESCRIPTION} \item Creates \code{src/code.cpp}, an initial placeholder \code{.cpp} file } } usethis/man/use_code_of_conduct.Rd0000644000175000017500000000264514117743363017133 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/code-of-conduct.R \name{use_code_of_conduct} \alias{use_code_of_conduct} \title{Add a code of conduct} \usage{ use_code_of_conduct(contact, path = NULL) } \arguments{ \item{contact}{Contact details for making a code of conduct report. Usually an email address.} \item{path}{Path of the directory to put \code{CODE_OF_CONDUCT.md} in, relative to the active project. Passed along to \code{\link[=use_directory]{use_directory()}}. Default is to locate at top-level, but \verb{.github/} is also common.} } \description{ Adds a \code{CODE_OF_CONDUCT.md} file to the active project and lists it in \code{.Rbuildignore}, in the case of a package. The goal of a code of conduct is to foster an environment of inclusiveness, and to explicitly discourage inappropriate behaviour. The template comes from \url{https://www.contributor-covenant.org}, version 2: \url{https://www.contributor-covenant.org/version/2/0/code_of_conduct/}. } \details{ If your package is going to CRAN, the link to the CoC in your README must be an absolute link to a rendered website as \code{CODE_OF_CONDUCT.md} is not included in the package sent to CRAN. \code{use_code_of_conduct()} will automatically generate this link if (1) you use pkgdown and (2) have set the \code{url} field in \verb{_pkgdown.yml}; otherwise it will link to a copy of the CoC on \url{https://www.contributor-covenant.org}. } usethis/man/use_git_config.Rd0000644000175000017500000000237014117743363016131 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_config} \alias{use_git_config} \title{Configure Git} \usage{ use_git_config(scope = c("user", "project"), ...) } \arguments{ \item{scope}{Edit globally for the current \strong{user}, or locally for the current \strong{project}} \item{...}{Name-value pairs, processed as <\code{\link[rlang:dyn-dots]{dynamic-dots}}>.} } \value{ Invisibly, the previous values of the modified components, as a named list. } \description{ Sets Git options, for either the user or the project ("global" or "local", in Git terminology). Wraps \code{\link[gert:git_config]{gert::git_config_set()}} and \code{\link[gert:git_config]{gert::git_config_global_set()}}. To inspect Git config, see \code{\link[gert:git_config]{gert::git_config()}}. } \examples{ \dontrun{ # set the user's global user.name and user.email use_git_config(user.name = "Jane", user.email = "jane@example.org") # set the user.name and user.email locally, i.e.
for current repo/project use_git_config( scope = "project", user.name = "Jane", user.email = "jane@example.org" ) } } \seealso{ Other git helpers: \code{\link{use_git_hook}()}, \code{\link{use_git_ignore}()}, \code{\link{use_git}()} } \concept{git helpers} usethis/man/use_latest_dependencies.Rd0000644000175000017500000000133014117743363020016 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/latest-dependencies.R \name{use_latest_dependencies} \alias{use_latest_dependencies} \title{Use "latest" versions of all dependencies} \usage{ use_latest_dependencies(overwrite = FALSE, source = c("local", "CRAN")) } \arguments{ \item{overwrite}{By default (\code{FALSE}), only dependencies without version specifications will be modified. Set to \code{TRUE} to modify all dependencies.} \item{source}{Use "local" or "CRAN" package versions.} } \description{ Pins minimum versions of dependencies to latest ones (as determined by \code{source}). Useful for the tidyverse package, but should otherwise be used with extreme care. } \keyword{internal} usethis/man/use_news_md.Rd0000644000175000017500000000113113737204645015451 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/news.R \name{use_news_md} \alias{use_news_md} \title{Create a simple \code{NEWS.md}} \usage{ use_news_md(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ This creates a basic \code{NEWS.md} in the root directory. } \seealso{ The \href{https://r-pkgs.org/release.html#important-files}{important files section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/proj_activate.Rd0000644000175000017500000000103513676400413015767 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/proj.R \name{proj_activate} \alias{proj_activate} \title{Activate a project} \usage{ proj_activate(path) } \arguments{ \item{path}{Project directory} } \value{ Single logical value indicating if current session is modified. } \description{ Activates a project in usethis, R session, and (if relevant) RStudio senses. If you are in RStudio, this will open a new RStudio session. If not, it will change the working directory and \link[=proj_set]{active project}. } usethis/man/use_github_file.Rd0000644000175000017500000000511214131645451016272 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/use_github_file.R \name{use_github_file} \alias{use_github_file} \title{Copy a file from any GitHub repo into the current project} \usage{ use_github_file( repo_spec, path = NULL, save_as = NULL, ref = NULL, ignore = FALSE, open = FALSE, host = NULL ) } \arguments{ \item{repo_spec}{A string identifying the GitHub repo or, alternatively, a GitHub file URL. Acceptable forms: \itemize{ \item Plain \code{OWNER/REPO} spec \item A blob URL, such as \code{"https://github.com/OWNER/REPO/blob/REF/path/to/some/file"} \item A raw URL, such as \code{"https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"} } In the case of a URL, the \code{path}, \code{ref}, and \code{host} are extracted from it, in addition to the \code{repo_spec}.} \item{path}{Path of file to copy, relative to the GitHub repo it lives in. 
This is extracted from \code{repo_spec} when user provides a URL.} \item{save_as}{Path of file to create, relative to root of active project. Defaults to the last part of \code{path}, in the sense of \code{basename(path)} or \code{fs::path_file(path)}.} \item{ref}{The name of a branch, tag, or commit. By default, the file at \code{path} will by copied from its current state in the repo's default branch. This is extracted from \code{repo_spec} when user provides a URL.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \value{ A logical indicator of whether a file was written, invisibly. } \description{ Gets the content of a file from GitHub, from any repo the user can read, and writes it into the active project. This function wraps an endpoint of the GitHub API which supports specifying a target reference (i.e. branch, tag, or commit) and which follows symlinks. } \examples{ \dontrun{ use_github_file( "https://github.com/r-lib/actions/blob/v1/examples/check-standard.yaml" ) use_github_file( "r-lib/actions", path = "examples/check-standard.yaml", ref = "v1", save_as = ".github/workflows/R-CMD-check.yaml" ) } } usethis/man/proj_utils.Rd0000644000175000017500000000772614117743363015351 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/proj.R \name{proj_utils} \alias{proj_utils} \alias{proj_get} \alias{proj_set} \alias{proj_path} \alias{with_project} \alias{local_project} \title{Utility functions for the active project} \usage{ proj_get() proj_set(path = ".", force = FALSE) proj_path(..., ext = "") with_project( path = ".", code, force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE) ) local_project( path = ".", force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE), .local_envir = parent.frame() ) } \arguments{ \item{path}{Path to set. This \code{path} should exist or be \code{NULL}.} \item{force}{If \code{TRUE}, use this path without checking the usual criteria for a project. Use sparingly! The main application is to solve a temporary chicken-egg problem: you need to set the active project in order to add project-signalling infrastructure, such as initialising a Git repo or adding a \code{DESCRIPTION} file.} \item{...}{character vectors, if any values are NA, the result will also be NA. The paths follow the recycling rules used in the tibble package, namely that only length 1 arguments are recycled.} \item{ext}{An optional extension to append to the generated path.} \item{code}{Code to run with temporary active project} \item{setwd}{Whether to also temporarily set the working directory to the active project, if it is not \code{NULL}} \item{quiet}{Whether to suppress user-facing messages, while operating in the temporary active project} \item{.local_envir}{The environment to use for scoping. Defaults to current execution environment.} } \description{ Most \verb{use_*()} functions act on the \strong{active project}. 
If it is unset, usethis uses \href{https://rprojroot.r-lib.org}{rprojroot} to find the project root of the current working directory. It establishes the project root by looking for a \code{.here} file, an RStudio Project, a package \code{DESCRIPTION}, Git infrastructure, a \code{remake.yml} file, or a \code{.projectile} file. It then stores the active project for use for the remainder of the session. } \details{ In general, end user scripts should not contain direct calls to \verb{usethis::proj_*()} utility functions. They are internal functions that are exported for occasional interactive use or use in packages that extend usethis. End user code should call functions in \href{https://rprojroot.r-lib.org}{rprojroot} or its simpler companion, \href{https://here.r-lib.org}{here}, to programmatically detect a project and build paths within it. } \section{Functions}{ \itemize{ \item \code{proj_get}: Retrieves the active project and, if necessary, attempts to set it in the first place. \item \code{proj_set}: Sets the active project. \item \code{proj_path}: Builds a path within the active project returned by \code{proj_get()}. Thin wrapper around \code{\link[fs:path]{fs::path()}}. \item \code{with_project}: Runs code with a temporary active project and, optionally, working directory. It is an example of the \verb{with_*()} functions in \href{https://withr.r-lib.org}{withr}. \item \code{local_project}: Sets an active project and, optionally, working directory until the current execution environment goes out of scope, e.g. the end of the current function or test. It is an example of the \verb{local_*()} functions in \href{https://withr.r-lib.org}{withr}. }} \examples{ \dontrun{ ## see the active project proj_get() ## manually set the active project proj_set("path/to/target/project") ## build a path within the active project (both produce same result) proj_path("R/foo.R") proj_path("R", "foo", ext = "R") ## build a path within SOME OTHER project with_project("path/to/some/other/project", proj_path("blah.R")) ## convince yourself that with_project() temporarily changes the project with_project("path/to/some/other/project", print(proj_sitrep())) } } \seealso{ Other project functions: \code{\link{proj_sitrep}()} } \concept{project functions} usethis/man/use_directory.Rd0000644000175000017500000000127113676400413016017 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/directory.R \name{use_directory} \alias{use_directory} \title{Use a directory} \usage{ use_directory(path, ignore = FALSE) } \arguments{ \item{path}{Path of the directory to create, relative to the project.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} } \description{ \code{use_directory()} creates a directory (if it does not already exist) in the project's top-level directory. This function powers many of the other \code{use_} functions such as \code{\link[=use_data]{use_data()}} and \code{\link[=use_vignette]{use_vignette()}}. 
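For instance, here is a minimal usage sketch (the directory name \verb{tools/} is purely illustrative, not a convention this function imposes):\preformatted{# create tools/ at the project root and also add it to .Rbuildignore
use_directory("tools", ignore = TRUE)
}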
} \examples{ \dontrun{ use_directory("inst") } } usethis/man/use_tidy_thanks.Rd0000644000175000017500000000427314117743363016346 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tidyverse.R \name{use_tidy_thanks} \alias{use_tidy_thanks} \title{Identify contributors via GitHub activity} \usage{ use_tidy_thanks(repo_spec = NULL, from = NULL, to = NULL) } \arguments{ \item{repo_spec}{Optional GitHub repo specification in any form accepted for the \code{repo_spec} argument of \code{\link[=create_from_github]{create_from_github()}} (plain spec or a browser or Git URL). A URL specification is the only way to target a GitHub host other than \code{"github.com"}, which is the default.} \item{from, to}{GitHub ref (i.e., a SHA, tag, or release) or a timestamp in ISO 8601 format, specifying the start or end of the interval of interest, in the sense of \verb{[from, to]}. Examples: "08a560d", "v1.3.0", "2018-02-24T00:13:45Z", "2018-05-01". When \verb{from = NULL, to = NULL}, we set \code{from} to the timestamp of the most recent (GitHub) release. Otherwise, \code{NULL} means "no bound".} } \value{ A character vector of GitHub usernames, invisibly. } \description{ Derives a list of GitHub usernames, based on who has opened issues or pull requests. Used to populate the acknowledgment section of package release blog posts at \url{https://www.tidyverse.org/blog/}. If no arguments are given, we retrieve all contributors to the active project since its last (GitHub) release. Unexported helper functions, \code{releases()} and \code{ref_df()} can be useful interactively to get a quick look at release tag names and a data frame about refs (defaulting to releases), respectively. } \examples{ \dontrun{ # active project, interval = since the last release use_tidy_thanks() # active project, interval = since a specific datetime use_tidy_thanks(from = "2020-07-24T00:13:45Z") # r-lib/usethis, interval = since a certain date use_tidy_thanks("r-lib/usethis", from = "2020-08-01") # r-lib/usethis, up to a specific release use_tidy_thanks("r-lib/usethis", from = NULL, to = "v1.1.0") # r-lib/usethis, since a specific commit, up to a specific date use_tidy_thanks("r-lib/usethis", from = "08a560d", to = "2018-05-14") # r-lib/usethis, but with copy/paste of a browser URL use_tidy_thanks("https://github.com/r-lib/usethis") } } usethis/man/use_tibble.Rd0000644000175000017500000000271414117743363015264 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tibble.R \name{use_tibble} \alias{use_tibble} \title{Prepare to return a tibble} \usage{ use_tibble() } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#questioning}{\figure{lifecycle-questioning.svg}{options: alt='[Questioning]'}}}{\strong{[Questioning]}} Does minimum setup such that a tibble returned by your package is handled using the tibble method for generics like \code{print()} or \code{[}. Presumably you care about this if you've chosen to store and expose an object with class \code{tbl_df}. Specifically: \itemize{ \item Check that the active package uses roxygen2 \item Add the tibble package to "Imports" in \code{DESCRIPTION} \item Prepare the roxygen directive necessary to import at least one function from tibble: \itemize{ \item If possible, the directive is inserted into existing package-level documentation, i.e. 
the roxygen snippet created by \code{\link[=use_package_doc]{use_package_doc()}} \item Otherwise, we issue advice on where the user should add the directive } } This is necessary when your package returns a stored data object that has class \code{tbl_df}, but the package code does not make direct use of functions from the tibble package. If you do nothing, the tibble namespace is not necessarily loaded and your tibble may therefore be printed and subsetted like a base \code{data.frame}. } \examples{ \dontrun{ use_tibble() } } usethis/man/use_git_hook.Rd0000644000175000017500000000135513737204645015630 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_hook} \alias{use_git_hook} \title{Add a git hook} \usage{ use_git_hook(hook, script) } \arguments{ \item{hook}{Hook name. One of "pre-commit", "prepare-commit-msg", "commit-msg", "post-commit", "applypatch-msg", "pre-applypatch", "post-applypatch", "pre-rebase", "post-rewrite", "post-checkout", "post-merge", "pre-push", "pre-auto-gc".} \item{script}{Text of script to run} } \description{ Sets up a git hook using specified script. Creates hook directory if needed, and sets correct permissions on hook. } \seealso{ Other git helpers: \code{\link{use_git_config}()}, \code{\link{use_git_ignore}()}, \code{\link{use_git}()} } \concept{git helpers} usethis/man/use_data.Rd0000644000175000017500000000426513737204645014741 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \name{use_data} \alias{use_data} \alias{use_data_raw} \title{Create package data} \usage{ use_data( ..., internal = FALSE, overwrite = FALSE, compress = "bzip2", version = 2 ) use_data_raw(name = "DATASET", open = rlang::is_interactive()) } \arguments{ \item{...}{Unquoted names of existing objects to save.} \item{internal}{If \code{FALSE}, saves each object in its own \code{.rda} file in the \verb{data/} directory. These data files bypass the usual export mechanism and are available whenever the package is loaded (or via \code{\link[=data]{data()}} if \code{LazyData} is not true). If \code{TRUE}, stores all objects in a single \code{R/sysdata.rda} file. Objects in this file follow the usual export rules. Note that this means they will be exported if you are using the common \code{exportPattern()} rule which exports all objects except for those that start with \code{.}.} \item{overwrite}{By default, \code{use_data()} will not overwrite existing files. If you really want to do so, set this to \code{TRUE}.} \item{compress}{Choose the type of compression used by \code{\link[=save]{save()}}. Should be one of "gzip", "bzip2", or "xz".} \item{version}{The serialization format version to use. The default, 2, was the default format from R 1.4.0 to 3.5.3. Version 3 became the default from R 3.6.0 and can only be read by R versions 3.5.0 and higher.} \item{name}{Name of the dataset to be prepared for inclusion in the package.} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ \code{use_data()} makes it easy to save package data in the correct format. I recommend you save scripts that generate package data in \code{data-raw}: use \code{use_data_raw()} to set it up. You also need to document exported datasets. 
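For example, a minimal sketch of such documentation (the dataset name \code{x} and every detail below are purely illustrative), typically placed in a file like \code{R/data.R}:\preformatted{#' Example measurements
#'
#' A small, made-up dataset used only for illustration.
#'
#' @format An integer vector of length 10.
#' @source Created in data-raw/x.R.
"x"
}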
} \examples{ \dontrun{ x <- 1:10 y <- 1:100 use_data(x, y) # For external use use_data(x, y, internal = TRUE) # For internal use } \dontrun{ use_data_raw("daisy") } } \seealso{ The \href{https://r-pkgs.org/data.html}{data chapter} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/tidyverse.Rd0000644000175000017500000001155114132445361015156 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-actions.R, R/tidy-upkeep.R, % R/tidyverse.R \name{use_tidy_github_actions} \alias{use_tidy_github_actions} \alias{use_tidy_upkeep_issue} \alias{tidyverse} \alias{create_tidy_package} \alias{use_tidy_description} \alias{use_tidy_dependencies} \alias{use_tidy_eval} \alias{use_tidy_contributing} \alias{use_tidy_support} \alias{use_tidy_issue_template} \alias{use_tidy_coc} \alias{use_tidy_github} \alias{use_tidy_style} \title{Helpers for tidyverse development} \usage{ use_tidy_github_actions() use_tidy_upkeep_issue(year = NULL) create_tidy_package(path, copyright_holder = NULL) use_tidy_description() use_tidy_dependencies() use_tidy_eval() use_tidy_contributing() use_tidy_support() use_tidy_issue_template() use_tidy_coc() use_tidy_github() use_tidy_style(strict = TRUE) } \arguments{ \item{year}{Approximate year when you last touched this package. If \code{NULL}, the default, will give you a full set of actions to perform.} \item{path}{A path. If it exists, it is used. If it does not exist, it is created, provided that the parent path exists.} \item{copyright_holder}{Name of the copyright holder or holders. This defaults to "{package name} authors"; you should only change this if you use a CLA to assign copyright to a single entity.} \item{strict}{Boolean indicating whether or not a strict version of styling should be applied. See \code{\link[styler:tidyverse_style]{styler::tidyverse_style()}} for details.} } \description{ These helpers follow tidyverse conventions which are generally a little stricter than the defaults, reflecting the need for greater rigor in commonly used packages. } \details{ \itemize{ \item \code{use_tidy_github_actions()}: Sets up the following workflows using \href{https://github.com/features/actions}{GitHub Actions}: \itemize{ \item Run \verb{R CMD check} on the current release, devel, and four previous versions of R. The build matrix also ensures \verb{R CMD check} is run at least once on each of the three major operating systems (Linux, macOS, and Windows). \item Report test coverage. \item Build and deploy a pkgdown site. \item Provide two commands to be used in pull requests: \verb{/document} to run \code{roxygen2::roxygenise()} and update the PR, and \verb{/style} to run \code{styler::style_pkg()} and update the PR. This is how the tidyverse team checks its packages, but it is overkill for less widely used packages. Consider using the more streamlined workflows set up by \code{\link[=use_github_actions]{use_github_actions()}} or \code{\link[=use_github_action_check_standard]{use_github_action_check_standard()}}. } } \itemize{ \item \code{create_tidy_package()}: creates a new package, immediately applies as many of the tidyverse conventions as possible, issues a few reminders, and activates the new package. \item \code{use_tidy_dependencies()}: sets up standard dependencies used by all tidyverse packages (except packages that are designed to be dependency free). \item \code{use_tidy_description()}: puts fields in standard order and alphabetises dependencies. 
\item \code{use_tidy_eval()}: imports a standard set of helpers to facilitate programming with the tidy eval toolkit. \item \code{use_tidy_style()}: styles source code according to the \href{https://style.tidyverse.org}{tidyverse style guide}. This function will overwrite files! See below for usage advice. \item \code{use_tidy_contributing()}: adds standard tidyverse contributing guidelines. \item \code{use_tidy_issue_template()}: adds a standard tidyverse issue template. \item \code{use_tidy_release_test_env()}: updates the test environment section in \code{cran-comments.md}. \item \code{use_tidy_support()}: adds a standard description of support resources for the tidyverse. \item \code{use_tidy_coc()}: equivalent to \code{use_code_of_conduct()}, but puts the document in a \verb{.github/} subdirectory. \item \code{use_tidy_github()}: convenience wrapper that calls \code{use_tidy_contributing()}, \code{use_tidy_issue_template()}, \code{use_tidy_support()}, \code{use_tidy_coc()}. \item \code{\link[=use_tidy_github_labels]{use_tidy_github_labels()}} calls \code{use_github_labels()} to implement tidyverse conventions around GitHub issue label names and colours. \item \code{use_tidy_upkeep_issue()} creates an issue containing a checklist of actions to bring your package up to current tidyverse standards. } } \section{\code{use_tidy_style()}}{ Uses the \href{https://styler.r-lib.org}{styler} package to style all code in a package, project, or directory, according to the \href{https://style.tidyverse.org}{tidyverse style guide}. \strong{Warning:} This function will overwrite files! It is strongly suggested to only style files that are under version control or to first create a backup copy. Invisibly returns a data frame with one row per file, indicating whether styling caused a change. } usethis/man/use_testthat.Rd0000644000175000017500000000160714117743363015663 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/test.R \name{use_testthat} \alias{use_testthat} \title{Sets up overall testing infrastructure} \usage{ use_testthat(edition = NULL, parallel = FALSE) } \arguments{ \item{edition}{testthat edition to use. Defaults to the latest edition, i.e. the major version number of the currently installed testthat.} \item{parallel}{Should tests be run in parallel? This feature appeared in testthat 3.0.0; see \url{https://testthat.r-lib.org/articles/parallel.html} for details and caveats.} } \description{ Creates \verb{tests/testthat/}, \code{tests/testthat.R}, and adds the testthat package to the Suggests field. Learn more in \url{https://r-pkgs.org/tests.html} } \examples{ \dontrun{ use_testthat() use_test() use_test("something-management") } } \seealso{ \code{\link[=use_test]{use_test()}} to create individual test files } usethis/man/use_coverage.Rd0000644000175000017500000000125614117743363015616 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/coverage.R \name{use_coverage} \alias{use_coverage} \alias{use_covr_ignore} \title{Test coverage} \usage{ use_coverage(type = c("codecov", "coveralls"), repo_spec = NULL) use_covr_ignore(files) } \arguments{ \item{type}{Which web service to use.} \item{repo_spec}{Optional GitHub repo specification in this form: \code{owner/repo}.
This can usually be inferred from the GitHub remotes of the active project.} \item{files}{Character vector of file globs.} } \description{ Adds test coverage reporting to a package, using either Codecov (\verb{https://codecov.io}) or Coveralls (\verb{https://coveralls.io}). } usethis/man/use_package_doc.Rd0000644000175000017500000000215514117743363016242 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/documentation.R \name{use_package_doc} \alias{use_package_doc} \title{Package-level documentation} \usage{ use_package_doc(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Adds a dummy \code{.R} file that will prompt roxygen to generate basic package-level documentation. If your package is named "foo", this will make help available to the user via \code{?foo} or \code{package?foo}. Once you call \code{devtools::document()}, roxygen will flesh out the \code{.Rd} file using data from the \code{DESCRIPTION}. That ensures you don't need to repeat the same information in multiple places. This \code{.R} file is also a good place for roxygen directives that apply to the whole package (vs. a specific function), such as global namespace tags like \verb{@importFrom}. } \seealso{ The \href{https://r-pkgs.org/man.html}{documentation chapter} of \href{https://r-pkgs.org}{R Packages} } usethis/man/use_data_table.Rd0000644000175000017500000000175214131622147016074 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data-table.R \name{use_data_table} \alias{use_data_table} \title{Prepare for importing data.table} \usage{ use_data_table() } \description{ \code{use_data_table()} imports the \code{data.table()} function from the data.table package, as well as several important symbols: \verb{:=}, \code{.SD}, \code{.BY}, \code{.N}, \code{.I}, \code{.GRP}, \code{.NGRP}, \code{.EACHI}. This is a minimal setup to get \code{data.table}s working with your package. See the \href{https://rdatatable.gitlab.io/data.table/articles/datatable-importing.html}{importing data.table} vignette for other strategies. In addition to importing these functions, \code{use_data_table()} also blocks the usage of data.table in the \code{Depends} field of the \code{DESCRIPTION} file; \code{data.table} should be used as an \emph{imported} or \emph{suggested} package only. See this \href{https://github.com/Rdatatable/data.table/issues/3076}{discussion}. } usethis/man/use_r.Rd0000644000175000017500000000227514117743363014266 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/r.R \name{use_r} \alias{use_r} \alias{use_test} \title{Create or edit R or test files} \usage{ use_r(name = NULL, open = rlang::is_interactive()) use_test(name = NULL, open = rlang::is_interactive()) } \arguments{ \item{name}{Either a name without extension, or \code{NULL} to create the paired file based on the currently open file in the script editor. If the R file is open, \code{use_test()} will create/open the corresponding test file; if the test file is open, \code{use_r()} will create/open the corresponding R file.} \item{open}{Whether to open the file for interactive editing.} } \description{ This pair of functions makes it easy to create paired R and test files, using the convention that the tests for \code{R/foofy.R} should live in \code{tests/testthat/test-foofy.R}.
You can use them to create new files from scratch by supplying \code{name}, or if you use RStudio, you can call them to create (or navigate to) the paired file based on the currently open script. } \seealso{ The \href{https://r-pkgs.org/tests.html}{testing} and \href{https://r-pkgs.org/r.html}{R code} chapters of \href{https://r-pkgs.org}{R Packages}. } usethis/man/create_package.Rd0000644000175000017500000000444614132400710016050 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/create.R \name{create_package} \alias{create_package} \alias{create_project} \title{Create a package or project} \usage{ create_package( path, fields = list(), rstudio = rstudioapi::isAvailable(), roxygen = TRUE, check_name = TRUE, open = rlang::is_interactive() ) create_project( path, rstudio = rstudioapi::isAvailable(), open = rlang::is_interactive() ) } \arguments{ \item{path}{A path. If it exists, it is used. If it does not exist, it is created, provided that the parent path exists.} \item{fields}{A named list of fields to add to \code{DESCRIPTION}, potentially overriding default values. See \code{\link[=use_description]{use_description()}} for how you can set personalized defaults using package options.} \item{rstudio}{If \code{TRUE}, calls \code{\link[=use_rstudio]{use_rstudio()}} to make the new package or project into an \href{https://support.rstudio.com/hc/en-us/articles/200526207-Using-Projects}{RStudio Project}. If \code{FALSE} and a non-package project, a sentinel \code{.here} file is placed so that the directory can be recognized as a project by the \href{https://here.r-lib.org}{here} or \href{https://rprojroot.r-lib.org}{rprojroot} packages.} \item{roxygen}{Do you plan to use roxygen2 to document your package?} \item{check_name}{Whether to check if the name is valid for CRAN and throw an error if not.} \item{open}{If \code{TRUE}, \link[=proj_activate]{activates} the new project: \itemize{ \item If RStudio desktop, the package is opened in a new session. \item If on RStudio server, the current RStudio project is activated. \item Otherwise, the working directory and active project are changed. }} } \value{ Path to the newly created project or package, invisibly. } \description{ These functions create an R project: \itemize{ \item \code{create_package()} creates an R package \item \code{create_project()} creates a non-package project, i.e. a data analysis project } Both functions can be called on an existing project; you will be asked before any existing files are changed. } \seealso{ \code{\link[=create_tidy_package]{create_tidy_package()}} is a convenience function that extends \code{create_package()} by immediately applying as many of the tidyverse development conventions as possible.
} usethis/man/ci.Rd0000644000175000017500000000733414132400710013524 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ci.R \name{ci} \alias{use_travis} \alias{use_travis_badge} \alias{use_appveyor} \alias{use_appveyor_badge} \alias{use_gitlab_ci} \alias{use_circleci} \alias{use_circleci_badge} \title{Continuous integration setup and badges} \usage{ use_travis(browse = rlang::is_interactive(), ext = c("com", "org")) use_travis_badge(ext = c("com", "org"), repo_spec = NULL) use_appveyor(browse = rlang::is_interactive()) use_appveyor_badge(repo_spec = NULL) use_gitlab_ci() use_circleci(browse = rlang::is_interactive(), image = "rocker/verse:latest") use_circleci_badge(repo_spec = NULL) } \arguments{ \item{browse}{Open a browser window to enable automatic builds for the package.} \item{ext}{Which travis website to use. Defaults to \code{"com"} for https://www.travis-ci.com/. Change to \code{"org"} for https://travis-ci.org.} \item{repo_spec}{Optional GitHub repo specification in this form: \code{owner/repo}. This can usually be inferred from the GitHub remotes of active project.} \item{image}{The Docker image to use for build. Must be available on \href{https://hub.docker.com}{DockerHub}. The \href{https://hub.docker.com/r/rocker/verse}{rocker/verse} image includes TeXLive, pandoc, and the tidyverse packages. For a minimal image, try \href{https://hub.docker.com/r/rocker/r-ver}{rocker/r-ver}. To specify a version of R, change the tag from \code{latest} to the version you want, e.g. \verb{rocker/r-ver:3.5.3}.} } \description{ \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} Some of these functions are now soft-deprecated since the tidyverse team has started using \href{https://github.com/features/actions}{GitHub Actions (GHA)} for continuous integration (CI). See \code{\link[=use_github_actions]{use_github_actions()}} for help configuring GHA. GHA functionality in usethis is actively maintained and exercised, which is no longer true for Travis-CI or AppVeyor. Sets up third-party continuous integration (CI) services for an R package that is developed on GitHub or, perhaps, GitLab. These functions \itemize{ \item Add service-specific configuration files and add them to \code{.Rbuildignore}. \item Activate a service or give the user a detailed prompt. \item Provide the markdown to insert a badge into README. } } \section{\code{use_travis()}}{ Adds a basic \code{.travis.yml} to the top-level directory of a package. This is a configuration file for the \href{https://www.travis-ci.com/}{Travis CI} continuous integration service. } \section{\code{use_travis_badge()}}{ Only adds the Travis CI badge. Use for a project where Travis is already configured. } \section{\code{use_appveyor()}}{ Adds a basic \code{appveyor.yml} to the top-level directory of a package. This is a configuration file for the \href{https://www.appveyor.com}{AppVeyor} continuous integration service for Windows. } \section{\code{use_appveyor_badge()}}{ Only adds the \href{https://www.appveyor.com}{AppVeyor} badge. Use for a project where AppVeyor is already configured. } \section{\code{use_gitlab_ci()}}{ Adds a basic \code{.gitlab-ci.yml} to the top-level directory of a package. This is a configuration file for the \href{https://docs.gitlab.com/ee/ci/}{GitLab CI/CD} continuous integration service. 
} \section{\code{use_circleci()}}{ Adds a basic \code{.circleci/config.yml} to the top-level directory of a package. This is a configuration file for the \href{https://circleci.com/}{CircleCI} continuous integration service. } \section{\code{use_circleci_badge()}}{ Only adds the \href{https://circleci.com/}{Circle CI} badge. Use for a project where Circle CI is already configured. } usethis/man/use_github_release.Rd0000644000175000017500000000254214132400710016763 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/release.R \name{use_github_release} \alias{use_github_release} \title{Draft a GitHub release} \usage{ use_github_release(host = deprecated(), auth_token = deprecated()) } \arguments{ \item{host, auth_token}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: No longer consulted now that usethis allows the gh package to lookup a token based on a URL determined from the current project's GitHub remotes.} } \description{ Creates a \strong{draft} GitHub release for the current package. Once you are satisfied that it is correct, you will need to publish the release from GitHub. The key pieces of info are which commit / SHA to tag, the associated package version, and the relevant NEWS entries. If you use \code{devtools::release()} or \code{devtools::submit_cran()} to submit to CRAN, information about the submitted state is captured in a CRAN-SUBMISSION or CRAN-RELEASE file. \code{use_github_release()} uses this info to populate the draft GitHub release and, after success, deletes the CRAN-SUBMISSION or CRAN-RELEASE file. In the absence of such a file, we must fall back to assuming the current state (SHA of \code{HEAD}, package version, NEWS) is the submitted state. } usethis/man/use_description.Rd0000644000175000017500000000520614132400710016326 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/description.R \name{use_description} \alias{use_description} \alias{use_description_defaults} \title{Create or modify a DESCRIPTION file} \usage{ use_description(fields = list(), check_name = TRUE, roxygen = TRUE) use_description_defaults(package = NULL, roxygen = TRUE, fields = list()) } \arguments{ \item{fields}{A named list of fields to add to \code{DESCRIPTION}, potentially overriding default values. See \code{\link[=use_description]{use_description()}} for how you can set personalized defaults using package options.} \item{check_name}{Whether to check if the name is valid for CRAN and throw an error if not.} \item{roxygen}{If \code{TRUE}, sets \code{RoxygenNote} to current roxygen2 version} \item{package}{Package name} } \description{ \code{use_description()} creates a \code{DESCRIPTION} file. Although mostly associated with R packages, a \code{DESCRIPTION} file can also be used to declare dependencies for a non-package project. Within such a project, \code{devtools::install_deps()} can then be used to install all the required packages. Note that, by default, \code{use_description()} checks for a CRAN-compliant package name. You can turn this off with \code{check_name = FALSE}.
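For example (a minimal sketch, assuming a non-package data-analysis project where a CRAN-style name is not needed):\preformatted{# create DESCRIPTION without enforcing CRAN naming rules
use_description(check_name = FALSE)
}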
usethis consults the following sources, in this order, to set \code{DESCRIPTION} fields: \itemize{ \item \code{fields} argument of \code{\link[=create_package]{create_package()}} or \code{\link[=use_description]{use_description()}} \item \code{getOption("usethis.description")} \item Defaults built into usethis } The fields discovered via options or the usethis package can be viewed with \code{use_description_defaults()}. If you create a lot of packages, consider storing personalized defaults as a named list in an option named \code{"usethis.description"}. Here's an example of code to include in \code{.Rprofile}, which can be opened via \code{\link[=edit_r_profile]{edit_r_profile()}}:\preformatted{options( usethis.description = list( `Authors@R` = 'person("Jane", "Doe", email = "jane@example.com", role = c("aut", "cre"), comment = c(ORCID = "YOUR-ORCID-ID"))', License = "MIT + file LICENSE", Language = "es" ) ) } Prior to usethis v2.0.0, \code{getOption("devtools.desc")} was consulted for backwards compatibility, but now only the \code{"usethis.description"} option is supported. } \examples{ \dontrun{ use_description() use_description(fields = list(Language = "es")) use_description_defaults() } } \seealso{ The \href{https://r-pkgs.org/description.html}{description chapter} of \href{https://r-pkgs.org}{R Packages} } usethis/man/usethis-defunct.Rd0000644000175000017500000000372114131645451016253 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-defunct.R \name{usethis-defunct} \alias{usethis-defunct} \alias{pr_pull_upstream} \alias{pr_sync} \alias{browse_github_token} \alias{browse_github_pat} \alias{github_token} \alias{git_branch_default} \alias{use_tidy_labels} \alias{use_tidy_ci} \alias{use_github_action_check_full} \title{Defunct and deprecated functions in usethis} \usage{ pr_pull_upstream() pr_sync() browse_github_token(...) browse_github_pat(...) github_token() git_branch_default() use_tidy_labels() use_tidy_ci(...) use_github_action_check_full( save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE, repo_spec = NULL ) } \description{ These functions have either been deprecated or removed from usethis. } \section{\code{pr_pull_upstream()}}{ This function has been replaced by \code{\link[=pr_merge_main]{pr_merge_main()}}. } \section{\code{pr_sync()}}{ Bundling these operations together did not seem justified, in terms of how rarely this comes up and, when it does, how likely merge conflicts are. Users of \code{pr_sync()} should implement these steps "by hand": \itemize{ \item (Check you are on a PR branch) \item \code{pr_pull()} \item \code{pr_merge_main()}, deal with any merge conflicts, if any \item \code{pr_push()} } } \section{\code{browse_github_token()}, \code{browse_github_pat()}}{ These functions have been replaced by \code{\link[=create_github_token]{create_github_token()}}. } \section{\code{github_token()}}{ All implicit and explicit token discovery routes through \code{\link[gh:gh_token]{gh::gh_token()}} now. } \section{\code{git_branch_default()}}{ Please call \code{\link[=git_default_branch]{git_default_branch()}} instead. In hindsight, that is a better name for this function. } \section{\code{use_tidy_labels()}}{ Please call \code{\link[=use_tidy_github_labels]{use_tidy_github_labels()}} instead. In hindsight, that is a better name for this function. 
} \keyword{internal} usethis/man/use_github.Rd0000644000175000017500000001056614153502006015274 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github.R \name{use_github} \alias{use_github} \title{Connect a local repo with GitHub} \usage{ use_github( organisation = NULL, private = FALSE, visibility = c("public", "private", "internal"), protocol = git_protocol(), host = NULL, auth_token = deprecated(), credentials = deprecated() ) } \arguments{ \item{organisation}{If supplied, the repo will be created under this organisation, instead of the login associated with the GitHub token discovered for this \code{host}. The user's role and the token's scopes must be such that you have permission to create repositories in this \code{organisation}.} \item{private}{If \code{TRUE}, creates a private repository.} \item{visibility}{Only relevant for organisation-owned repos associated with certain GitHub Enterprise products. The special "internal" \code{visibility} grants read permission to all organisation members, i.e. it's intermediate between "private" and "public", within GHE. When specified, \code{visibility} takes precedence over \code{private = TRUE/FALSE}.} \item{protocol}{One of "https" or "ssh"} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} \item{auth_token, credentials}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: No longer consulted now that usethis uses the gert package for Git operations, instead of git2r; gert relies on the credentials package for auth. The API requests are now authorized with the token associated with the \code{host}, as retrieved by \code{\link[gh:gh_token]{gh::gh_token()}}.} } \description{ \code{use_github()} takes a local project and: \itemize{ \item Checks that the initial state is good to go: \itemize{ \item Project is already a Git repo \item Current branch is the default branch, e.g. \code{main} or \code{master} \item No uncommitted changes \item No pre-existing \code{origin} remote } \item Creates an associated repo on GitHub \item Adds that GitHub repo to your local repo as the \code{origin} remote \item Makes an initial push to GitHub \item Calls \code{\link[=use_github_links]{use_github_links()}}, if the project is an R package \item Configures \code{origin/DEFAULT} to be the upstream branch of the local \code{DEFAULT} branch, e.g. \code{main} or \code{master} } See below for the authentication setup that is necessary for all of this to work. } \section{Git/GitHub Authentication}{ Many usethis functions, including those documented here, potentially interact with GitHub in two different ways: \itemize{ \item Via the GitHub REST API. Examples: create a repo, a fork, or a pull request. \item As a conventional Git remote. Examples: clone, fetch, or push. } Therefore two types of auth can happen and your credentials must be discoverable. Which credentials do we mean? \itemize{ \item A GitHub personal access token (PAT) must be discoverable by the gh package, which is used for GitHub operations via the REST API. 
See \code{\link[=gh_token_help]{gh_token_help()}} for more about getting and configuring a PAT. \item If you use the HTTPS protocol for Git remotes, your PAT is also used for Git operations, such as \verb{git push}. Usethis uses the gert package for this, so the PAT must be discoverable by gert. Generally gert and gh will discover and use the same PAT. This ability to "kill two birds with one stone" is why HTTPS + PAT is our recommended auth strategy for those new to Git and GitHub and PRs. \item If you use SSH remotes, your SSH keys must also be discoverable, in addition to your PAT. The public key must be added to your GitHub account. } Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \examples{ \dontrun{ pkgpath <- file.path(tempdir(), "testpkg") create_package(pkgpath) ## now, working inside "testpkg", initialize git repository use_git() ## create github repository and configure as git remote use_github() } } usethis/man/use_import_from.Rd0000644000175000017500000000216414117743363016357 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/use_import_from.R \name{use_import_from} \alias{use_import_from} \title{Import a function from another package} \usage{ use_import_from(package, fun, load = is_interactive()) } \arguments{ \item{package}{Package name} \item{fun}{A vector of function names} \item{load}{Logical. Re-load with \code{\link[pkgload:load_all]{pkgload::load_all()}}?} } \value{ Invisibly, \code{TRUE} if the package document has changed, \code{FALSE} if not. } \description{ \code{use_import_from()} imports a function from another package by adding the roxygen2 \verb{@importFrom} tag to the package-level documentation (which can be created with \code{\link[=use_package_doc]{use_package_doc()}}). Importing a function from another package allows you to refer to it without a namespace (e.g., \code{fun()} instead of \code{package::fun()}). \code{use_import_from()} also re-documents the NAMESPACE and re-loads the current package. This ensures that \code{fun} is immediately available in your development session. } \examples{ \dontrun{ use_import_from("usethis", "ui_todo") } } usethis/man/use_spell_check.Rd0000644000175000017500000000175713737204645016303 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spelling.R \name{use_spell_check} \alias{use_spell_check} \title{Use spell check} \usage{ use_spell_check(vignettes = TRUE, lang = "en-US", error = FALSE) } \arguments{ \item{vignettes}{Logical, \code{TRUE} to spell check all \code{rmd} and \code{rnw} files in the \verb{vignettes/} folder.} \item{lang}{Preferred spelling language. Usually either \code{"en-US"} or \code{"en-GB"}.} \item{error}{Logical, indicating whether the unit test should fail if spelling errors are found. Defaults to \code{FALSE}, which does not error, but prints potential spelling errors} } \description{ Adds a unit test to automatically run a spell check on documentation and, optionally, vignettes during \verb{R CMD check}, using the \link[spelling:spell_check_package]{spelling} package. Also adds a \code{WORDLIST} file to the package, which is a dictionary of whitelisted words. See \link[spelling:wordlist]{spelling::wordlist} for details.
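A minimal usage sketch (the language choice here is purely illustrative):\preformatted{# add a spelling unit test plus WORDLIST, also checking vignettes, in British English
use_spell_check(vignettes = TRUE, lang = "en-GB", error = FALSE)
}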
} usethis/man/use_rcpp.Rd0000644000175000017500000000200213737204645014757 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rcpp.R \name{use_rcpp} \alias{use_rcpp} \alias{use_rcpp_armadillo} \alias{use_rcpp_eigen} \alias{use_c} \title{Use C, C++, RcppArmadillo, or RcppEigen} \usage{ use_rcpp(name = NULL) use_rcpp_armadillo(name = NULL) use_rcpp_eigen(name = NULL) use_c(name = NULL) } \arguments{ \item{name}{If supplied, creates and opens \verb{src/name.\{c,cpp\}}.} } \description{ Adds infrastructure commonly needed when using compiled code: \itemize{ \item Creates \verb{src/} \item Adds required packages to \code{DESCRIPTION} \item May create an initial placeholder \code{.c} or \code{.cpp} file \item Creates \code{Makevars} and \code{Makevars.win} files (\code{use_rcpp_armadillo()} only) } } \details{ When using compiled code, please note that there must be at least one file inside the \verb{src/} directory prior to building the package. As a result, if an empty \verb{src/} directory is detected, either a \code{.c} or \code{.cpp} file will be added. } usethis/man/git_sitrep.Rd0000644000175000017500000000067214117743363015321 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{git_sitrep} \alias{git_sitrep} \title{Git/GitHub sitrep} \usage{ git_sitrep() } \description{ Get a situation report on your current Git/GitHub status. Useful for diagnosing problems. \code{\link[=git_vaccinate]{git_vaccinate()}} adds some basic R- and RStudio-related entries to the user-level git ignore file. } \examples{ \dontrun{ git_sitrep() } } usethis/man/git_vaccinate.Rd0000644000175000017500000000115714131622147015737 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{git_vaccinate} \alias{git_vaccinate} \title{Vaccinate your global gitignore file} \usage{ git_vaccinate() } \description{ Adds \code{.DS_Store}, \code{.Rproj.user}, \code{.Rdata}, \code{.Rhistory}, and \code{.httr-oauth} to your global (a.k.a. user-level) \code{.gitignore}. This is good practice as it decreases the chance that you will accidentally leak credentials to GitHub. \code{git_vaccinate()} also tries to detect and fix the situation where you have a global gitignore file, but it's missing from your global Git config. } usethis/man/edit.Rd0000644000175000017500000000437314153502006014062 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/edit.R \name{edit} \alias{edit} \alias{edit_r_profile} \alias{edit_r_environ} \alias{edit_r_buildignore} \alias{edit_r_makevars} \alias{edit_rstudio_snippets} \alias{edit_rstudio_prefs} \alias{edit_git_config} \alias{edit_git_ignore} \alias{edit_pkgdown_config} \title{Open configuration files} \usage{ edit_r_profile(scope = c("user", "project")) edit_r_environ(scope = c("user", "project")) edit_r_buildignore() edit_r_makevars(scope = c("user", "project")) edit_rstudio_snippets( type = c("r", "markdown", "c_cpp", "css", "html", "java", "javascript", "python", "sql", "stan", "tex") ) edit_rstudio_prefs() edit_git_config(scope = c("user", "project")) edit_git_ignore(scope = c("user", "project")) edit_pkgdown_config() } \arguments{ \item{scope}{Edit globally for the current \strong{user}, or locally for the current \strong{project}} \item{type}{Snippet type (case insensitive text).} } \value{ Path to the file, invisibly. 
} \description{ \itemize{ \item \code{edit_r_profile()} opens \code{.Rprofile} \item \code{edit_r_environ()} opens \code{.Renviron} \item \code{edit_r_makevars()} opens \code{.R/Makevars} \item \code{edit_git_config()} opens \code{.gitconfig} or \code{.git/config} \item \code{edit_git_ignore()} opens global (user-level) gitignore file and ensures its path is declared in your global Git config. \item \code{edit_pkgdown_config} opens the pkgdown YAML configuration file for the current Project. \item \code{edit_rstudio_snippets()} opens RStudio's snippet config for the given type. \item \code{edit_rstudio_prefs()} opens RStudio's preference file. } } \details{ The \verb{edit_r_*()} functions consult R's notion of user's home directory. The \verb{edit_git_*()} functions (and \pkg{usethis} in general) inherit home directory behaviour from the \pkg{fs} package, which differs from R itself on Windows. The \pkg{fs} default is more conventional in terms of the location of user-level Git config files. See \code{\link[fs:path_expand]{fs::path_home()}} for more details. Files created by \code{edit_rstudio_snippets()} will \emph{mask}, not supplement, the built-in default snippets. If you like the built-in snippets, copy them and include with your custom snippets. } usethis/man/use_revdep.Rd0000644000175000017500000000120613676400413015276 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/revdep.R \name{use_revdep} \alias{use_revdep} \title{Reverse dependency checks} \usage{ use_revdep() } \description{ Performs set up for checking the reverse dependencies of an R package, as implemented by the revdepcheck package: \itemize{ \item Adds \code{revdep} directory and adds it to \code{.Rbuildignore} \item Populates \code{revdep/.gitignore} to prevent tracking of various revdep artefacts \item Creates \code{revdep/email.yml} for use with \code{revdepcheck::revdep_email()} \item Prompts user to run the checks with \code{revdepcheck::revdep_check()} } } usethis/man/use_addin.Rd0000644000175000017500000000122213762553000015063 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/addin.R \name{use_addin} \alias{use_addin} \title{Add minimal RStudio Addin binding} \usage{ use_addin(addin = "new_addin", open = rlang::is_interactive()) } \arguments{ \item{addin}{Name of the addin function, which should be defined in the \code{R} folder.} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ This function helps you add a minimal \href{https://rstudio.github.io/rstudioaddins/}{RStudio Addin} binding to \code{inst/rstudio/addins.dcf}. } usethis/man/use_git_remote.Rd0000644000175000017500000000410413737204645016156 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{use_git_remote} \alias{use_git_remote} \alias{git_remotes} \title{Configure and report Git remotes} \usage{ use_git_remote(name = "origin", url, overwrite = FALSE) git_remotes() } \arguments{ \item{name}{A string giving the short name of a remote.} \item{url}{A string giving the url of a remote.} \item{overwrite}{Logical. Controls whether an existing remote can be modified.} } \value{ Named list of Git remotes. } \description{ Two helpers are available: \itemize{ \item \code{use_git_remote()} sets the remote associated with \code{name} to \code{url}. 
\item \code{git_remotes()} reports the configured remotes, similar to \verb{git remote -v}. } } \examples{ \dontrun{ # see current remotes git_remotes() # add new remote named 'foo', a la `git remote add <name> <url>` use_git_remote(name = "foo", url = "https://github.com/<OWNER>/<REPO>.git") # remove existing 'foo' remote, a la `git remote remove <name>` use_git_remote(name = "foo", url = NULL, overwrite = TRUE) # change URL of remote 'foo', a la `git remote set-url <name> <newurl>` use_git_remote( name = "foo", url = "https://github.com/<OWNER>/<REPO>.git", overwrite = TRUE ) # Scenario: Fix remotes when you cloned someone's repo, but you should # have fork-and-cloned (in order to make a pull request). # Store origin = main repo's URL, e.g., "git@github.com:<OWNER>/<REPO>.git" upstream_url <- git_remotes()[["origin"]] # IN THE BROWSER: fork the main GitHub repo and get your fork's remote URL my_url <- "git@github.com:<ME>/<REPO>.git" # Rotate the remotes use_git_remote(name = "origin", url = my_url) use_git_remote(name = "upstream", url = upstream_url) git_remotes() # Scenario: Add upstream remote to a repo that you fork-and-cloned, so you # can pull upstream changes. # Note: If you fork-and-clone via `usethis::create_from_github()`, this is # done automatically! # Get URL of main GitHub repo, probably in the browser upstream_url <- "git@github.com:<OWNER>/<REPO>.git" use_git_remote(name = "upstream", url = upstream_url) } } usethis/man/use_cran_comments.Rd0000644000175000017500000000145413737204645016655 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/cran.R \name{use_cran_comments} \alias{use_cran_comments} \title{CRAN submission comments} \usage{ use_cran_comments(open = rlang::is_interactive()) } \arguments{ \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} } \description{ Creates \code{cran-comments.md}, a template for your communications with CRAN when submitting a package. The goal is to clearly communicate the steps you have taken to check your package on a wide range of operating systems. If you are submitting an update to a package that is used by other packages, you also need to summarize the results of your \link[=use_revdep]{reverse dependency checks}. } usethis/man/use_lifecycle.Rd0000644000175000017500000000133714131622147015752 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lifecycle.R \name{use_lifecycle} \alias{use_lifecycle} \title{Use lifecycle badges} \usage{ use_lifecycle() } \description{ This helper: \itemize{ \item Adds lifecycle as a dependency. \item Imports \code{\link[lifecycle:deprecated]{lifecycle::deprecated()}} for use in function arguments. \item Copies the lifecycle badges into \code{man/figures}. \item Reminds you how to use the badge syntax.
} Learn more at \url{https://lifecycle.r-lib.org/articles/communicate.html} } \seealso{ \code{\link[=use_lifecycle_badge]{use_lifecycle_badge()}} to signal the \href{https://lifecycle.r-lib.org/articles/stages.html}{lifecycle stage} of your package as whole } usethis/man/use_build_ignore.Rd0000644000175000017500000000201713737204645016463 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ignore.R \name{use_build_ignore} \alias{use_build_ignore} \title{Add files to \code{.Rbuildignore}} \usage{ use_build_ignore(files, escape = TRUE) } \arguments{ \item{files}{Character vector of path names.} \item{escape}{If \code{TRUE}, the default, will escape \code{.} to \verb{\\\\.} and surround with \code{^} and \code{$}.} } \description{ \code{.Rbuildignore} has a regular expression on each line, but it's usually easier to work with specific file names. By default, \code{use_build_ignore()} will (crudely) turn a filename into a regular expression that will only match that path. Repeated entries will be silently removed. \code{use_build_ignore()} is designed to ignore \emph{individual} files. If you want to ignore \emph{all} files with a given extension, consider providing an "as-is" regular expression, using \code{escape = FALSE}; see examples. } \examples{ \dontrun{ # ignore all Excel files use_build_ignore("[.]xlsx$", escape = FALSE) } } usethis/man/use_logo.Rd0000644000175000017500000000154113676400413014753 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/logo.R \name{use_logo} \alias{use_logo} \title{Use a package logo} \usage{ use_logo(img, geometry = "240x278", retina = TRUE) } \arguments{ \item{img}{The path to an existing image file} \item{geometry}{a \link[magick:geometry]{magick::geometry} string specifying size. The default assumes that you have a hex logo using spec from \url{http://hexb.in/sticker.html}.} \item{retina}{\code{TRUE}, the default, scales the image on the README, assuming that geometry is double the desired size.} } \description{ This function helps you use a logo in your package: \itemize{ \item Enforces a specific size \item Stores logo image file at \code{man/figures/logo.png} \item Produces the markdown text you need in README to include the logo } } \examples{ \dontrun{ use_logo("usethis.png") } } usethis/man/use_blank_slate.Rd0000644000175000017500000000155714117743363016306 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rstudio.R \name{use_blank_slate} \alias{use_blank_slate} \title{Don't save/load user workspace between sessions} \usage{ use_blank_slate(scope = c("user", "project")) } \arguments{ \item{scope}{Edit globally for the current \strong{user}, or locally for the current \strong{project}} } \description{ R can save and reload the user's workspace between sessions via an \code{.RData} file in the current directory. However, long-term reproducibility is enhanced when you turn this feature off and clear R's memory at every restart. Starting with a blank slate provides timely feedback that encourages the development of scripts that are complete and self-contained. More detail can be found in the blog post \href{https://www.tidyverse.org/blog/2017/12/workflow-vs-script/}{Project-oriented workflow}. 
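As an illustration, opting out of workspace save/restore is a one-time call per scope (both scopes shown; run whichever applies): \preformatted{
# for the current user, i.e. all projects
use_blank_slate("user")

# or for the current project only
use_blank_slate("project")
}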
} usethis/man/use_citation.Rd0000644000175000017500000000044213676400413015624 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/citation.R \name{use_citation} \alias{use_citation} \title{Create a CITATION template} \usage{ use_citation() } \description{ Use this if you want to encourage users of your package to cite an article or book. } usethis/man/issue-this.Rd0000644000175000017500000000255414131645451015241 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/issue.R \name{issue-this} \alias{issue-this} \alias{issue_close_community} \alias{issue_reprex_needed} \title{Helpers for GitHub issues} \usage{ issue_close_community(number, reprex = FALSE) issue_reprex_needed(number) } \arguments{ \item{number}{Issue number} \item{reprex}{Does the issue also need a reprex?} } \description{ The \verb{issue_*} family of functions allows you to perform common operations on GitHub issues from within R. They're designed to help you efficiently deal with large numbers of issues, particularly motivated by the challenges faced by the tidyverse team. \itemize{ \item \code{issue_close_community()} closes an issue, because it's not a bug report or feature request, and points the author towards RStudio Community as a better place to discuss usage (\url{https://community.rstudio.com}). \item \code{issue_reprex_needed()} labels the issue with the "reprex" label and gives the author some advice about what is needed. } } \section{Saved replies}{ Unlike GitHub's "saved replies", these functions can: \itemize{ \item Be shared between people \item Perform other actions, like labelling, or closing \item Have additional arguments \item Include randomness (like friendly gifs) } } \examples{ \dontrun{ issue_close_community(12, reprex = TRUE) issue_reprex_needed(241) } } usethis/man/roxygen/0000755000175000017500000000000014117743363014347 5ustar nileshnileshusethis/man/roxygen/templates/0000755000175000017500000000000014117743363016345 5ustar nileshnileshusethis/man/roxygen/templates/double-auth.R0000644000175000017500000000257214117743363020707 0ustar nileshnilesh#' @section Git/GitHub Authentication: #' Many usethis functions, including those documented here, potentially interact #' with GitHub in two different ways: #' * Via the GitHub REST API. Examples: create a repo, a fork, or a pull #' request. #' * As a conventional Git remote. Examples: clone, fetch, or push. #' #' Therefore two types of auth can happen and your credentials must be #' discoverable. Which credentials do we mean? #' #' * A GitHub personal access token (PAT) must be discoverable by the gh #' package, which is used for GitHub operations via the REST API. See #' [gh_token_help()] for more about getting and configuring a PAT. #' * If you use the HTTPS protocol for Git remotes, your PAT is also used for #' Git operations, such as `git push`. Usethis uses the gert package for this, #' so the PAT must be discoverable by gert. Generally gert and gh will #' discover and use the same PAT. This ability to "kill two birds with one #' stone" is why HTTPS + PAT is our recommended auth strategy for those new #' to Git and GitHub and PRs. #' * If you use SSH remotes, your SSH keys must also be discoverable, in #' addition to your PAT. The public key must be added to your GitHub account. 
#' #' Git/GitHub credential management is covered in a dedicated article: #' [Managing Git(Hub) Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html) usethis/man/edit_file.Rd0000644000175000017500000000203614117743363015070 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/edit.R \name{edit_file} \alias{edit_file} \alias{edit_template} \title{Open file for editing} \usage{ edit_file(path, open = rlang::is_interactive()) edit_template(template = NULL, open = rlang::is_interactive()) } \arguments{ \item{path}{Path to target file.} \item{open}{Whether to open the file for interactive editing.} \item{template}{The target template file. If not specified, existing template files are offered for interactive selection.} } \value{ Target path, invisibly. } \description{ Opens a file for editing in RStudio, if that is the active environment, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise. If the file does not exist, it is created. If the parent directory does not exist, it is also created. \code{edit_template()} specifically opens templates in \code{inst/templates} for use with \code{\link[=use_template]{use_template()}}. } \examples{ \dontrun{ edit_file("DESCRIPTION") edit_file("~/.gitconfig") } } \keyword{internal} usethis/man/use_version.Rd0000644000175000017500000000230714117743363015506 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/version.R \name{use_version} \alias{use_version} \alias{use_dev_version} \title{Increment package version} \usage{ use_version(which = NULL) use_dev_version() } \arguments{ \item{which}{A string specifying which level to increment, one of: "major", "minor", "patch", "dev". If \code{NULL}, user can choose interactively.} } \description{ \code{use_version()} increments the "Version" field in \code{DESCRIPTION}, adds a new heading to \code{NEWS.md} (if it exists), and commits those changes (if package uses Git). It makes the same update to a line like \code{PKG_version = "x.y.z";} in \code{src/version.c} (if it exists). \code{use_dev_version()} increments to a development version, e.g. from 1.0.0 to 1.0.0.9000. If the existing version is already a development version with four components, it does nothing. Thin wrapper around \code{use_version()}. } \examples{ \dontrun{ ## for interactive selection, do this: use_version() ## request a specific type of increment use_version("minor") use_dev_version() } } \seealso{ The \href{https://r-pkgs.org/description.html#version}{version section} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_vignette.Rd0000644000175000017500000000230414117743363015643 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/vignette.R \name{use_vignette} \alias{use_vignette} \alias{use_article} \title{Create a vignette or article} \usage{ use_vignette(name, title = name) use_article(name, title = name) } \arguments{ \item{name}{Base for file name to use for new vignette. Should consist only of numbers, letters, \verb{_} and \code{-}. Lower case is recommended.} \item{title}{The title of the vignette.} } \description{ Creates a new vignette or article in \verb{vignettes/}. Articles are a special type of vignette that appear on pkgdown websites, but are not included in the package itself (because they are added to \code{.Rbuildignore} automatically). } \section{General setup}{ \itemize{ \item Adds needed packages to \code{DESCRIPTION}. 
\item Adds \code{inst/doc} to \code{.gitignore} so built vignettes aren't tracked. \item Adds \verb{vignettes/*.html} and \verb{vignettes/*.R} to \code{.gitignore} so you never accidentally track rendered vignettes. } } \examples{ \dontrun{ use_vignette("how-to-do-stuff", "How to do stuff") } } \seealso{ The \href{https://r-pkgs.org/vignettes.html}{vignettes chapter} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/use_jenkins.Rd0000644000175000017500000000120413764577255015471 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/jenkins.R \name{use_jenkins} \alias{use_jenkins} \title{Create Jenkinsfile for Jenkins CI Pipelines} \usage{ use_jenkins() } \description{ \code{use_jenkins()} adds a basic Jenkinsfile for R packages to the project root directory. The Jenkinsfile stages take advantage of calls to \code{make}, and so calling this function will also run \code{use_make()} if a Makefile does not already exist at the project root. } \seealso{ The \href{https://www.jenkins.io/doc/book/pipeline/jenkinsfile/}{documentation on Jenkins Pipelines}. \code{\link[=use_make]{use_make()}} } usethis/man/proj_sitrep.Rd0000644000175000017500000000157213737204645015512 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sitrep.R \name{proj_sitrep} \alias{proj_sitrep} \title{Report working directory and usethis/RStudio project} \usage{ proj_sitrep() } \value{ A named list, with S3 class \code{sitrep} (for printing purposes), reporting current working directory, active usethis project, and active RStudio Project } \description{ \code{proj_sitrep()} reports \itemize{ \item current working directory \item the active usethis project \item the active RStudio Project } Call this function if things seem weird and you're not sure what's wrong or how to fix it. Usually, all three of these should coincide (or be unset) and \code{proj_sitrep()} provides suggested commands for getting back to this happy state. } \examples{ proj_sitrep() } \seealso{ Other project functions: \code{\link{proj_utils}} } \concept{project functions} usethis/man/use_release_issue.Rd0000644000175000017500000000244314131645451016645 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/release.R \name{use_release_issue} \alias{use_release_issue} \title{Create a release checklist in a GitHub issue} \usage{ use_release_issue(version = NULL) } \arguments{ \item{version}{Optional version number for release. If unspecified, you can make an interactive choice.} } \description{ When preparing to release a package to CRAN there are quite a few steps that need to be performed, and some of the steps can take multiple hours. This function creates a checklist in a GitHub issue to: \itemize{ \item Help you keep track of where you are in the process \item Feel a sense of satisfaction as you progress towards final submission \item Help watchers of your package stay informed. } The checklist contains a generic set of steps that we've found to be helpful, based on the type of release ("patch", "minor", or "major"). You're encouraged to edit the issue to customize this list to meet your needs. If you want to consistently add extra bullets for every release, you can include your own custom bullets by providing a (unexported) a \code{release_bullets()} function that returns a character vector. (For historical reasons, \code{release_questions()} is also supported). 
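A minimal sketch of such a helper (the bullet text is purely illustrative; define it in any \code{R/} file of your package and do not export it): \preformatted{
release_bullets <- function() {
  c(
    "Update the pkgdown reference index",
    "Re-run the extended benchmark vignette"
  )
}
}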
} \examples{ \dontrun{ use_release_issue("2.0.0") } } usethis/man/usethis-package.Rd0000644000175000017500000000225614132450375016220 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/usethis-package.R \docType{package} \name{usethis-package} \alias{usethis} \alias{usethis-package} \title{usethis: Automate Package and Project Setup} \description{ \if{html}{\figure{logo.png}{options: align='right' alt='logo' width='120'}} Automate package and project setup tasks that are otherwise performed manually. This includes setting up unit testing, test coverage, continuous integration, Git, 'GitHub', licenses, 'Rcpp', 'RStudio' projects, and more. } \seealso{ Useful links: \itemize{ \item \url{https://usethis.r-lib.org} \item \url{https://github.com/r-lib/usethis} \item Report bugs at \url{https://github.com/r-lib/usethis/issues} } } \author{ \strong{Maintainer}: Jennifer Bryan \email{jenny@rstudio.com} (\href{https://orcid.org/0000-0002-6983-2759}{ORCID}) Authors: \itemize{ \item Hadley Wickham \email{hadley@rstudio.com} (\href{https://orcid.org/0000-0003-4757-117X}{ORCID}) \item Malcolm Barrett \email{malcolmbarrett@gmail.com} (\href{https://orcid.org/0000-0003-0299-5825}{ORCID}) } Other contributors: \itemize{ \item RStudio [copyright holder, funder] } } \keyword{internal} usethis/man/use_template.Rd0000644000175000017500000000423213737204645015635 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/template.R \name{use_template} \alias{use_template} \title{Use a usethis-style template} \usage{ use_template( template, save_as = template, data = list(), ignore = FALSE, open = FALSE, package = "usethis" ) } \arguments{ \item{template}{Path to template file relative to \verb{templates/} directory within \code{package}; see details.} \item{save_as}{Path of file to create, relative to root of active project. Defaults to \code{template}} \item{data}{A list of data passed to the template.} \item{ignore}{Should the newly created file be added to \code{.Rbuildignore}?} \item{open}{Open the newly created file for editing? Happens in RStudio, if applicable, or via \code{\link[utils:file.edit]{utils::file.edit()}} otherwise.} \item{package}{Name of the package where the template is found.} } \value{ A logical vector indicating if file was modified. } \description{ Creates a file from data and a template found in a package. Provides control over file name, the addition to \code{.Rbuildignore}, and opening the file for inspection. } \details{ This function can be used as the engine for a templating function in other packages. The \code{template} argument is used along with the \code{package} argument to derive the path to your template file; it will be expected at \code{fs::path_package(package = package, "templates", template)}. We use \code{fs::path_package()} instead of \code{base::system.file()} so that path construction works even in a development workflow, e.g., works with \code{devtools::load_all()} or \code{pkgload::load_all()}. \emph{Note this describes the behaviour of \code{fs::path_package()} in fs v1.2.7.9001 and higher.} To interpolate your data into the template, supply a list using the \code{data} argument. Internally, this function uses \code{\link[whisker:whisker.render]{whisker::whisker.render()}} to combine your template file with your data. 
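For example, a templating function in a hypothetical package \code{mypkg}, whose template lives at \code{inst/templates/report.Rmd} (all names here are illustrative, not part of usethis): \preformatted{
use_report <- function(author = "Jane Doe") {
  usethis::use_template(
    "report.Rmd",
    save_as = "analysis/report.Rmd",
    data = list(author = author),
    open = TRUE,
    package = "mypkg"
  )
}
}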
} \examples{ \dontrun{ # Note: running this will write `NEWS.md` to your working directory use_template( template = "NEWS.md", data = list(Package = "acme", Version = "1.2.3"), package = "usethis" ) } } usethis/man/licenses.Rd0000644000175000017500000000575514117743363014764 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/license.R \name{licenses} \alias{use_mit_license} \alias{use_gpl_license} \alias{use_agpl_license} \alias{use_lgpl_license} \alias{use_apache_license} \alias{use_cc0_license} \alias{use_ccby_license} \alias{use_proprietary_license} \alias{use_gpl3_license} \alias{use_agpl3_license} \alias{use_apl2_license} \title{License a package} \usage{ use_mit_license(copyright_holder = NULL) use_gpl_license(version = 3, include_future = TRUE) use_agpl_license(version = 3, include_future = TRUE) use_lgpl_license(version = 3, include_future = TRUE) use_apache_license(version = 2, include_future = TRUE) use_cc0_license() use_ccby_license() use_proprietary_license(copyright_holder) } \arguments{ \item{copyright_holder}{Name of the copyright holder or holders. This defaults to "{package name} authors"; you should only change this if you use a CLA to assign copyright to a single entity.} \item{version}{License version. This defaults to latest version all licenses.} \item{include_future}{If \code{TRUE}, will license your package under the current and any potential future versions of the license. This is generally considered to be good practice because it means your package will automatically include "bug" fixes in licenses.} } \description{ Adds the necessary infrastructure to declare your package as licensed with one of these popular open source licenses: Permissive: \itemize{ \item \href{https://choosealicense.com/licenses/mit/}{MIT}: simple and permissive. \item \href{https://choosealicense.com/licenses/apache-2.0/}{Apache 2.0}: MIT + provides patent protection. } Copyleft: \itemize{ \item \href{https://choosealicense.com/licenses/gpl-2.0/}{GPL v2}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/gpl-3.0/}{GPL v3}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/agpl-3.0/}{AGPL v3}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/lgpl-2.1/}{LGPL v2.1}: requires sharing of improvements. \item \href{https://choosealicense.com/licenses/lgpl-3.0/}{LGPL v3}: requires sharing of improvements. } Creative commons licenses appropriate for data packages: \itemize{ \item \href{https://creativecommons.org/publicdomain/zero/1.0/}{CC0}: dedicated to public domain. \item \href{https://creativecommons.org/licenses/by/4.0/}{CC-BY}: Free to share and adapt, must give appropriate credit. } See \url{https://choosealicense.com} for more details and other options. Alternatively, for code that you don't want to share with others, \code{use_proprietary_license()} makes it clear that all rights are reserved, and the code is not open source. } \details{ CRAN does not permit you to include copies of standard licenses in your package, so these functions save the license as \code{LICENSE.md} and add it to \code{.Rbuildignore}. } \seealso{ For more details, refer to the the \href{https://r-pkgs.org/license.html}{license chapter} in \emph{R Packages}. 
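An illustrative call for each style (in a real package you would pick exactly one; these update the \code{License} field in \code{DESCRIPTION} and write \code{LICENSE.md}): \preformatted{
use_mit_license()
use_gpl_license(version = 3)
}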
} usethis/man/use_course_details.Rd0000644000175000017500000001426714153502006017021 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/course.R \name{use_course_details} \alias{use_course_details} \alias{create_download_url} \title{Helpers to download and unpack a ZIP file} \usage{ create_download_url(url) } \arguments{ \item{url}{a GitHub, DropBox, or Google Drive URL, as copied from a web browser.} \item{destdir}{Path to existing local directory where the ZIP file will be stored. Defaults to current working directory, but note that \code{\link[=use_course]{use_course()}} has different default behavior.} \item{zipfile}{Path to local ZIP file.} } \description{ Details on the internal and helper functions that power \code{\link[=use_course]{use_course()}} and \code{\link[=use_zip]{use_zip()}}. Only \code{create_download_url()} is exported. } \section{tidy_download()}{ \preformatted{## function signature tidy_download(url, destdir = getwd()) # as called inside use_course() tidy_download( url, ## after post-processing with normalize_url() # conspicuous_place() = `getOption('usethis.destdir')` or desktop or home # directory or working directory destdir = destdir \%||\% conspicuous_place() ) } Special-purpose function to download a ZIP file and automatically determine the file name, which often determines the folder name after unpacking. Developed with DropBox and GitHub as primary targets, possibly via shortlinks. Both platforms offer a way to download an entire folder or repo as a ZIP file, with information about the original folder or repo transmitted in the \code{Content-Disposition} header. In the absence of this header, a filename is generated from the input URL. In either case, the filename is sanitized. Returns the path to downloaded ZIP file, invisibly. \code{tidy_download()} is setup to retry after a download failure. In an interactive session, it asks for user's consent. All retries use a longer connect timeout. \subsection{DropBox}{ To make a folder available for ZIP download, create a shared link for it: \itemize{ \item \url{https://help.dropbox.com/files-folders/share/view-only-access} } A shared link will have this form:\preformatted{https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0 } Replace the \code{dl=0} at the end with \code{dl=1} to create a download link:\preformatted{https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1 } You can use \code{create_download_url()} to do this conversion. This download link (or a shortlink that points to it) is suitable as input for \code{tidy_download()}. After one or more redirections, this link will eventually lead to a download URL. For more details, see \url{https://help.dropbox.com/files-folders/share/force-download} and \url{https://help.dropbox.com/installs-integrations/sync-uploads/download-entire-folders}. } \subsection{GitHub}{ Click on the repo's "Clone or download" button, to reveal a "Download ZIP" button. Capture this URL, which will have this form:\preformatted{https://github.com/r-lib/usethis/archive/main.zip } This download link (or a shortlink that points to it) is suitable as input for \code{tidy_download()}. After one or more redirections, this link will eventually lead to a download URL. 
Here are other links that also lead to ZIP download, albeit with a different filenaming scheme (REF could be a branch name, a tag, or a SHA):\preformatted{https://github.com/github.com/r-lib/usethis/zipball/HEAD https://api.github.com/repos/r-lib/rematch2/zipball/REF https://api.github.com/repos/r-lib/rematch2/zipball/HEAD https://api.github.com/repos/r-lib/usethis/zipball/REF } You can use \code{create_download_url()} to create the "Download ZIP" URL from a typical GitHub browser URL. } \subsection{Google Drive}{ To our knowledge, it is not possible to download a Google Drive folder as a ZIP archive. It is however possible to share a ZIP file stored on Google Drive. To get its URL, click on "Get the shareable link" (within the "Share" menu). This URL doesn't allow for direct download, as it's designed to be processed in a web browser first. Such a sharing link looks like:\preformatted{https://drive.google.com/open?id=123456789xxyyyzzz } To be able to get the URL suitable for direct download, you need to extract the "id" element from the URL and include it in this URL format:\preformatted{https://drive.google.com/uc?export=download&id=123456789xxyyyzzz } Use \code{create_download_url()} to perform this transformation automatically. } } \section{tidy_unzip()}{ Special-purpose function to unpack a ZIP file and (attempt to) create the directory structure most people want. When unpacking an archive, it is easy to get one more or one less level of nesting than you expected. It's especially important to finesse the directory structure here: we want the same local result when unzipping the same content from either GitHub or DropBox ZIP files, which pack things differently. Here is the intent: \itemize{ \item If the ZIP archive \code{foo.zip} does not contain a single top-level directory, i.e. it is packed as "loose parts", unzip into a directory named \code{foo}. Typical of DropBox ZIP files. \item If the ZIP archive \code{foo.zip} has a single top-level directory (which, by the way, is not necessarily called "foo"), unpack into said directory. Typical of GitHub ZIP files. } Returns path to the directory holding the unpacked files, invisibly. \strong{DropBox:} The ZIP files produced by DropBox are special. The file list tends to contain a spurious directory \code{"/"}, which we ignore during unzip. Also, if the directory is a Git repo and/or RStudio Project, we unzip-ignore various hidden files, such as \code{.RData}, \code{.Rhistory}, and those below \verb{.git/} and \code{.Rproj.user}. 
} \examples{ \dontrun{ tidy_download("https://github.com/r-lib/rematch2/archive/main.zip") } \dontrun{ tidy_download("https://github.com/r-lib/rematch2/archive/main.zip") tidy_unzip("rematch2-main.zip") } # GitHub create_download_url("https://github.com/r-lib/usethis") create_download_url("https://github.com/r-lib/usethis/issues") # DropBox create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0") # Google Drive create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz") create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz/view") } \keyword{internal} usethis/man/pull-requests.Rd0000644000175000017500000002373014131645451015770 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pr.R \name{pull-requests} \alias{pull-requests} \alias{pr_init} \alias{pr_resume} \alias{pr_fetch} \alias{pr_push} \alias{pr_pull} \alias{pr_merge_main} \alias{pr_view} \alias{pr_pause} \alias{pr_finish} \alias{pr_forget} \title{Helpers for GitHub pull requests} \usage{ pr_init(branch) pr_resume(branch = NULL) pr_fetch(number = NULL, target = c("source", "primary")) pr_push() pr_pull() pr_merge_main() pr_view(number = NULL, target = c("source", "primary")) pr_pause() pr_finish(number = NULL, target = c("source", "primary")) pr_forget() } \arguments{ \item{branch}{Name of a new or existing local branch. If creating a new branch, note this should usually consist of lower case letters, numbers, and \code{-}.} \item{number}{Number of PR.} \item{target}{Which repo to target? This is only a question in the case of a fork. In a fork, there is some slim chance that you want to consider pull requests against your fork (the primary repo, i.e. \code{origin}) instead of those against the source repo (i.e. \code{upstream}, which is the default).} } \description{ The \verb{pr_*} family of functions is designed to make working with GitHub pull requests (PRs) as painless as possible for both contributors and package maintainers. To use the \verb{pr_*} functions, your project must be a Git repo and have one of these GitHub remote configurations: \itemize{ \item "ours": You can push to the GitHub remote configured as \code{origin} and it's not a fork. \item "fork": You can push to the GitHub remote configured as \code{origin}, it's a fork, and its parent is configured as \code{upstream}. \code{origin} points to your \strong{personal} copy and \code{upstream} points to the \strong{source repo}. } "Ours" and "fork" are two of several GitHub remote configurations examined in \href{https://happygitwithr.com/common-remote-setups.html}{Common remote setups} in Happy Git and GitHub for the useR. The \href{https://usethis.r-lib.org/articles/articles/pr-functions.html}{Pull Request Helpers} article walks through the process of making a pull request with the \verb{pr_*} functions. The \verb{pr_*} functions also use your Git/GitHub credentials to carry out various remote operations; see below for more about auth. The \verb{pr_*} functions also proactively check for agreement re: the default branch in your local repo and the source repo. See \code{\link[=git_default_branch]{git_default_branch()}} for more. } \section{Git/GitHub Authentication}{ Many usethis functions, including those documented here, potentially interact with GitHub in two different ways: \itemize{ \item Via the GitHub REST API. Examples: create a repo, a fork, or a pull request. \item As a conventional Git remote. Examples: clone, fetch, or push. 
} Therefore two types of auth can happen and your credentials must be discoverable. Which credentials do we mean? \itemize{ \item A GitHub personal access token (PAT) must be discoverable by the gh package, which is used for GitHub operations via the REST API. See \code{\link[=gh_token_help]{gh_token_help()}} for more about getting and configuring a PAT. \item If you use the HTTPS protocol for Git remotes, your PAT is also used for Git operations, such as \verb{git push}. Usethis uses the gert package for this, so the PAT must be discoverable by gert. Generally gert and gh will discover and use the same PAT. This ability to "kill two birds with one stone" is why HTTPS + PAT is our recommended auth strategy for those new to Git and GitHub and PRs. \item If you use SSH remotes, your SSH keys must also be discoverable, in addition to your PAT. The public key must be added to your GitHub account. } Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \section{For contributors}{ To contribute to a package, first use \code{create_from_github("OWNER/REPO")}. This forks the source repository and checks out a local copy. Next use \code{pr_init()} to create a branch for your PR. It is best practice to never make commits to the default branch branch of a fork (usually named \code{main} or \code{master}), because you do not own it. A pull request should always come from a feature branch. It will be much easier to pull upstream changes from the fork parent if you only allow yourself to work in feature branches. It is also much easier for a maintainer to explore and extend your PR if you create a feature branch. Work locally, in your branch, making changes to files, and committing your work. Once you're ready to create the PR, run \code{pr_push()} to push your local branch to GitHub, and open a webpage that lets you initiate the PR (or draft PR). To learn more about the process of making a pull request, read the \href{https://usethis.r-lib.org/articles/articles/pr-functions.html}{Pull Request Helpers} vignette. If you are lucky, your PR will be perfect, and the maintainer will accept it. You can then run \code{pr_finish()} to delete your PR branch. In most cases, however, the maintainer will ask you to make some changes. Make the changes, then run \code{pr_push()} to update your PR. It's also possible that the maintainer will contribute some code to your PR: to get those changes back onto your computer, run \code{pr_pull()}. It can also happen that other changes have occurred in the package since you first created your PR. You might need to merge the default branch (usually named \code{main} or \code{master}) into your PR branch. Do that by running \code{pr_merge_main()}: this makes sure that your PR is compatible with the primary repo's main line of development. Both \code{pr_pull()} and \code{pr_merge_main()} can result in merge conflicts, so be prepared to resolve before continuing. } \section{For maintainers}{ To download a PR locally so that you can experiment with it, run \code{pr_fetch()} and select the PR or, if you already know its number, call \verb{pr_fetch()}. If you make changes, run \code{pr_push()} to push them back to GitHub. After you have merged the PR, run \code{pr_finish()} to delete the local branch and remove the remote associated with the contributor's fork. 
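A typical maintainer session might therefore look like this sketch (the PR number 123 is illustrative): \preformatted{
pr_fetch(123)   # check out the contributor's branch locally
# ... experiment, make commits ...
pr_push()       # send your changes back to the contributor's PR
# merge the PR on GitHub, then clean up locally:
pr_finish(123)
}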
} \section{Overview of all the functions}{ \itemize{ \item \code{pr_init()}: Does a preparatory pull of the default branch from the source repo, to get a good start point. Creates and checks out a new branch. Nothing is pushed to or created on GitHub (that does not happen until the first time you call \code{pr_push()}). \item \code{pr_resume()}: Resume work on a PR by switching to an existing local branch and pulling any changes from its upstream tracking branch, if it has one. If called with no arguments, up to 9 local branches are offered for interactive selection, with a preference for branches connected to PRs and for branches with recent activity. \item \code{pr_fetch()}: Checks out a PR on the source repo for local exploration. If called with no arguments, up to 9 open PRs are offered for interactive selection. This can cause a new remote to be configured and a new local branch to be created. The local branch is configured to track its remote counterpart. The transport protocol (HTTPS vs SSH) for any new remote is inherited from the remote representing the source repo. \code{pr_fetch()} puts a maintainer in a position where they can push changes into an internal or external PR via \code{pr_push()}. \item \code{pr_push()}: The first time it's called, a PR branch is pushed to GitHub and you're taken to a webpage where a new PR (or draft PR) can be created. This also sets up the local branch to track its remote counterpart. Subsequent calls to \code{pr_push()} make sure the local branch has all the remote changes and, if so, pushes local changes, thereby updating the PR. \item \code{pr_pull()}: Pulls changes from the local branch's remote tracking branch. If a maintainer has extended your PR, this is how you bring those changes back into your local work. \item \code{pr_merge_main()}: Pulls changes from the default branch of the source repo into the current local branch. This can be used when the local branch is the default branch or when it's a PR branch. \item \code{pr_pause()}: Makes sure you're up-to-date with any remote changes in the PR. Then switches back to the default branch and pulls from the source repo. \item \code{pr_view()}: Visits the PR associated with the current branch in the browser (default) or the specific PR identified by \code{number}. (FYI \code{\link[=browse_github_pulls]{browse_github_pulls()}} is a handy way to visit the list of all PRs for the current project.) \item \code{pr_forget()}: Does local clean up when the current branch is an actual or notional PR that you want to abandon. Maybe you initiated it yourself, via \code{pr_init()}, or you used \code{pr_fetch()} to explore a PR from GitHub. Only does \emph{local} operations: does not update or delete any remote branches, nor does it close any PRs. Alerts the user to any uncommitted or unpushed work that is at risk of being lost. If user chooses to proceed, switches back to the default branch, pulls changes from source repo, and deletes local PR branch. Any associated Git remote is deleted, if the "forgotten" PR was the only branch using it. \item \code{pr_finish()}: Does post-PR clean up, but does NOT actually merge or close a PR (maintainer should do this in the browser). If \code{number} is not given, infers the PR from the upstream tracking branch of the current branch. If \code{number} is given, it does not matter whether the PR exists locally. 
If PR exists locally, alerts the user to uncommitted or unpushed changes, then switches back to the default branch, pulls changes from source repo, and deletes local PR branch. If the PR came from an external fork, any associated Git remote is deleted, provided it's not in use by any other local branches. If the PR has been merged and user has permission, deletes the remote branch (this is the only remote operation that \code{pr_finish()} potentially does). } } \examples{ \dontrun{ pr_fetch(123) } } usethis/man/use_namespace.Rd0000644000175000017500000000132113737204645015752 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/namespace.R \name{use_namespace} \alias{use_namespace} \title{Use a basic \code{NAMESPACE}} \usage{ use_namespace(roxygen = TRUE) } \arguments{ \item{roxygen}{Do you plan to manage \code{NAMESPACE} with roxygen2?} } \description{ If \code{roxygen} is \code{TRUE} generates an empty \code{NAMESPACE} that exports nothing; you'll need to explicitly export functions with \verb{@export}. If \code{roxygen} is \code{FALSE}, generates a default \code{NAMESPACE} that exports all functions except those that start with \code{.}. } \seealso{ The \href{https://r-pkgs.org/namespace.html}{namespace chapter} of \href{https://r-pkgs.org}{R Packages}. } usethis/man/figures/0000755000175000017500000000000014154446735014324 5ustar nileshnilesh [man/figures/ image assets omitted: logo.png (binary PNG, the package hex logo) and the lifecycle badge SVGs (archived, defunct, deprecated, experimental, maturing, questioning, soft-deprecated, stable, superseded); binary and SVG image data are not reproducible as text.]
usethis/man/browse-this.Rd0000644000175000017500000000621614132400710015377 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/browse.R \name{browse-this} \alias{browse-this} \alias{browse_package} \alias{browse_project} \alias{browse_github} \alias{browse_github_issues} \alias{browse_github_pulls} \alias{browse_github_actions} \alias{browse_travis} \alias{browse_circleci} \alias{browse_cran} \title{Visit important project-related web pages} \usage{ browse_package(package = NULL) browse_project() browse_github(package = NULL) browse_github_issues(package = NULL, number = NULL) browse_github_pulls(package = NULL, number = NULL) browse_github_actions(package = NULL) browse_travis(package = NULL, ext = c("com", "org")) browse_circleci(package = NULL) browse_cran(package = NULL) } \arguments{ \item{package}{Name of package. If \code{NULL}, the active project is targeted, regardless of whether it's an R package or not.} \item{number}{Optional, to specify an individual GitHub issue or pull request. Can be a number or \code{"new"}.} \item{ext}{Version of travis to use.} } \description{ These functions take you to various web pages associated with a project (often, an R package) and return the target URL(s) invisibly.
To form these URLs we consult: \itemize{ \item Git remotes configured for the active project that appear to be hosted on a GitHub deployment \item DESCRIPTION file for the active project or the specified \code{package}. The DESCRIPTION file is sought first in the local package library and then on CRAN. \item Fixed templates: \itemize{ \item Travis CI: \verb{https://travis-ci.\{EXT\}/\{OWNER\}/\{PACKAGE\}} \item Circle CI: \verb{https://circleci.com/gh/\{OWNER\}/\{PACKAGE\}} \item CRAN landing page: \verb{https://cran.r-project.org/package=\{PACKAGE\}} \item GitHub mirror of a CRAN package: \verb{https://github.com/cran/\{PACKAGE\}} Templated URLs aren't checked for existence, so there is no guarantee there will be content at the destination. } } } \details{ \itemize{ \item \code{browse_package()}: Assembles a list of URLs and lets user choose one to visit in a web browser. In a non-interactive session, returns all discovered URLs. \item \code{browse_project()}: Thin wrapper around \code{browse_package()} that always targets the active usethis project. \item \code{browse_github()}: Visits a GitHub repository associated with the project. In the case of a fork, you might be asked to specify if you're interested in the source repo or your fork. \item \code{browse_github_issues()}: Visits the GitHub Issues index or one specific issue. \item \code{browse_github_pulls()}: Visits the GitHub Pull Request index or one specific pull request. \item \code{browse_travis()}: Visits the project's page on \href{https://www.travis-ci.com/}{Travis CI}. \item \code{browse_circleci()}: Visits the project's page on \href{https://circleci.com}{Circle CI}. \item \code{browse_cran()}: Visits the package on CRAN, via the canonical URL. } } \examples{ # works on the active project # browse_project() browse_package("httr") browse_github("gh") browse_github_issues("fs") browse_github_issues("fs", 1) browse_github_pulls("curl") browse_github_pulls("curl", 183) browse_travis("gert", ext = "org") browse_cran("MASS") } usethis/man/git_protocol.Rd0000644000175000017500000000302314117743363015645 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/git.R \name{git_protocol} \alias{git_protocol} \alias{use_git_protocol} \title{See or set the default Git protocol} \usage{ git_protocol() use_git_protocol(protocol) } \arguments{ \item{protocol}{One of "https" or "ssh"} } \value{ The protocol, either "https" or "ssh" } \description{ Git operations that address a remote use a so-called "transport protocol". usethis supports HTTPS and SSH. The protocol dictates the Git URL format used when usethis needs to configure the first GitHub remote for a repo: \itemize{ \item \code{protocol = "https"} implies \verb{https://github.com//.git} \item \code{protocol = "ssh"} implies \verb{git@github.com:/.git} } Two helper functions are available: \itemize{ \item \code{git_protocol()} reveals the protocol "in force". As of usethis v2.0.0, this defaults to "https". You can change this for the duration of the R session with \code{use_git_protocol()}. Change the default for all R sessions with code like this in your \code{.Rprofile} (easily editable via \code{\link[=edit_r_profile]{edit_r_profile()}}):\preformatted{options(usethis.protocol = "ssh") } \item \code{use_git_protocol()} sets the Git protocol for the current R session } This protocol only affects the Git URL for newly configured remotes. All existing Git remote URLs are always respected, whether HTTPS or SSH. 
} \examples{ \dontrun{ git_protocol() use_git_protocol("ssh") git_protocol() use_git_protocol("https") git_protocol() } } usethis/man/github-token.Rd0000644000175000017500000000624114153723421015537 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github_token.R \name{github-token} \alias{github-token} \alias{create_github_token} \alias{gh_token_help} \title{Get help with GitHub personal access tokens} \usage{ create_github_token( scopes = c("repo", "user", "gist", "workflow"), description = "DESCRIBE THE TOKEN'S USE CASE", host = NULL ) gh_token_help(host = NULL) } \arguments{ \item{scopes}{Character vector of token scopes, pre-selected in the web form. Final choices are made in the GitHub form. Read more about GitHub API scopes at \url{https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/}.} \item{description}{Short description or nickname for the token. You might (eventually) have multiple tokens on your GitHub account and a label can help you keep track of what each token is for.} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} } \value{ Nothing } \description{ A \href{https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line}{personal access token} (PAT) is needed for certain tasks usethis does via the GitHub API, such as creating a repository, a fork, or a pull request. If you use HTTPS remotes, your PAT is also used when interacting with GitHub as a conventional Git remote. These functions help you get and manage your PAT: \itemize{ \item \code{gh_token_help()} guides you through token troubleshooting and setup. \item \code{create_github_token()} opens a browser window to the GitHub form to generate a PAT, with suggested scopes pre-selected. It also offers advice on storing your PAT. \item \code{gitcreds::gitcreds_set()} helps you register your PAT with the Git credential manager used by your operating system. Later, other packages, such as usethis, gert, and gh can automatically retrieve that PAT and use it to work with GitHub on your behalf. } Usually, the first time the PAT is retrieved in an R session, it is cached in an environment variable, for easier reuse for the duration of that R session. After initial acquisition and storage, all of this should happen automatically in the background. GitHub is encouraging the use of PATs that expire after, e.g., 30 days, so prepare yourself to re-generate and re-store your PAT periodically. Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \details{ \code{create_github_token()} has previously gone by some other names: \code{browse_github_token()} and \code{browse_github_pat()}. } \examples{ \dontrun{ create_github_token() } \dontrun{ gh_token_help() } } \seealso{ \code{\link[gh:gh_whoami]{gh::gh_whoami()}} for information on an existing token and \code{gitcreds::gitcreds_set()} and \code{gitcreds::gitcreds_get()} for a secure way to store and retrieve your PAT. 
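An illustrative first-time setup, assuming you are happy to store the PAT with your operating system's Git credential manager (the token itself is pasted interactively and never saved in a script): \preformatted{
usethis::create_github_token()  # opens the GitHub form; generate and copy the PAT
gitcreds::gitcreds_set()        # paste the PAT when prompted
gh::gh_whoami()                 # confirm the PAT is now discoverable
}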
} usethis/man/use_roxygen_md.Rd0000644000175000017500000000070213737204645016173 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/roxygen.R \name{use_roxygen_md} \alias{use_roxygen_md} \title{Use roxygen2 with markdown} \usage{ use_roxygen_md() } \description{ If you are already using roxygen2, but not with markdown, you'll need to use \href{https://roxygen2md.r-lib.org}{roxygen2md} to convert existing Rd expressions to markdown. The conversion is not perfect, so make sure to check the results. } usethis/man/badges.Rd0000644000175000017500000000430214131645451014362 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/badge.R \name{badges} \alias{badges} \alias{use_badge} \alias{use_cran_badge} \alias{use_bioc_badge} \alias{use_lifecycle_badge} \alias{use_binder_badge} \title{README badges} \usage{ use_badge(badge_name, href, src) use_cran_badge() use_bioc_badge() use_lifecycle_badge(stage) use_binder_badge(ref = git_default_branch(), urlpath = NULL) } \arguments{ \item{badge_name}{Badge name. Used in error message and alt text} \item{href, src}{Badge link and image src} \item{stage}{Stage of the package lifecycle. One of "experimental", "stable", "superseded", or "deprecated".} \item{ref}{A Git branch, tag, or SHA} \item{urlpath}{An optional \code{urlpath} component to add to the link, e.g. \code{"rstudio"} to open an RStudio IDE instead of a Jupyter notebook. See the \href{https://mybinder.readthedocs.io/en/latest/howto/user_interface.html}{binder documentation} for additional examples.} } \description{ These helpers produce the markdown text you need in your README to include badges that report information, such as the CRAN version or test coverage, and link out to relevant external resources. To add badges automatically ensure your badge block starts with a line containing only \verb{} and ends with a line containing only \verb{}. } \details{ \itemize{ \item \code{use_badge()}: a general helper used in all badge functions \item \code{use_bioc_badge()}: badge indicates \href{https://bioconductor.org/developers/}{BioConductor build status} \item \code{use_cran_badge()}: badge indicates what version of your package is available on CRAN, powered by \url{https://www.r-pkg.org} \item \code{use_lifecycle_badge()}: badge declares the developmental stage of a package according to \url{https://lifecycle.r-lib.org/articles/stages.html}. \item \code{use_binder_badge()}: badge indicates that your repository can be launched in an executable environment on \url{https://mybinder.org/} } } \examples{ \dontrun{ use_cran_badge() use_lifecycle_badge("stable") } } \seealso{ Functions that configure continuous integration, such as \code{\link[=use_github_actions]{use_github_actions()}}, also create badges. 
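In practice the badge block in \code{README.md} is delimited by lines containing only \verb{<!-- badges: start -->} and \verb{<!-- badges: end -->} (the usual usethis delimiters), and the helpers insert their markdown between those markers. A minimal usage sketch, assuming the package already has a README with such a block and that its default branch is \code{main}:\preformatted{
# Run from an active package project:
use_cran_badge()                                     # CRAN version badge
use_lifecycle_badge("experimental")                  # lifecycle stage badge
use_binder_badge(ref = "main", urlpath = "rstudio")  # launch-on-binder badge, RStudio UI
}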
} usethis/man/use_github_labels.Rd0000644000175000017500000000655514131645451016631 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/github-labels.R \name{use_github_labels} \alias{use_github_labels} \alias{use_tidy_github_labels} \alias{tidy_labels} \alias{tidy_labels_rename} \alias{tidy_label_colours} \alias{tidy_label_descriptions} \title{Manage GitHub issue labels} \usage{ use_github_labels( repo_spec = deprecated(), labels = character(), rename = character(), colours = character(), descriptions = character(), delete_default = FALSE, host = deprecated(), auth_token = deprecated() ) use_tidy_github_labels() tidy_labels() tidy_labels_rename() tidy_label_colours() tidy_label_descriptions() } \arguments{ \item{repo_spec, host, auth_token}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: These arguments are now deprecated and will be removed in the future. Any input provided via these arguments is not used. The target repo, host, and auth token are all now determined from the current project's Git remotes.} \item{labels}{A character vector giving labels to add.} \item{rename}{A named vector with names giving old names and values giving new names.} \item{colours, descriptions}{Named character vectors giving hexadecimal colours (like \code{e02a2a}) and longer descriptions. The names should match label names, and anything unmatched will be left unchanged. If you create a new label, and don't supply colours, it will be given a random colour.} \item{delete_default}{If \code{TRUE}, removes GitHub default labels that do not appear in the \code{labels} vector and that do not have associated issues.} } \description{ \code{use_github_labels()} can create new labels, update colours and descriptions, and optionally delete GitHub's default labels (if \code{delete_default = TRUE}). It will never delete labels that have associated issues. \code{use_tidy_github_labels()} calls \code{use_github_labels()} with tidyverse conventions powered by \code{tidy_labels()}, \code{tidy_labels_rename()}, \code{tidy_label_colours()} and \code{tidy_label_descriptions()}. } \section{Label usage}{ Labels are used as part of the issue-triage process, designed to minimise the time spent re-reading issues. The absence of a label indicates that an issue is new, and has yet to be triaged. \itemize{ \item \code{reprex} indicates that an issue does not have a minimal reproducible example, and that a reply has been sent requesting one from the user. \item \code{bug} indicates an unexpected problem or unintended behavior. \item \code{feature} indicates a feature request or enhancement. \item \code{docs} indicates an issue with the documentation. \item \code{wip} indicates that someone is working on it or has promised to. \item \verb{good first issue} indicates a good issue for first-time contributors. \item \verb{help wanted} indicates that a maintainer wants help on an issue. 
} } \examples{ \dontrun{ # typical use in, e.g., a new tidyverse project use_github_labels(delete_default = TRUE) # create labels without changing colours/descriptions use_github_labels( labels = c("foofy", "foofier", "foofiest"), colours = NULL, descriptions = NULL ) # change descriptions without changing names/colours use_github_labels( labels = NULL, colours = NULL, descriptions = c("foofiest" = "the foofiest issue you ever saw") ) } } usethis/man/ui.Rd0000644000175000017500000000562214117743363013565 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ui.R \name{ui} \alias{ui} \alias{ui_line} \alias{ui_todo} \alias{ui_done} \alias{ui_oops} \alias{ui_info} \alias{ui_code_block} \alias{ui_stop} \alias{ui_warn} \alias{ui_silence} \alias{ui_field} \alias{ui_value} \alias{ui_path} \alias{ui_code} \alias{ui_unset} \title{User interface} \usage{ ui_line(x = character(), .envir = parent.frame()) ui_todo(x, .envir = parent.frame()) ui_done(x, .envir = parent.frame()) ui_oops(x, .envir = parent.frame()) ui_info(x, .envir = parent.frame()) ui_code_block(x, copy = rlang::is_interactive(), .envir = parent.frame()) ui_stop(x, .envir = parent.frame()) ui_warn(x, .envir = parent.frame()) ui_silence(code) ui_field(x) ui_value(x) ui_path(x, base = NULL) ui_code(x) ui_unset(x = "unset") } \arguments{ \item{x}{A character vector. For block styles, conditions, and questions, each element of the vector becomes a line, and the result is processed by \code{\link[glue:glue]{glue::glue()}}. For inline styles, each element of the vector becomes an entry in a comma separated list.} \item{.envir}{Used to ensure that \code{\link[glue:glue]{glue::glue()}} gets the correct environment. For expert use only.} \item{copy}{If \code{TRUE}, the session is interactive, and the clipr package is installed, will copy the code block to the clipboard.} \item{code}{Code to execute with usual UI output silenced.} \item{base}{If specified, paths will be displayed relative to this path.} } \value{ The block styles, conditions, and questions are called for their side-effect. The inline styles return a string. } \description{ These functions are used to construct the user interface of usethis. Use them in your own package so that your \code{use_} functions work the same way as usethis. The \code{ui_} functions can be broken down into four main categories: \itemize{ \item block styles: \code{ui_line()}, \code{ui_done()}, \code{ui_todo()}, \code{ui_oops()}, \code{ui_info()}. \item conditions: \code{ui_stop()}, \code{ui_warn()}. \item questions: \code{\link[=ui_yeah]{ui_yeah()}}, \code{\link[=ui_nope]{ui_nope()}}. \item inline styles: \code{ui_field()}, \code{ui_value()}, \code{ui_path()}, \code{ui_code()}, \code{ui_unset()}. } The question functions \code{\link[=ui_yeah]{ui_yeah()}} and \code{\link[=ui_nope]{ui_nope()}} have their own \link[=ui-questions]{help page}. } \section{Silencing output}{ All UI output (apart from \code{ui_yeah()}/\code{ui_nope()} prompts) can be silenced by setting \code{options(usethis.quiet = TRUE)}. Use \code{ui_silence()} to silence selected actions. 
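To show how these helpers combine in a downstream package, here is a minimal sketch of a \code{use_}-style function; \code{use_favorite_file()} and its messages are invented for illustration and are not part of usethis.\preformatted{
# Illustrative sketch; assumes usethis is attached, e.g. library(usethis).
use_favorite_file <- function(path = "favorites.txt") {
  if (file.exists(path)) {
    ui_done("Found {ui_path(path)}")
  } else {
    ui_todo("Create {ui_path(path)} and list your favorites there")
  }
  invisible(path)
}

# All of its output (apart from any ui_yeah()/ui_nope() prompts) can be muted:
ui_silence(use_favorite_file())
}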
} \examples{ new_val <- "oxnard" ui_done("{ui_field('name')} set to {ui_value(new_val)}") ui_todo("Redocument with {ui_code('devtools::document()')}") ui_code_block(c( "Line 1", "Line 2", "Line 3" )) } \seealso{ Other user interface functions: \code{\link{ui-questions}} } \concept{user interface functions} \keyword{internal} usethis/man/create_from_github.Rd0000644000175000017500000001510214153723421016763 0ustar nileshnilesh% Generated by roxygen2: do not edit by hand % Please edit documentation in R/create.R \name{create_from_github} \alias{create_from_github} \title{Create a project from a GitHub repo} \usage{ create_from_github( repo_spec, destdir = NULL, fork = NA, rstudio = NULL, open = rlang::is_interactive(), protocol = git_protocol(), host = NULL, auth_token = deprecated(), credentials = deprecated() ) } \arguments{ \item{repo_spec}{A string identifying the GitHub repo in one of these forms: \itemize{ \item Plain \code{OWNER/REPO} spec \item Browser URL, such as \code{"https://github.com/OWNER/REPO"} \item HTTPS Git URL, such as \code{"https://github.com/OWNER/REPO.git"} \item SSH Git URL, such as \code{"git@github.com:OWNER/REPO.git"} } In the case of a browser, HTTPS, or SSH URL, the \code{host} is extracted from the URL. The \code{REPO} part will be the name of the new local folder, which is also a project and Git repo.} \item{destdir}{The new folder is stored here. If \code{NULL}, defaults to user's Desktop or some other conspicuous place. You can also set a default location using the option \code{usethis.destdir}, e.g. \code{options(usethis.destdir = "a/good/dir")}, perhaps saved to your \code{.Rprofile} with \code{\link[=edit_r_profile]{edit_r_profile()}}} \item{fork}{If \code{FALSE}, we clone \code{repo_spec}. If \code{TRUE}, we fork \code{repo_spec}, clone that fork, and do additional set up favorable for future pull requests: \itemize{ \item The source repo, \code{repo_spec}, is configured as the \code{upstream} remote, using the indicated \code{protocol}. \item The local \code{DEFAULT} branch is set to track \code{upstream/DEFAULT}, where \code{DEFAULT} is typically \code{main} or \code{master}. It is also immediately pulled, to cover the case of a pre-existing, out-of-date fork. } If \code{fork = NA} (the default), we check your permissions on \code{repo_spec}. If you can push, we set \code{fork = FALSE}, If you cannot, we set \code{fork = TRUE}.} \item{rstudio}{Initiate an \href{https://support.rstudio.com/hc/en-us/articles/200526207-Using-Projects}{RStudio Project}? Defaults to \code{TRUE} if in an RStudio session and project has no pre-existing \code{.Rproj} file. Defaults to \code{FALSE} otherwise (but note that the cloned repo may already be an RStudio Project, i.e. may already have a \code{.Rproj} file).} \item{open}{If \code{TRUE}, \link[=proj_activate]{activates} the new project: \itemize{ \item If RStudio desktop, the package is opened in a new session. \item If on RStudio server, the current RStudio project is activated. \item Otherwise, the working directory and active project is changed. }} \item{protocol}{One of "https" or "ssh"} \item{host}{GitHub host to target, passed to the \code{.api_url} argument of \code{\link[gh:gh]{gh::gh()}}. If unspecified, gh defaults to "https://api.github.com", although gh's default can be customised by setting the GITHUB_API_URL environment variable. 
For a hypothetical GitHub Enterprise instance, either "https://github.acme.com/api/v3" or "https://github.acme.com" is acceptable.} \item{auth_token}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: No longer consulted now that usethis uses the gert package for Git operations, instead of git2r; gert relies on the credentials package for auth. The API requests are now authorized with the token associated with the \code{host}, as retrieved by \code{\link[gh:gh_token]{gh::gh_token()}}.} \item{credentials}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}: No longer consulted now that usethis uses the gert package for Git operations, instead of git2r; gert relies on the credentials package for auth. The API requests are now authorized with the token associated with the \code{host}, as retrieved by \code{\link[gh:gh_token]{gh::gh_token()}}.} } \description{ Creates a new local project and Git repository from a repo on GitHub, by either cloning or \href{https://docs.github.com/articles/fork-a-repo}{fork-and-cloning}. In the fork-and-clone case, \code{create_from_github()} also does additional remote and branch setup, leaving you in the perfect position to make a pull request with \code{\link[=pr_init]{pr_init()}}, one of several \link[=pull-requests]{functions that work pull requests}. \code{create_from_github()} works best when your GitHub credentials are discoverable. See below for more about authentication. } \section{Git/GitHub Authentication}{ Many usethis functions, including those documented here, potentially interact with GitHub in two different ways: \itemize{ \item Via the GitHub REST API. Examples: create a repo, a fork, or a pull request. \item As a conventional Git remote. Examples: clone, fetch, or push. } Therefore two types of auth can happen and your credentials must be discoverable. Which credentials do we mean? \itemize{ \item A GitHub personal access token (PAT) must be discoverable by the gh package, which is used for GitHub operations via the REST API. See \code{\link[=gh_token_help]{gh_token_help()}} for more about getting and configuring a PAT. \item If you use the HTTPS protocol for Git remotes, your PAT is also used for Git operations, such as \verb{git push}. Usethis uses the gert package for this, so the PAT must be discoverable by gert. Generally gert and gh will discover and use the same PAT. This ability to "kill two birds with one stone" is why HTTPS + PAT is our recommended auth strategy for those new to Git and GitHub and PRs. \item If you use SSH remotes, your SSH keys must also be discoverable, in addition to your PAT. The public key must be added to your GitHub account. } Git/GitHub credential management is covered in a dedicated article: \href{https://usethis.r-lib.org/articles/articles/git-credentials.html}{Managing Git(Hub) Credentials} } \examples{ \dontrun{ create_from_github("r-lib/usethis") # repo_spec can be a URL create_from_github("https://github.com/r-lib/usethis") # a URL repo_spec also specifies the host (e.g. GitHub Enterprise instance) create_from_github("https://github.acme.com/OWNER/REPO") } } \seealso{ \itemize{ \item \code{\link[=use_github]{use_github()}} to go the opposite direction, i.e. 
create a GitHub repo from your local repo \item \code{\link[=git_protocol]{git_protocol()}} for background on \code{protocol} (HTTPS vs SSH) \item \code{\link[=use_course]{use_course()}} to download a snapshot of all files in a GitHub repo, without the need for any local or remote Git operations } } usethis/tests/0000755000175000017500000000000014154446740013243 5ustar nileshnileshusethis/tests/testthat/0000755000175000017500000000000014154505162015075 5ustar nileshnileshusethis/tests/testthat/test-line-ending.R0000644000175000017500000000224413764577255020412 0ustar nileshnileshtest_that("can detect path from RStudio project file", { create_local_package() use_rstudio("posix") expect_equal(proj_line_ending(), "\n") file_delete(proj_path(paste(paste0(project_name(), ".Rproj")))) use_rstudio("windows") expect_equal(proj_line_ending(), "\r\n") }) test_that("can detect path from DESCRIPTION or .R file", { create_local_project() write_utf8(proj_path("DESCRIPTION"), c("x", "y", "z"), line_ending = "\r\n") expect_equal(proj_line_ending(), "\r\n") file_delete(proj_path("DESCRIPTION")) dir_create(proj_path("R")) write_utf8(proj_path("R/test.R"), c("x", "y", "z"), line_ending = "\r\n") expect_equal(proj_line_ending(), "\r\n") }) test_that("falls back to platform specific encoding", { create_local_project() expect_equal(proj_line_ending(), platform_line_ending()) }) test_that("correctly detect line encoding", { path <- file_temp() con <- file(path, open = "wb") writeLines(c("a", "b", "c"), con, sep = "\n") close(con) expect_equal(detect_line_ending(path), "\n") con <- file(path, open = "wb") writeLines(c("a", "b", "c"), con, sep = "\r\n") close(con) expect_equal(detect_line_ending(path), "\r\n") }) usethis/tests/testthat/test-license.R0000644000175000017500000000277314117743363017636 0ustar nileshnileshtest_that("use_mit_license() works", { create_local_package() use_mit_license() expect_equal(desc::desc_get("License", proj_get())[[1]], "MIT + file LICENSE") expect_proj_file("LICENSE.md") expect_true(is_build_ignored("^LICENSE\\.md$")) expect_proj_file("LICENSE") expect_false(is_build_ignored("^LICENSE$")) }) test_that("use_proprietary_license() works", { create_local_package() use_proprietary_license("foo") expect_equal(desc::desc_get("License", proj_get())[[1]], "file LICENSE") expect_proj_file("LICENSE") # TODO add snapshot test }) test_that("other licenses work without error", { create_local_package() expect_error(use_agpl_license(3), NA) expect_error(use_apache_license(2), NA) expect_error(use_cc0_license(), NA) expect_error(use_ccby_license(), NA) expect_error(use_gpl_license(2), NA) expect_error(use_gpl_license(3), NA) expect_error(use_lgpl_license(2.1), NA) expect_error(use_lgpl_license(3), NA) # old fallbacks expect_error(use_agpl3_license(), NA) expect_error(use_gpl3_license(), NA) expect_error(use_apl2_license(), NA) }) test_that("check license gives useful errors", { expect_error(check_license_version(1, 2), "must be 2") expect_error(check_license_version(1, 2:4), "must be 2, 3, or 4") }) test_that("generate correct abbreviations", { expect_equal(license_abbr("GPL", 2, TRUE), "GPL (>= 2)") expect_equal(license_abbr("GPL", 2, FALSE), "GPL-2") expect_equal(license_abbr("Apache License", 2, FALSE), "Apache License (== 2)") }) usethis/tests/testthat/test-utils-git.R0000644000175000017500000000422414117743363020126 0ustar nileshnilesh# Branch ------------------------------------------------------------------ test_that("git_branch() works", { skip_if_no_git_user() create_local_project() 
expect_usethis_error(git_branch(), "Cannot detect") git_init() expect_usethis_error(git_branch(), "unborn branch") writeLines("blah", proj_path("blah.txt")) gert::git_add("blah.txt", repo = git_repo()) gert::git_commit("Make one commit", repo = git_repo()) # branch name can depend on user's config, e.g. could be 'master' or 'main' expect_error_free( b <- git_branch() ) expect_true(nzchar(b)) }) # Protocol ------------------------------------------------------------------ test_that("git_protocol() catches bad input from usethis.protocol option", { withr::with_options( list(usethis.protocol = "nope"), { expect_usethis_error(git_protocol(), "must be either") expect_null(getOption("usethis.protocol")) } ) withr::with_options( list(usethis.protocol = c("ssh", "https")), { expect_usethis_error(git_protocol(), "must be either") expect_null(getOption("usethis.protocol")) } ) }) test_that("use_git_protocol() errors for bad input", { expect_usethis_error(use_git_protocol("nope"), "must be either") }) test_that("git_protocol() defaults to 'https'", { withr::with_options( list(usethis.protocol = NULL), expect_identical(git_protocol(), "https") ) }) test_that("git_protocol() honors, vets, and lowercases the option", { withr::with_options( list(usethis.protocol = "ssh"), expect_identical(git_protocol(), "ssh") ) withr::with_options( list(usethis.protocol = "SSH"), expect_identical(git_protocol(), "ssh") ) withr::with_options( list(usethis.protocol = "https"), expect_identical(git_protocol(), "https") ) withr::with_options( list(usethis.protocol = "nope"), expect_usethis_error(git_protocol(), "must be either") ) }) test_that("use_git_protocol() prioritizes and lowercases direct input", { withr::with_options( list(usethis.protocol = "ssh"), { expect_identical(use_git_protocol("HTTPS"), "https") expect_identical(git_protocol(), "https") } ) }) usethis/tests/testthat/test-tutorial.R0000644000175000017500000000123314117743363020045 0ustar nileshnileshtest_that("use_tutorial() checks its inputs", { skip_if_not_installed("rmarkdown") create_local_package() expect_error(use_tutorial(), "no default") expect_error(use_tutorial(name = "tutorial-file"), "no default") }) test_that("use_tutorial() creates a tutorial", { skip_if_not_installed("rmarkdown") create_local_package() with_mock( # pass the check re: whether learnr is installed check_installed = function(pkg) TRUE, use_tutorial(name = "aaa", title = "bbb") ) tute_file <- path("inst", "tutorials", "aaa", "aaa", ext = "Rmd") expect_proj_file(tute_file) expect_equal(rmarkdown::yaml_front_matter(tute_file)$title, "bbb") }) usethis/tests/testthat/test-ui.R0000644000175000017500000000223314117743363016620 0ustar nileshnileshtest_that("basic UI actions behave as expected", { # suppress test silencing withr::local_options(list(usethis.quiet = FALSE)) expect_snapshot({ ui_line("line") ui_todo("to do") ui_done("done") ui_oops("oops") ui_info("info") ui_code_block(c("x <- 1", "y <- 2")) ui_warn("a warning") }) }) test_that("ui_stop() works", { expect_usethis_error(ui_stop("an error"), "an error") }) test_that("ui_silence() suppresses output", { # suppress test silencing withr::local_options(list(usethis.quiet = FALSE)) expect_output(ui_silence(ui_line()), NA) }) test_that("trailing slash behaviour of ui_path()", { withr::local_options(list(crayon.enabled = FALSE)) # target doesn't exist so no empirical evidence that it's a directory expect_match(ui_path("abc"), "abc'$") # path suggests it's a directory expect_match(ui_path("abc/"), "abc/'$") 
expect_match(ui_path("abc//"), "abc/'$") # path is known to be a directory tmpdir <- withr::local_tempdir(pattern = "ui_path") expect_match(ui_path(tmpdir), "/'$") expect_match(ui_path(paste0(tmpdir, "/")), "[^/]/'$") expect_match(ui_path(paste0(tmpdir, "//")), "[^/]/'$") }) usethis/tests/testthat/test-edit.R0000644000175000017500000001173214131622147017124 0ustar nileshnileshexpect_r_file <- function(...) { expect_true(file_exists(path_home_r(...))) } expect_fs_file <- function(...) { expect_true(file_exists(path_home(...))) } test_that("edit_file() creates new directory and another and a file within", { tmp <- file_temp() expect_false(dir_exists(tmp)) capture.output(new_file <- edit_file(path(tmp, "new_dir", "new_file"))) expect_true(dir_exists(tmp)) expect_true(dir_exists(path(tmp, "new_dir"))) expect_true(file_exists(path(tmp, "new_dir", "new_file"))) }) test_that("edit_file() creates new file in existing directory", { tmp <- file_temp() dir_create(tmp) capture.output(new_file <- edit_file(path(tmp, "new_file"))) expect_true(file_exists(path(tmp, "new_file"))) }) test_that("edit_file() copes with path to existing file", { tmp <- file_temp() dir_create(tmp) existing <- file_create(path(tmp, "a_file")) capture.output(res <- edit_file(path(tmp, "a_file"))) expect_identical(existing, res) }) test_that("edit_template() can create a new template", { create_local_package() edit_template("new_template") expect_proj_file("inst/templates/new_template") }) ## testing edit_XXX("user") only on travis and appveyor, because I don't want to ## risk creating user-level files de novo for an actual user, which would ## obligate me to some nerve-wracking clean up test_that("edit_r_XXX() and edit_git_XXX() have default scope", { skip_if_no_git_user() ## run these manually if you already have these files or are happy to ## have them or delete them skip_if_not_ci() ## on Windows, under R CMD check, some env vars are set to sentinel values ## https://github.com/wch/r-source/blob/78da6e06aa0017564ec057b768f98c5c79e4d958/src/library/tools/R/check.R#L257 ## we need to explicitly ensure R_ENVIRON_USER="" here withr::local_envvar(list(R_ENVIRON_USER = "")) expect_error_free(edit_r_profile()) expect_error_free(edit_r_buildignore()) expect_error_free(edit_r_environ()) expect_error_free(edit_r_makevars()) expect_error_free(edit_git_config()) expect_error_free(edit_git_ignore()) }) test_that("edit_r_XXX('user') ensures the file exists", { ## run these manually if you already have these files or are happy to ## have them or delete them skip_if_not_ci() ## on Windows, under R CMD check, some env vars are set to sentinel values ## https://github.com/wch/r-source/blob/78da6e06aa0017564ec057b768f98c5c79e4d958/src/library/tools/R/check.R#L257 ## we need to explicitly ensure R_ENVIRON_USER="" here withr::local_envvar(list(R_ENVIRON_USER = "")) edit_r_environ("user") expect_r_file(".Renviron") edit_r_profile("user") expect_r_file(".Rprofile") edit_r_makevars("user") expect_r_file(".R", "Makevars") }) test_that("edit_r_buildignore() only works with packages", { create_local_project() expect_usethis_error(edit_r_buildignore(), "not an R package") use_description() edit_r_buildignore() expect_proj_file(".Rbuildignore") }) test_that("can edit snippets", { path <- withr::local_tempdir() withr::local_envvar(c("XDG_CONFIG_HOME" = path)) path <- edit_rstudio_snippets(type = "R") expect_true(file_exists(path)) expect_error( edit_rstudio_snippets("not-existing-type"), regexp = "should be one of" ) }) test_that("edit_r_profile() 
respects R_PROFILE_USER", { path1 <- user_path_prep(file_temp()) withr::local_envvar(list(R_PROFILE_USER = path1)) path2 <- edit_r_profile("user") expect_equal(path1, path2) }) test_that("edit_git_XXX('user') ensures the file exists", { skip_if_no_git_user() ## run these manually if you already have these files or are happy to ## have them or delete them skip_if_not_ci() edit_git_config("user") expect_fs_file(".gitconfig") edit_git_ignore("user") expect_fs_file(".gitignore") expect_match( git_cfg_get("core.excludesfile", where = "global"), "gitignore" ) }) test_that("edit_r_profile() ensures .Rprofile exists in project", { create_local_package() edit_r_profile("project") expect_proj_file(".Rprofile") create_local_project() edit_r_profile("project") expect_proj_file(".Rprofile") }) test_that("edit_r_environ() ensures .Renviron exists in project", { create_local_package() edit_r_environ("project") expect_proj_file(".Renviron") create_local_project() edit_r_environ("project") expect_proj_file(".Renviron") }) test_that("edit_r_makevars() ensures .R/Makevars exists in package", { create_local_package() edit_r_makevars("project") expect_proj_file(".R", "Makevars") }) test_that("edit_git_config() ensures git ignore file exists in project", { create_local_package() edit_git_config("project") expect_proj_file(".git", "config") create_local_project() edit_git_config("project") expect_proj_file(".git", "config") }) test_that("edit_git_ignore() ensures .gitignore exists in project", { create_local_package() edit_git_ignore("project") expect_proj_file(".gitignore") create_local_project() edit_git_ignore("project") expect_proj_file(".gitignore") }) usethis/tests/testthat/test-roxygen.R0000644000175000017500000000123514117743363017677 0ustar nileshnileshtest_that("use_package_doc() compatible with roxygen_ns_append()", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message(use_package_doc()) expect_message(roxygen_ns_append("test"), "Adding 'test'") expect_silent(roxygen_ns_append("test")) }) test_that("use_roxygen_md() adds DESCRIPTION fields to naive package", { skip_if_not_installed("roxygen2") pkg <- create_local_package() use_roxygen_md() expect_identical( desc::desc_get("Roxygen", pkg), c(Roxygen = "list(markdown = TRUE)") ) expect_true(desc::desc_has_fields("RoxygenNote", pkg)) expect_true(uses_roxygen_md()) }) usethis/tests/testthat/test-description.R0000644000175000017500000001151314132400710020506 0ustar nileshnilesh # use_description_defaults() ---------------------------------------------- test_that("user's fields > usethis defaults", { d <- use_description_defaults("pkg", fields = list(Title = "TEST1", URL = "TEST1")) expect_equal(d$Title, "TEST1") expect_equal(d$URL, "TEST1") expect_equal(d$Version, "0.0.0.9000") }) test_that("usethis options > usethis defaults", { withr::local_options(list( usethis.description = list(License = "TEST") )) d <- use_description_defaults() expect_equal(d$License, "TEST") expect_equal(d$Version, "0.0.0.9000") }) test_that("usethis options > usethis defaults, even for Authors@R", { withr::local_options(list( usethis.description = list( "Authors@R" = utils::person("Jane", "Doe") ) )) d <- use_description_defaults() expect_equal( d$`Authors@R`, "person(given = \"Jane\",\n family = \"Doe\")" ) expect_match(d$`Authors@R`, '^person[(]given = "Jane"') expect_match(d$`Authors@R`, '"Doe"[)]$') }) test_that("user's fields > options > defaults", { withr::local_options(list( usethis.description = list(License = 
"TEST1", Title = "TEST1") )) d <- use_description_defaults("pkg", fields = list(Title = "TEST2")) expect_equal(d$Title, "TEST2") expect_equal(d$License, "TEST1") expect_equal(d$Version, "0.0.0.9000") }) test_that("automatically converts person object to text", { d <- use_description_defaults( "pkg", fields = list(`Authors@R` = person("H", "W")) ) expect_match(d$`Authors@R`, '^person[(]given = "H"') expect_match(d$`Authors@R`, '"W"[)]$') }) test_that("can set package", { d <- use_description_defaults(package = "TEST") expect_equal(d$Package, "TEST") }) test_that("`roxygen = FALSE` is honoured", { d <- use_description_defaults(roxygen = FALSE) expect_null(d[["Roxygen"]]) expect_null(d[["RoxygenNote"]]) }) # use_description --------------------------------------------------------- test_that("creation succeeds even if options are broken", { withr::local_options(list(usethis.description = list( `Authors@R` = "person(" ))) create_local_project() expect_error(use_description(), NA) }) test_that("default description is tidy", { withr::local_options(list(usethis.description = NULL, devtools.desc = NULL)) create_local_package() before <- readLines(proj_path("DESCRIPTION")) use_tidy_description() after <- readLines(proj_path("DESCRIPTION")) expect_equal(before, after) }) test_that("valid CRAN names checked", { withr::local_options(list(usethis.description = NULL, devtools.desc = NULL)) create_local_package(dir = file_temp(pattern = "invalid_pkg_name")) expect_error(use_description(check_name = FALSE), NA) expect_error( use_description(check_name = TRUE), "is not a valid package name", class = "usethis_error" ) }) test_that("use_description_list() can initiate, add to, or replace", { create_local_package() with_mock( can_overwrite = function(path) TRUE, use_description_list("Config/Needs/foofy", "alfa") ) desc <- desc::desc() expect_equal(desc$get_field("Config/Needs/foofy"), "alfa") with_mock( can_overwrite = function(path) TRUE, use_description_list("Config/Needs/foofy", "bravo") ) desc <- desc::desc() expect_equal(desc$get_list("Config/Needs/foofy"), c("alfa", "bravo")) use_description_list("Config/Needs/foofy", "alfa") desc <- desc::desc() expect_equal(desc$get_list("Config/Needs/foofy"), c("alfa", "bravo")) with_mock( can_overwrite = function(path) TRUE, use_description_list("Config/Needs/foofy", "charlie", append = FALSE) ) desc <- desc::desc() expect_equal(desc$get_list("Config/Needs/foofy"), "charlie") }) test_that("use_description_field() can address an existing field", { pkg <- create_local_package() orig <- tools::md5sum(proj_path("DESCRIPTION")) ## specify existing value of existing field --> should be no op use_description_field( name = "Version", value = desc::desc_get("Version", file = pkg)[[1]] ) expect_identical(orig, tools::md5sum(proj_path("DESCRIPTION"))) expect_usethis_error( use_description_field( name = "Version", value = "1.1.1" ), "has a different value" ) ## overwrite existing field use_description_field( name = "Version", value = "1.1.1", overwrite = TRUE ) expect_identical(c(Version = "1.1.1"), desc::desc_get("Version", file = pkg)) }) test_that("use_description_field() can add new field", { pkg <- create_local_package() use_description_field(name = "foo", value = "bar") expect_identical(c(foo = "bar"), desc::desc_get("foo", file = pkg)) }) test_that("use_description_field() ignores whitespace", { pkg <- create_local_package() use_description_field(name = "foo", value = "\n bar") use_description_field(name = "foo", value = "bar") expect_identical(c(foo = "\n bar"), 
desc::desc_get("foo", file = pkg)) }) usethis/tests/testthat/test-tibble.R0000644000175000017500000000106114131622147017432 0ustar nileshnileshtest_that("use_tibble() requires a package", { create_local_project() expect_usethis_error(use_tibble(), "not an R package") }) test_that("use_tibble() Imports tibble", { create_local_package(path_temp("mypackage")) withr::local_options(list(usethis.quiet = FALSE)) ui_silence(use_package_doc()) with_mock( check_installed = function(pkg) TRUE, roxygen_update_ns = function(...) NULL, check_functions_exist = function(...) TRUE, expect_snapshot(use_tibble()) ) expect_match(desc::desc_get("Imports", proj_get()), "tibble") }) usethis/tests/testthat/test-readme.R0000644000175000017500000000335114131645451017435 0ustar nileshnileshtest_that("use_readme_md() creates README.md", { create_local_package() use_readme_md() expect_proj_file("README.md") }) test_that("use_readme_rmd() creates README.Rmd", { skip_if_not_installed("rmarkdown") create_local_package() use_readme_rmd() expect_proj_file("README.Rmd") }) test_that("use_readme_rmd() sets up git pre-commit hook if pkg uses git", { skip_if_no_git_user() skip_if_not_installed("rmarkdown") create_local_package() use_git() use_readme_rmd(open = FALSE) expect_proj_file(".git", "hooks", "pre-commit") }) test_that("use_readme_md() has expected form for a non-GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) create_local_package() use_readme_md() expect_snapshot(writeLines(read_utf8("README.md")), transform = scrub_testpkg) }) test_that("use_readme_md() has expected form for a GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) create_local_package() with_mock( target_repo_spec = function(...) "OWNER/TESTPKG", use_readme_md() ) expect_snapshot(writeLines(read_utf8("README.md")), transform = scrub_testpkg) }) test_that("use_readme_rmd() has expected form for a non-GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) create_local_package() use_readme_rmd() expect_snapshot(writeLines(read_utf8("README.Rmd")), transform = scrub_testpkg) }) test_that("use_readme_rmd() has expected form for a GitHub package", { skip_if_not_installed("rmarkdown") local_interactive(FALSE) create_local_package() with_mock( target_repo_spec = function(...) 
"OWNER/TESTPKG", use_readme_rmd() ) expect_snapshot(writeLines(read_utf8("README.Rmd")), transform = scrub_testpkg) }) usethis/tests/testthat/test-utils-glue.R0000644000175000017500000000065014117743363020276 0ustar nileshnileshtest_that("glue_chr() returns plain character, evals in correct env", { x <- letters[1:2] y <- LETTERS[25:26] f <- toupper expect_identical(glue_chr("{f(x)}-{y}"), c("A-Y", "B-Z")) }) test_that("glue_data_chr() returns plain character, evals in correct env", { z <- list(x = letters[1:2], y = LETTERS[25:26]) f <- tolower x <- 1 y <- 2 expect_identical(glue_data_chr(z, "{x}-{f(y)}"), c("a-y", "b-z")) }) usethis/tests/testthat/test-logo.R0000644000175000017500000000035714117743363017150 0ustar nileshnileshtest_that("use_logo() doesn't error", { skip_if_not_installed("magick") skip_on_os("solaris") create_local_package() img <- magick::image_write(magick::image_read("logo:"), "logo.png") expect_error_free(use_logo("logo.png")) }) usethis/tests/testthat/test-git-default-branch.R0000644000175000017500000001102314131645451021633 0ustar nileshnileshtest_that("git_default_branch() consults the default branch candidates, in order", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) create_local_project() use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) # singleton branch, with weird name git_default_branch_rename(from = git_branch(), to = "foofy") expect_equal(git_default_branch(), "foofy") # two weirdly named branches, but one matches init.defaultBranch (local) config gert::git_branch_create("blarg", checkout = TRUE, repo = repo) use_git_config("project", `init.defaultBranch` = "blarg") expect_equal(git_default_branch(), "blarg") # one of the Usual Suspects shows up gert::git_branch_create("master", checkout = TRUE, repo = repo) expect_equal(git_default_branch(), "master") # and another Usual Suspect shows up gert::git_branch_create("main", checkout = TRUE, repo = repo) expect_equal(git_default_branch(), "main") # finally, prefer something that matches what upstream says is default gert::git_branch_create("default-branch-of-upstream", checkout = TRUE, repo = repo) with_mock( git_default_branch_remote = function(remote) { list( name = remote, is_configured = TRUE, url = NA_character_, repo_spec = NA_character_, default_branch = as.character(glue("default-branch-of-{remote}")) ) }, expect_equal(git_default_branch(), "default-branch-of-upstream") ) }) test_that("git_default_branch() errors if can't find obvious local default branch", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) create_local_project() use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) git_default_branch_rename(from = git_branch(), to = "foofy") gert::git_branch_create("blarg", checkout = TRUE, repo = repo) expect_error(git_default_branch(), class = "error_default_branch") }) test_that("git_default_branch() errors for local vs remote mismatch", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) create_local_project() use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) git_default_branch_rename(from = git_branch(), to = "foofy") with_mock( git_default_branch_remote = function(remote) { list( name = remote, is_configured = TRUE, url = NA_character_, repo_spec = NA_character_, 
default_branch = as.character(glue("default-branch-of-{remote}")) ) }, expect_error(git_default_branch(), class = "error_default_branch") ) gert::git_branch_create("blarg", checkout = TRUE, repo = repo) with_mock( git_default_branch_remote = function(remote) { list( name = remote, is_configured = TRUE, url = NA_character_, repo_spec = NA_character_, default_branch = as.character(glue("default-branch-of-{remote}")) ) }, expect_error(git_default_branch(), class = "error_default_branch") ) }) test_that("git_default_branch_rename() surfaces files that smell fishy", { skip_on_cran() skip_if_no_git_user() local_interactive(FALSE) # for snapshot purposes, I don't want a random project name create_local_project(path(path_temp(), "abcde")) use_git() repo <- git_repo() gert::git_add(".gitignore", repo = repo) gert::git_commit("a commit, so we are not on an unborn branch", repo = repo) # make sure we start with default branch = 'master' git_default_branch_rename(from = git_branch(), to = "master") expect_equal(git_default_branch(), "master") badge_lines <- c( "", "[![Codecov test coverage](https://codecov.io/gh/OWNER/REPO/branch/master/graph/badge.svg)](https://codecov.io/gh/OWNER/REPO?branch=master)", "" ) cli::cat_line(badge_lines, file = proj_path("README.md")) gha_lines <- c( "on:", " push:", " branches:", " - master" ) create_directory(".github/workflows") cli::cat_line(gha_lines, file = path(".github", "workflows", "blah.yml")) create_directory("whatever/foo") cli::cat_line( "edit: https://github.com/OWNER/REPO/edit/master/%s", file = path("whatever", "foo", "_bookdown.yaml") ) withr::local_options(usethis.quiet = FALSE) expect_snapshot( git_default_branch_rename() ) }) usethis/tests/testthat/test-vignette.R0000644000175000017500000000256014117743363020033 0ustar nileshnilesh# use_vignette ------------------------------------------------------------ test_that("use_vignette() requires a package", { create_local_project() expect_usethis_error(use_vignette(), "not an R package") }) test_that("use_vignette() gives useful errors", { create_local_package() expect_snapshot(error = TRUE, { use_vignette() use_vignette("bad name") }) }) test_that("use_vignette() does the promised setup", { create_local_package() use_vignette("name", "title") expect_proj_file("vignettes/name.Rmd") ignores <- read_utf8(proj_path(".gitignore")) expect_true("inst/doc" %in% ignores) deps <- desc::desc_get_deps(proj_get()) expect_true( all(c("knitr", "rmarkdown") %in% deps$package[deps$type == "Suggests"]) ) expect_identical( desc::desc_get_or_fail("VignetteBuilder", proj_get()), c(VignetteBuilder = "knitr") ) }) # use_article ------------------------------------------------------------- test_that("use_article goes in article subdirectory", { create_local_package() use_article("test") expect_proj_file("vignettes/articles/test.Rmd") }) # helpers ----------------------------------------------------------------- test_that("valid_vignette_name() works", { expect_true(valid_vignette_name("perfectly-valid-name")) expect_false(valid_vignette_name("01-test")) expect_false(valid_vignette_name("test.1")) }) usethis/tests/testthat/test-rcpp.R0000644000175000017500000000512214117743363017147 0ustar nileshnileshtest_that("use_rcpp() requires a package", { create_local_project() expect_usethis_error(use_rcpp(), "not an R package") }) test_that("use_rcpp() creates files/dirs, edits DESCRIPTION and .gitignore", { pkg <- create_local_package() use_roxygen_md() use_rcpp() expect_match(desc::desc_get("LinkingTo", pkg), "Rcpp") 
expect_match(desc::desc_get("Imports", pkg), "Rcpp") expect_proj_dir("src") ignores <- read_utf8(proj_path("src", ".gitignore")) expect_true(all(c("*.o", "*.so", "*.dll") %in% ignores)) }) test_that("use_rcpp_armadillo() creates Makevars files and edits DESCRIPTION", { create_local_package() use_roxygen_md() local_interactive(FALSE) with_mock( # Required to pass the check re: whether RcppArmadillo is installed check_installed = function(pkg) TRUE, use_rcpp_armadillo() ) expect_match(desc::desc_get("LinkingTo"), "RcppArmadillo") expect_proj_file("src", "Makevars") expect_proj_file("src", "Makevars.win") }) test_that("use_rcpp_eigen() edits DESCRIPTION", { create_local_package() use_roxygen_md() with_mock( # Required to pass the check re: whether RcppEigen is installed check_installed = function(pkg) TRUE, use_rcpp_eigen() ) expect_match(desc::desc_get("LinkingTo"), "RcppEigen") }) test_that("use_src() doesn't message if not needed", { create_local_package() use_roxygen_md() use_package_doc() use_src() withr::local_options(list(usethis.quiet = FALSE)) expect_silent(use_src()) }) test_that("use_makevars() respects pre-existing Makevars", { pkg <- create_local_package() dir_create(proj_path("src")) makevars_file <- proj_path("src", "Makevars") makevars_win_file <- proj_path("src", "Makevars.win") writeLines("USE_CXX = CXX11", makevars_file) file_copy(makevars_file, makevars_win_file) before_makevars_file <- read_utf8(makevars_file) before_makevars_win_file <- read_utf8(makevars_win_file) makevars_settings <- list( "PKG_CXXFLAGS" = "-Wno-reorder" ) use_makevars(makevars_settings) expect_identical(before_makevars_file, read_utf8(makevars_file)) expect_identical(before_makevars_win_file, read_utf8(makevars_win_file)) }) test_that("use_makevars() creates Makevars files with appropriate configuration", { pkg <- create_local_package() makevars_settings <- list( "CXX_STD" = "CXX11" ) use_makevars(makevars_settings) makevars_content <- paste0(names(makevars_settings), " = ", makevars_settings) expect_identical(makevars_content, read_utf8(proj_path("src", "Makevars"))) expect_identical(makevars_content, read_utf8(proj_path("src", "Makevars.win"))) }) usethis/tests/testthat/test-code-of-conduct.R0000644000175000017500000000025214117743363021153 0ustar nileshnileshtest_that("use_code_of_conduct() creates promised file", { create_local_project() use_code_of_conduct("test@example.com") expect_proj_file("CODE_OF_CONDUCT.md") }) usethis/tests/testthat/test-data.R0000644000175000017500000000443314131622147017110 0ustar nileshnileshtest_that("use_data() errors for a non-package project", { create_local_project() expect_usethis_error(use_data(letters), "not an R package") }) test_that("use_data() stores new, non-internal data", { pkg <- create_local_package() letters2 <- letters month.abb2 <- month.abb expect_false(desc::desc_has_fields("LazyData")) use_data(letters2, month.abb2) expect_true(desc::desc_has_fields("LazyData")) rm(letters2, month.abb2) load(proj_path("data", "letters2.rda")) load(proj_path("data", "month.abb2.rda")) expect_identical(letters2, letters) expect_identical(month.abb2, month.abb) }) test_that("use_data() honors `overwrite` for non-internal data", { pkg <- create_local_package() letters2 <- letters use_data(letters2) expect_usethis_error(use_data(letters2), ".*data/letters2.rda.* already exist") letters2 <- rev(letters) use_data(letters2, overwrite = TRUE) load(proj_path("data", "letters2.rda")) expect_identical(letters2, rev(letters)) }) test_that("use_data() stores new internal data", 
{ pkg <- create_local_package() letters2 <- letters month.abb2 <- month.abb use_data(letters2, month.abb2, internal = TRUE) rm(letters2, month.abb2) load(proj_path("R", "sysdata.rda")) expect_identical(letters2, letters) expect_identical(month.abb2, month.abb) }) test_that("use_data() honors `overwrite` for internal data", { pkg <- create_local_package() letters2 <- letters use_data(letters2, internal = TRUE) rm(letters2) expect_usethis_error( use_data(letters2, internal = TRUE), ".*R/sysdata.rda.* already exist" ) letters2 <- rev(letters) use_data(letters2, internal = TRUE, overwrite = TRUE) load(proj_path("R", "sysdata.rda")) expect_identical(letters2, rev(letters)) }) test_that("use_data() writes version 2 by default", { create_local_package() x <- letters use_data(x, internal = TRUE, version = 2, compress = FALSE) expect_identical( rawToChar(readBin(proj_path("R", "sysdata.rda"), n = 4, what = "raw")), "RDX2" ) }) test_that("use_data_raw() does setup", { create_local_package() use_data_raw(open = FALSE) expect_proj_file(path("data-raw", "DATASET.R")) use_data_raw("daisy", open = FALSE) expect_proj_file(path("data-raw", "daisy.R")) expect_true(is_build_ignored("^data-raw$")) }) usethis/tests/testthat/test-rmarkdown.R0000644000175000017500000000176114117743363020214 0ustar nileshnileshtest_that("use_rmarkdown_template() creates everything as promised, defaults", { create_local_package() use_rmarkdown_template() path <- path("inst", "rmarkdown", "templates", "template-name") yml <- read_utf8(proj_path(path, "template.yaml")) expect_true( all( c( "name: Template Name", "description: >", " A description of the template", "create_dir: FALSE" ) %in% yml ) ) expect_proj_file(path, "skeleton", "skeleton.Rmd") }) test_that("use_rmarkdown_template() creates everything as promised, args", { create_local_package() use_rmarkdown_template( template_name = "aaa", template_dir = "bbb", template_description = "ccc", template_create_dir = TRUE ) path <- path("inst", "rmarkdown", "templates", "bbb") yml <- read_utf8(proj_path(path, "template.yaml")) expect_true( all( c("name: aaa", "description: >", " ccc", "create_dir: TRUE") %in% yml ) ) expect_proj_file(path, "skeleton", "skeleton.Rmd") }) usethis/tests/testthat/test-write.R0000644000175000017500000001604014131645451017331 0ustar nileshnilesh# test that write_utf8() does not alter active project and # does not consult active project for line ending test_that("write_utf8(): no active project, write path outside project", { local_project(NULL) expect_false(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-nonproject") expect_false(possibly_in_proj(dir)) write_utf8(path(dir, "letters_LF"), letters[1:2], line_ending = "\n") expect_equal( readBin(path(dir, "letters_LF"), what = "raw", n = 3), charToRaw("a\nb") ) write_utf8(path(dir, "letters_CRLF"), letters[1:2], line_ending = "\r\n") expect_equal( readBin(path(dir, "letters_CRLF"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_false(proj_active()) }) test_that("write_utf8(): no active project, write to path inside a project", { local_project(NULL) expect_false(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-in-a-project") file_create(path(dir, ".here")) expect_true(possibly_in_proj(dir)) with_project(dir, use_rstudio(line_ending = "posix")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\nb") ) file_delete(path(dir, paste0(path_file(dir), ".Rproj"))) with_project(dir, use_rstudio(line_ending = 
"windows")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_false(proj_active()) }) test_that("write_utf8(): in an active project, write path outside project", { proj <- create_local_project(rstudio = TRUE) expect_true(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-nonproject") expect_false(possibly_in_proj(dir)) write_utf8(path(dir, "letters_LF"), letters[1:2], line_ending = "\n") expect_equal( readBin(path(dir, "letters_LF"), what = "raw", n = 3), charToRaw("a\nb") ) write_utf8(path(dir, "letters_CRLF"), letters[1:2], line_ending = "\r\n") expect_equal( readBin(path(dir, "letters_CRLF"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_equal(proj_get(), proj) }) test_that("write_utf8(): in an active project, write path in other project", { proj <- create_local_project(rstudio = TRUE) expect_true(proj_active()) dir <- withr::local_tempdir(pattern = "write-utf8-in-a-project") file_create(path(dir, ".here")) expect_true(possibly_in_proj(dir)) with_project(dir, use_rstudio(line_ending = "posix")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\nb") ) file_delete(path(dir, paste0(path_file(dir), ".Rproj"))) with_project(dir, use_rstudio(line_ending = "windows")) write_utf8(path(dir, "letters"), letters[1:2]) expect_equal( readBin(path(dir, "letters"), what = "raw", n = 3), charToRaw("a\r\n") ) expect_equal(proj_get(), proj) }) test_that("write_utf8() can append text when requested", { path <- file_temp() write_utf8(path, "x", line_ending = "\n") write_utf8(path, "x", line_ending = "\n", append = TRUE) expect_equal(readChar(path, 4), "x\nx\n") }) test_that("write_utf8() respects line ending", { path <- file_temp() write_utf8(path, "x", line_ending = "\n") expect_equal(detect_line_ending(path), "\n") write_utf8(path, "x", line_ending = "\r\n") expect_equal(detect_line_ending(path), "\r\n") }) # TODO: explore more edge cases re: active project on both sides test_that("write_utf8() can operate outside of a project", { dir <- withr::local_tempdir(pattern = "write-utf8-test") withr::local_dir(dir) local_project(NULL) expect_false(proj_active()) expect_error_free(write_utf8(path = "foo", letters[1:3])) }) # https://github.com/r-lib/usethis/issues/514 test_that("write_utf8() always produces a trailing newline", { path <- file_temp() write_utf8(path, "x", line_ending = "\n") expect_equal(readChar(path, 2), "x\n") }) test_that("write_union() writes a de novo file", { tmp <- file_temp() expect_false(file_exists(tmp)) write_union(tmp, letters[1:3], quiet = TRUE) expect_identical(read_utf8(tmp), letters[1:3]) }) test_that("write_union() leaves file 'as is'", { tmp <- file_temp() writeLines(letters[1:3], tmp) before <- read_utf8(tmp) write_union(tmp, "b", quiet = TRUE) expect_identical(before, read_utf8(tmp)) }) test_that("write_union() adds lines", { tmp <- file_temp() writeLines(letters[1:3], tmp) write_union(tmp, letters[4:5], quiet = TRUE) expect_setequal(read_utf8(tmp), letters[1:5]) }) # https://github.com/r-lib/usethis/issues/526 test_that("write_union() doesn't remove duplicated lines in the input", { tmp <- file_temp() before <- rep(letters[1:2], 3) add_me <- c("z", "a", "c", "a", "b") writeLines(before, tmp) expect_identical(before, read_utf8(tmp)) write_union(tmp, add_me, quiet = TRUE) expect_identical(read_utf8(tmp), c(before, c("z", "c"))) }) test_that("same_contents() detects if contents are / are not 
same", { tmp <- file_temp() x <- letters[1:3] writeLines(x, con = tmp, sep = "\n") expect_true(same_contents(tmp, x)) expect_false(same_contents(tmp, letters[4:6])) }) test_that("write_over() writes a de novo file", { tmp <- file_temp() expect_false(file_exists(tmp)) write_over(tmp, letters[1:3], quiet = TRUE) expect_identical(read_utf8(tmp), letters[1:3]) }) test_that("write_over() leaves file 'as is' (outside of a project)", { local_interactive(FALSE) tmp <- withr::local_file(file_temp()) writeLines(letters[1:3], tmp) before <- read_utf8(tmp) write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) # usethis.overwrite shouldn't matter for a file outside of a project withr::with_options( list(usethis.overwrite = TRUE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) } ) }) test_that("write_over() works in active project", { local_interactive(FALSE) create_local_project() tmp <- proj_path("foo.txt") writeLines(letters[1:3], tmp) before <- read_utf8(tmp) write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) use_git() withr::with_options( list(usethis.overwrite = TRUE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), letters[4:6]) } ) }) test_that("write_over() works for a file in a project that is not active", { local_interactive(FALSE) owd <- getwd() proj <- create_local_project() use_git() tmp <- proj_path("foo.txt") writeLines(letters[1:3], tmp) withr::local_dir(owd) local_project(NULL) expect_false(proj_active()) tmp <- path(proj, "foo.txt") before <- read_utf8(tmp) withr::with_options( list(usethis.overwrite = FALSE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), before) } ) withr::with_options( list(usethis.overwrite = TRUE), { write_over(tmp, letters[4:6], quiet = TRUE) expect_identical(read_utf8(tmp), letters[4:6]) } ) expect_false(proj_active()) }) usethis/tests/testthat/test-git.R0000644000175000017500000000461614131645451016770 0ustar nileshnileshtest_that("uses_git() works", { skip_if_no_git_user() create_local_package() expect_false(uses_git()) expect_error(check_uses_git()) git_init() expect_true(uses_git()) expect_error_free(check_uses_git()) }) test_that('use_git_config(scope = "project") errors if project not using git', { create_local_package() expect_usethis_error( use_git_config(scope = "project", user.name = "USER.NAME"), "Cannot detect that project is already a Git repository" ) }) test_that("use_git_config() can set local config", { skip_if_no_git_user() create_local_package() use_git() use_git_config( scope = "project", user.name = "Jane", user.email = "jane@example.org", init.defaultBranch = "main" ) r <- git_repo() expect_identical(git_cfg_get("user.name", "local"), "Jane") expect_identical(git_cfg_get("user.email", "local"), "jane@example.org") expect_identical(git_cfg_get("init.defaultBranch", "local"), "main") expect_identical(git_cfg_get("init.defaultbranch", "local"), "main") }) test_that("use_git_config() can set a non-existing config field", { skip_if_no_git_user() create_local_package() use_git() expect_null(git_cfg_get("aaa.bbb")) use_git_config(scope = "project", aaa.bbb = "ccc") expect_identical(git_cfg_get("aaa.bbb"), "ccc") }) test_that("use_git_config() facilitates round trips", { skip_if_no_git_user() create_local_package() use_git() orig <- use_git_config(scope = "project", aaa.bbb = "ccc") expect_null(orig$aaa.bbb) expect_identical(git_cfg_get("aaa.bbb"), "ccc") new <- use_git_config(scope 
= "project", aaa.bbb = NULL) expect_identical(new$aaa.bbb, "ccc") expect_null(git_cfg_get("aaa.bbb")) }) test_that("use_git_hook errors if project not using git", { create_local_package() expect_usethis_error( use_git_hook( "pre-commit", render_template("readme-rmd-pre-commit.sh") ), "Cannot detect that project is already a Git repository" ) }) test_that("git remote handlers work", { skip_if_no_git_user() create_local_package() use_git() expect_null(git_remotes()) use_git_remote(name = "foo", url = "foo_url") expect_identical(git_remotes(), list(foo = "foo_url")) use_git_remote(name = "foo", url = "new_url", overwrite = TRUE) expect_identical(git_remotes(), list(foo = "new_url")) use_git_remote(name = "foo", url = NULL, overwrite = TRUE) expect_null(git_remotes()) }) usethis/tests/testthat/test-ignore.R0000644000175000017500000000026014117743363017464 0ustar nileshnileshtest_that(". escaped around surround by anchors", { expect_equal(escape_path("."), "^\\.$") }) test_that("strip trailing /", { expect_equal(escape_path("./"), "^\\.$") }) usethis/tests/testthat/test-use_github_file.R0000644000175000017500000000312714131645451021336 0ustar nileshnileshtest_that("parse_file_url() works when it should", { expected <- list( parsed = TRUE, repo_spec = "OWNER/REPO", path = "path/to/some/file", ref = "REF", host = "https://github.com" ) expect_equal( parse_file_url("https://github.com/OWNER/REPO/blob/REF/path/to/some/file"), expected ) expect_equal( parse_file_url("https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"), expected ) expected$path <- "file" expect_equal( parse_file_url("https://github.com/OWNER/REPO/blob/REF/file"), expected ) expect_equal( parse_file_url("https://github.com/OWNER/REPO/blob/REF/file"), parse_file_url("https://raw.githubusercontent.com/OWNER/REPO/REF/file") ) expected$host <- "https://github.acme.com" expect_equal( parse_file_url("https://github.acme.com/OWNER/REPO/blob/REF/file"), expected ) expect_equal( parse_file_url("https://raw.github.acme.com/OWNER/REPO/REF/file"), expected ) }) test_that("parse_file_url() gives up when it should", { out <- parse_file_url("OWNER/REPO") expect_false(out$parsed) }) test_that("parse_file_url() errors when it should", { expect_error(parse_file_url("https://github.com/OWNER/REPO")) expect_error(parse_file_url("https://github.com/OWNER/REPO.git")) expect_error(parse_file_url("https://github.com/OWNER/REPO/commit/abcdefg")) expect_error(parse_file_url("https://github.com/OWNER/REPO/releases/tag/vx.y.z")) expect_error(parse_file_url("https://github.com/OWNER/REPO/tree/BRANCH")) expect_error(parse_file_url("https://gitlab.com/OWNER/REPO/path/to/file")) }) usethis/tests/testthat/test-use_import_from.R0000644000175000017500000000142714117743363021420 0ustar nileshnileshtest_that("use_import_from() imports the related package & adds line to package doc", { create_local_package() use_package_doc() use_import_from("tibble", "tibble") expect_equal(trimws(desc::desc_get("Imports", proj_get()))[[1]], "tibble") expect_equal(roxygen_ns_show(), "#' @importFrom tibble tibble") }) test_that("use_import_from() adds one line for each function", { create_local_package() use_package_doc() use_import_from("tibble", c("tibble", "enframe", "deframe")) expect_snapshot(roxygen_ns_show()) }) test_that("use_import_from() generates helpful errors", { create_local_package() use_package_doc() expect_snapshot(error = TRUE, { use_import_from(1) use_import_from(c("tibble", "rlang")) use_import_from("tibble", "pool_noodle") }) }) 
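# A hedged sketch (an editor's assumption, not part of the original test file):
# one further check that could be added here, relying on block_append()'s
# de-duplication so repeated calls leave a single @importFrom line. Kept
# commented out and unverified.
# test_that("use_import_from() does not duplicate lines on repeated calls", {
#   create_local_package()
#   use_package_doc()
#   use_import_from("tibble", "tibble")
#   use_import_from("tibble", "tibble")
#   expect_equal(roxygen_ns_show(), "#' @importFrom tibble tibble")
# })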
usethis/tests/testthat/test-pkgdown.R0000644000175000017500000000451714153502006017646 0ustar nileshnileshtest_that("use_pkgdown() requires a package", { create_local_project() expect_usethis_error(use_pkgdown(), "not an R package") }) test_that("use_pkgdown() creates and ignores the promised file/dir", { create_local_package() local_interactive(FALSE) with_mock( check_installed = function(pkg) TRUE, pkgdown_version = function() "1.9000", use_pkgdown() ) expect_true(uses_pkgdown()) expect_true(is_build_ignored("^_pkgdown\\.yml$")) expect_true(is_build_ignored("^docs$")) }) # pkgdown helpers ---- test_that("pkgdown helpers behave in the absence of pkgdown", { create_local_package() expect_null(pkgdown_config_path()) expect_false(uses_pkgdown()) expect_equal(pkgdown_config_meta(), list()) expect_null(pkgdown_url()) }) test_that("pkgdown_config_meta() returns a list", { create_local_package() local_interactive(FALSE) with_mock( check_installed = function(pkg) TRUE, pkgdown_version = function() "1.9000", use_pkgdown() ) expect_type(pkgdown_config_meta(), "list") writeLines(c("home:", " strip_header: true"), pkgdown_config_path()) expect_equal( pkgdown_config_meta(), list(home = list(strip_header = TRUE)) ) }) test_that("pkgdown_url() returns correct data, warns if pedantic", { create_local_package() local_interactive(FALSE) with_mock( check_installed = function(pkg) TRUE, pkgdown_version = function() "1.9000", use_pkgdown() ) # empty config expect_null(pkgdown_url()) expect_silent(pkgdown_url()) expect_warning(pkgdown_url(pedantic = TRUE), "url") # nonempty config, but no url writeLines(c("home:", " strip_header: true"), pkgdown_config_path()) expect_null(pkgdown_url()) expect_silent(pkgdown_url()) expect_warning(pkgdown_url(pedantic = TRUE), "url") # config has url writeLines("url: https://usethis.r-lib.org", pkgdown_config_path()) expect_equal(pkgdown_url(), "https://usethis.r-lib.org") # config has url with trailing slash writeLines("url: https://malcolmbarrett.github.io/tidysmd/", pkgdown_config_path()) expect_equal(pkgdown_url(), "https://malcolmbarrett.github.io/tidysmd/") }) test_that("tidyverse_url() leaves trailing slash alone, almost always", { url <- "https://malcolmbarrett.github.io/tidysmd/" out <- tidyverse_url(url, tr = list(repo_name = "REPO", repo_owner = "OWNER")) expect_equal(out, url) }) usethis/tests/testthat/test-directory.R0000644000175000017500000000144713737204645020217 0ustar nileshnileshtest_that("create_directory() doesn't bother a pre-existing target dir", { tmp <- file_temp() dir_create(tmp) expect_true(is_dir(tmp)) expect_error_free(create_directory(tmp)) expect_true(is_dir(tmp)) }) test_that("create_directory() creates a directory", { tmp <- file_temp("yes") create_directory(tmp) expect_true(is_dir(tmp)) }) # check_path_is_directory ------------------------------------------------- test_that("no false positive for trailing slash", { pwd <- sub("/$", "", getwd()) expect_error_free(check_path_is_directory(paste0(pwd, "/"))) }) test_that("symlink to directory is directory", { base <- dir_create(file_temp()) base_a <- dir_create(path(base, "a")) base_b <- link_create(base_a, path(base, "b")) expect_error_free(check_path_is_directory(base_b)) }) usethis/tests/testthat/test-github_token.R0000644000175000017500000000126114117743363020665 0ustar nileshnileshtest_that("code_hint_with_host() works", { expect_identical(code_hint_with_host("foo"), "foo()") expect_identical(code_hint_with_host("foo", arg_name = "arg"), "foo()") host_github <- "https://api.github.com" 
expect_identical(code_hint_with_host("foo", host = host_github), "foo()") expect_identical( code_hint_with_host("foo", host = host_github, arg_name = "arg"), "foo()" ) host_ghe <- "https://github.acme.com" expect_identical( code_hint_with_host("foo", host = host_ghe), 'foo("https://github.acme.com")' ) expect_identical( code_hint_with_host("foo", host = host_ghe, arg_name = "arg"), 'foo(arg = \"https://github.acme.com\")' ) }) usethis/tests/testthat/test-package.R0000644000175000017500000000323214131622147017566 0ustar nileshnileshtest_that("use_package() won't facilitate dependency on tidyverse/tidymodels", { create_local_package() expect_usethis_error(use_package("tidyverse"), "rarely a good idea") expect_usethis_error(use_package("tidymodels"), "rarely a good idea") }) test_that("use_package() guides new packages but not pre-existing ones", { create_local_package() withr::local_options(usethis.quiet = FALSE) expect_snapshot({ use_package("withr") use_package("withr") use_package("withr", "Suggests") }) }) # use_dev_package() ----------------------------------------------------------- test_that("use_dev_package() can override over default remote", { create_local_package() use_dev_package("usethis", remote = "github::r-lib/usethis") desc <- desc::desc(proj_path("DESCRIPTION")) expect_equal(desc$get_remotes(), "github::r-lib/usethis") }) test_that("package_remote() works for an installed package with github URL", { d <- desc::desc(text = c( "Package: test", "URL: https://github.com/OWNER/test" )) with_mock( ui_yeah = function(...) TRUE, expect_equal(package_remote(d), "OWNER/test") ) }) test_that("package_remote() works for package installed from github or gitlab", { d <- desc::desc(text = c( "Package: test", "RemoteUsername: OWNER", "RemoteRepo: test" )) d$set(RemoteType = "github") expect_equal(package_remote(d), "OWNER/test") d$set(RemoteType = "gitlab") expect_equal(package_remote(d), "gitlab::OWNER/test") }) test_that("package_remote() errors if no remote and no github URL", { d <- desc::desc(text = c("Package: test")) expect_usethis_error(package_remote(d), "Cannot determine remote") }) usethis/tests/testthat/test-badge.R0000644000175000017500000000152414132400710017226 0ustar nileshnileshtest_that("use_[cran|bioc]_badge() don't error", { create_local_package() expect_error_free(use_cran_badge()) expect_error_free(use_bioc_badge()) }) test_that("use_lifecycle_badge() handles bad and good input", { create_local_package() expect_snapshot(error = TRUE, { use_lifecycle_badge() use_lifecycle_badge("eperimental") }) expect_error_free(use_lifecycle_badge("stable")) }) test_that("use_binder_badge() needs a github repository", { skip_if_no_git_user() create_local_project() use_git() expect_error(use_binder_badge(), class = "usethis_error_bad_github_remote_config") }) test_that("use_badge() does nothing if badge seems to pre-exist", { create_local_package() href <- "https://cran.r-project.org/package=foo" writeLines(href, proj_path("README.md")) expect_false(use_badge("foo", href, "SRC")) }) usethis/tests/testthat/helper.R0000644000175000017500000001042014131645451016475 0ustar nileshnilesh## If session temp directory appears to be, or be within, a project, there ## will be large scale, spurious test failures. ## The IDE sometimes leaves .Rproj files behind in session temp directory or ## one of its parents. ## Delete such files manually. 
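## A hedged sketch of the manual cleanup described above (an assumption, not
## executed by the test suite): locate any stray .Rproj files at or above the
## session temp directory and delete them, e.g.
##   stray_proj <- proj_find(fs::path_temp())
##   if (!is.null(stray_proj)) {
##     fs::file_delete(fs::dir_ls(stray_proj, glob = "*.Rproj"))
##   }
## The code below only detects and reports the situation; it does not delete.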
session_temp_proj <- proj_find(path_temp()) if (!is.null(session_temp_proj)) { Rproj_files <- fs::dir_ls(session_temp_proj, glob = "*.Rproj") ui_line(c( "Rproj file(s) found at or above session temp dir:", paste0("* ", Rproj_files), "Expect this to cause spurious test failures." )) } create_local_package <- function(dir = file_temp(pattern = "testpkg"), env = parent.frame(), rstudio = FALSE) { create_local_thing(dir, env, rstudio, "package") } create_local_project <- function(dir = file_temp(pattern = "testproj"), env = parent.frame(), rstudio = FALSE) { create_local_thing(dir, env, rstudio, "project") } create_local_thing <- function(dir = file_temp(pattern = pattern), env = parent.frame(), rstudio = FALSE, thing = c("package", "project")) { thing <- match.arg(thing) if (fs::dir_exists(dir)) { ui_stop("Target {ui_code('dir')} {ui_path(dir)} already exists.") } old_project <- proj_get_() # this could be `NULL`, i.e. no active project old_wd <- getwd() # not necessarily same as `old_project` withr::defer( { ui_done("Deleting temporary project: {ui_path(dir)}") fs::dir_delete(dir) }, envir = env ) ui_silence( switch( thing, package = create_package(dir, rstudio = rstudio, open = FALSE, check_name = FALSE), project = create_project(dir, rstudio = rstudio, open = FALSE) ) ) withr::defer(proj_set(old_project, force = TRUE), envir = env) proj_set(dir) withr::defer( { ui_done("Restoring original working directory: {ui_path(old_wd)}") setwd(old_wd) }, envir = env ) setwd(proj_get()) invisible(proj_get()) } scrub_testpkg <- function(message) { gsub("testpkg[a-zA-Z0-9]+", "{TESTPKG}", message, perl = TRUE) } toggle_rlang_interactive <- function() { # TODO: consider setting options(rlang_backtrace_on_error = "reminder") when # in non-interactive mode, to suppress full backtraces before <- getOption("rlang_interactive") after <- if (identical(before, FALSE)) TRUE else FALSE options(rlang_interactive = after) ui_line(glue::glue("rlang_interactive: {before %||% ''} --> {after}")) invisible() } skip_if_not_ci <- function() { ci_providers <- c("GITHUB_ACTIONS", "TRAVIS", "APPVEYOR") ci <- any(toupper(Sys.getenv(ci_providers)) == "TRUE") if (ci) { return(invisible(TRUE)) } skip("Not on GitHub Actions, Travis, or Appveyor") } skip_if_no_git_user <- function() { user_name <- git_cfg_get("user.name") user_email <- git_cfg_get("user.email") user_name_exists <- !is.null(user_name) user_email_exists <- !is.null(user_email) if (user_name_exists && user_email_exists) { return(invisible(TRUE)) } skip("No Git user configured") } # CRAN's mac builder sets $HOME to a read-only ram disk, so tests can fail if # you even tickle something that might try to lock its own config file during # the operation (e.g. git) or if you simply test for writeability skip_on_cran_macos <- function() { sysname <- tolower(Sys.info()[["sysname"]]) on_cran <- !identical(Sys.getenv("NOT_CRAN"), "true") if (on_cran && sysname == "darwin") { skip("On CRAN and on macOS") } invisible(TRUE) } with_mock <- function(..., .parent = parent.frame()) { mockr::with_mock(..., .parent = .parent, .env = "usethis") } expect_usethis_error <- function(...) { expect_error(..., class = "usethis_error") } expect_error_free <- function(...) { expect_error(..., regexp = NA) } is_build_ignored <- function(pattern, ..., base_path = proj_get()) { lines <- read_utf8(path(base_path, ".Rbuildignore")) length(grep(pattern, x = lines, fixed = TRUE, ...)) > 0 } test_file <- function(fname) testthat::test_path("ref", fname) expect_proj_file <- function(...) 
expect_true(file_exists(proj_path(...))) expect_proj_dir <- function(...) expect_true(dir_exists(proj_path(...))) usethis/tests/testthat/test-pipe.R0000644000175000017500000000130214131622147017124 0ustar nileshnileshtest_that("use_pipe() requires a package", { create_local_project() expect_usethis_error(use_pipe(), "not an R package") }) test_that("use_pipe(export = TRUE) adds promised file, Imports magrittr", { create_local_package() use_pipe(export = TRUE) expect_match(desc::desc_get("Imports", proj_get()), "magrittr") expect_proj_file("R", "utils-pipe.R") }) test_that("use_pipe(export = FALSE) adds roxygen to package doc", { create_local_package() use_package_doc() use_pipe(export = FALSE) expect_match(desc::desc_get("Imports", proj_get()), "magrittr") package_doc <- read_utf8(proj_path(package_doc_path())) expect_match(package_doc, "#' @importFrom magrittr %>%", all = FALSE) }) usethis/tests/testthat/test-documentation.R0000644000175000017500000000057313764577255021075 0ustar nileshnileshtest_that("use_package_doc() requires a package", { create_local_project() expect_false(has_package_doc()) expect_usethis_error(use_package_doc(), "not an R package") }) test_that("use_package_doc() creates the promised file", { create_local_package() use_package_doc() expect_proj_file("R", paste0(project_name(), "-package.R")) expect_true(has_package_doc()) }) usethis/tests/testthat/test-testthat.R0000644000175000017500000000043314117743363020043 0ustar nileshnileshtest_that("use_testhat() sets up infrastructure", { pkg <- create_local_package() use_testthat() expect_match(desc::desc_get("Suggests", proj_get()), "testthat") expect_proj_dir("tests", "testthat") expect_proj_file("tests", "testthat.R") expect_true(uses_testthat()) }) usethis/tests/testthat/test-tidyverse.R0000644000175000017500000000560614140054263020216 0ustar nileshnileshtest_that("use_tidy_description() alphabetises dependencies and remotes", { pkg <- create_local_package() use_package("usethis") use_package("desc") use_package("withr", "Suggests") use_package("gh", "Suggests") desc::desc_set_remotes(c("r-lib/styler", "jimhester/lintr")) use_tidy_description() desc <- read_utf8(proj_path("DESCRIPTION")) expect_gt(grep("usethis", desc), grep("desc", desc)) expect_gt(grep("withr", desc), grep("gh", desc)) expect_gt(grep("r\\-lib\\/styler", desc), grep("jimhester\\/lintr", desc)) }) test_that("use_tidy_dependencies() isn't overly informative", { skip_on_cran() skip_if_offline("github.com") create_local_package(fs::path_temp("tidydeps")) use_package_doc() withr::local_options(usethis.quiet = FALSE) expect_snapshot(use_tidy_dependencies()) }) test_that("use_tidy_eval() inserts the template file and Imports rlang", { skip_if_not_installed("roxygen2") pkg <- create_local_package() use_tidy_eval() expect_match(dir_ls(proj_path("R")), "utils-tidy-eval.R") expect_match(desc::desc_get("Imports", pkg), "rlang") }) test_that("use_tidy_GITHUB-STUFF() adds and Rbuildignores files", { local_interactive(FALSE) create_local_package() use_git() with_mock( target_repo_spec = function(...) 
"OWNER/REPO", { use_tidy_contributing() use_tidy_support() } ) use_tidy_issue_template() use_tidy_coc() expect_proj_file(".github/CONTRIBUTING.md") expect_proj_file(".github/ISSUE_TEMPLATE/issue_template.md") expect_proj_file(".github/SUPPORT.md") expect_proj_file(".github/CODE_OF_CONDUCT.md") expect_true(is_build_ignored("^\\.github$")) }) test_that("use_tidy_github() adds and Rbuildignores files", { local_interactive(FALSE) create_local_package() use_git() with_mock( target_repo_spec = function(...) "OWNER/REPO", use_tidy_github() ) expect_proj_file(".github/CONTRIBUTING.md") expect_proj_file(".github/ISSUE_TEMPLATE/issue_template.md") expect_proj_file(".github/SUPPORT.md") expect_proj_file(".github/CODE_OF_CONDUCT.md") expect_true(is_build_ignored("^\\.github$")) }) test_that("styling the package works", { skip_if(getRversion() < 3.2) skip_if_no_git_user() skip_if_not_installed("styler") pkg <- create_local_package() use_r("bad_style") path_to_bad_style <- proj_path("R/bad_style.R") write_utf8(path_to_bad_style, "a++2\n") capture_output(use_tidy_style()) expect_identical(read_utf8(path_to_bad_style), "a + +2") file_delete(path_to_bad_style) }) test_that("styling of non-packages works", { skip_if(getRversion() < 3.2) skip_if_no_git_user() skip_if_not_installed("styler") proj <- create_local_project() path_to_bad_style <- proj_path("R/bad_style.R") use_r("bad_style") write_utf8(path_to_bad_style, "a++22\n") capture_output(use_tidy_style()) expect_identical(read_utf8(path_to_bad_style), "a + +22") file_delete(path_to_bad_style) }) usethis/tests/testthat/test-rstudio.R0000644000175000017500000000531714117743363017702 0ustar nileshnileshtest_that("use_rstudio() creates .Rproj file, named after directory", { dir <- create_local_package(rstudio = FALSE) use_rstudio() rproj <- path_file(dir_ls(proj_get(), regexp = "[.]Rproj$")) expect_identical(path_ext_remove(rproj), path_file(dir)) # Always uses POSIX line endings expect_equal(proj_line_ending(), "\n") }) test_that("use_rstudio() omits package-related config for a project", { create_local_project(rstudio = FALSE) use_rstudio() out <- readLines(rproj_path()) expect_true(is.na(match("BuildType: Package", out))) }) test_that("a non-RStudio project is not recognized", { create_local_package(rstudio = FALSE) expect_false(is_rstudio_project()) expect_identical(rproj_path(), NA_character_) }) test_that("an RStudio project is recognized", { create_local_package(rstudio = TRUE) expect_true(is_rstudio_project()) expect_match(rproj_path(), "\\.Rproj$") }) test_that("we error for multiple Rproj files", { create_local_package(rstudio = TRUE) file_copy( rproj_path(), proj_path("copy.Rproj") ) expect_usethis_error(rproj_path(), "Multiple .Rproj files found", fixed = TRUE) }) test_that("Rproj is parsed (actually, only colon-containing lines)", { tmp <- withr::local_tempfile() writeLines(c("a: a", "", "b: b", "I have no colon"), tmp) expect_identical( parse_rproj(tmp), list(a = "a", "", b = "b", "I have no colon") ) }) test_that("Existing field(s) in Rproj can be modified", { tmp <- withr::local_tempfile() writeLines( c( "Version: 1.0", "", "RestoreWorkspace: Default", "SaveWorkspace: Yes", "AlwaysSaveHistory: Default" ), tmp ) before <- parse_rproj(tmp) delta <- list(RestoreWorkspace = "No", SaveWorkspace = "No") after <- modify_rproj(tmp, delta) expect_identical(before[c(1, 2, 5)], after[c(1, 2, 5)]) expect_identical(after[3:4], delta) }) test_that("we can roundtrip an Rproj file", { create_local_package(rstudio = TRUE) rproj_file <- rproj_path() before <- 
read_utf8(rproj_file) rproj <- modify_rproj(rproj_file, list()) writeLines(serialize_rproj(rproj), rproj_file) after <- read_utf8(rproj_file) expect_identical(before, after) }) test_that("use_blank_state('project') modifies Rproj", { create_local_package(rstudio = TRUE) use_blank_slate("project") rproj <- parse_rproj(rproj_path()) expect_equal(rproj$RestoreWorkspace, "No") expect_equal(rproj$SaveWorkspace, "No") }) test_that("use_blank_state() modifies user-level RStudio prefs", { path <- withr::local_tempdir() withr::local_envvar(c("XDG_CONFIG_HOME" = path)) use_blank_slate() prefs <- rstudio_prefs_read() expect_equal(prefs[["save_workspace"]], "never") expect_false(prefs[["load_workspace"]]) }) usethis/tests/testthat/test-addin.R0000644000175000017500000000066414131622147017260 0ustar nileshnileshtest_that("use_addin() creates the first addins.dcf as promised", { create_local_package() use_addin("addin.test") addin_dcf <- read_utf8(proj_path("inst", "rstudio", "addins.dcf")) expected_file <- path_package("usethis", "templates", "addins.dcf") addin_dcf_expected <- read_utf8(expected_file) addin_dcf_expected[3] <- "Binding: addin.test" addin_dcf_expected[5] <- "" expect_equal(addin_dcf, addin_dcf_expected) }) usethis/tests/testthat/test-release.R0000644000175000017500000000742314132445166017626 0ustar nileshnilesh # release bullets --------------------------------------------------------- test_that("release bullets don't change accidentally", { create_local_package() # First release expect_snapshot( writeLines(release_checklist("0.1.0", on_cran = FALSE)), transform = scrub_testpkg ) # Patch release expect_snapshot( writeLines(release_checklist("0.0.1", on_cran = TRUE)), transform = scrub_testpkg ) # Major release expect_snapshot( writeLines(release_checklist("1.0.0", on_cran = TRUE)), transform = scrub_testpkg ) }) test_that("get extra news bullets if available", { env <- env(release_bullets = function() "Extra bullets") expect_equal(release_extra(env), "* [ ] Extra bullets") env <- env(release_questions = function() "Extra bullets") expect_equal(release_extra(env), "* [ ] Extra bullets") env <- env() expect_equal(release_extra(env), character()) }) test_that("RStudio-ness detection works", { create_local_package() expect_false(is_rstudio_funded()) expect_false(is_in_rstudio_org()) desc <- desc::desc(file = proj_get()) desc$add_author(given = "RStudio", role = "fnd") desc$add_urls("https://github.com/tidyverse/WHATEVER") desc$write() expect_true(is_rstudio_funded()) expect_true(is_in_rstudio_org()) expect_snapshot( writeLines(release_checklist("1.0.0", on_cran = TRUE)), transform = scrub_testpkg ) }) # news -------------------------------------------------------------------- test_that("must have at least one heading", { expect_error( news_latest(""), regexp = "No top-level headings", class = "usethis_error" ) }) test_that("trims blank lines when extracting bullets", { lines <- c( "# Heading", "", "Contents", "" ) expect_equal(news_latest(lines), "Contents\n") lines <- c( "# Heading", "", "Contents 1", "", "# Heading", "", "Contents 2" ) expect_equal(news_latest(lines), "Contents 1\n") }) test_that("returns empty string if no bullets", { lines <- c( "# Heading", "", "# Heading" ) expect_equal(news_latest(lines), "") }) # draft release ---------------------------------------------------------------- test_that("get_release_data() works if no file found", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore") gert::git_commit("we need at 
least one commit") res <- get_release_data() expect_equal(res$Version, "0.0.0.9000") expect_match(res$SHA, "[[:xdigit:]]{40}") }) test_that("get_release_data() works for old-style CRAN-RELEASE", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore") gert::git_commit("we need at least one commit") HEAD <- gert::git_info(repo = git_repo())$commit write_utf8( proj_path("CRAN-RELEASE"), glue(" This package was submitted to CRAN on YYYY-MM-DD. Once it is accepted, delete this file and tag the release (commit {HEAD}).") ) res <- get_release_data(tr = list(repo_spec = "OWNER/REPO")) expect_equal(res$Version, "0.0.0.9000") expect_equal(res$SHA, HEAD) expect_equal(path_file(res$file), "CRAN-RELEASE") }) test_that("get_release_data() works for new-style CRAN-RELEASE", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore") gert::git_commit("we need at least one commit") HEAD <- gert::git_info(repo = git_repo())$commit write_utf8( proj_path("CRAN-SUBMISSION"), glue(" Version: 1.2.3 Date: 2021-10-14 23:57:41 UTC SHA: {HEAD}") ) res <- get_release_data(tr = list(repo_spec = "OWNER/REPO")) expect_equal(res$Version, "1.2.3") expect_equal(res$SHA, HEAD) expect_equal(path_file(res$file), "CRAN-SUBMISSION") }) usethis/tests/testthat/ref/0000755000175000017500000000000014117743363015657 5ustar nileshnileshusethis/tests/testthat/ref/foo/0000755000175000017500000000000014154505162016434 5ustar nileshnileshusethis/tests/testthat/ref/foo/file.txt0000644000175000017500000000004613676400413020115 0ustar nileshnileshI am file.txt which lives inside foo/ usethis/tests/testthat/ref/README.md0000644000175000017500000001124114117743363017135 0ustar nileshnileshZIP file structures ================ ``` r devtools::load_all("~/rrr/usethis") #> ℹ Loading usethis #> x unloadNamespace("usethis") failed because another loaded package needs it #> ℹ Forcing unload. If you encounter problems, please restart R. library(fs) ``` ## Different styles of ZIP file Examples based on foo folder found here. ``` bash tree foo #> foo #> └── file.txt #> #> 0 directories, 1 file ``` ### Not Loose Parts, a.k.a. GitHub style This is the structure of ZIP files yielded by GitHub via links of the forms and . ``` bash zip -r foo-not-loose.zip foo/ ``` Notice that everything is packaged below one top-level directory. ``` r foo_not_loose_files <- unzip("foo-not-loose.zip", list = TRUE) with( foo_not_loose_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 foo/ . foo #> 2 foo/file.txt foo file.txt ``` ### Loose Parts, the Regular Way This is the structure of many ZIP files I’ve seen, just in general. ``` bash cd foo zip ../foo-loose-regular.zip * cd .. ``` All the files are packaged in the ZIP archive as “loose parts”, i.e. there is no explicit top-level directory. ``` r foo_loose_regular_files <- unzip("foo-loose-regular.zip", list = TRUE) with( foo_loose_regular_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 file.txt . file.txt ``` ### Loose Parts, the DropBox Way This is the structure of ZIP files yielded by DropBox via links of this form . I can’t figure out how to even do this with zip locally, so I had to create an example on DropBox and download it. Jim Hester reports it is possible with `archive::archive_write_files()`. 
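A minimal, untested sketch of how the archive package might build such a ZIP from loose parts (the destination file name is made up, and whether this reproduces DropBox's spurious `"/"` entry is not verified here):

``` r
# Hypothetical sketch: write the contents of foo/ into a ZIP as loose parts,
# i.e. without wrapping them in a top-level directory entry.
withr::with_dir("foo", {
  archive::archive_write_files(
    "../foo-loose-archive.zip",        # made-up destination file
    files = fs::dir_ls(recurse = TRUE) # paths relative to foo/
  )
})
```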
It’s basically like the “loose parts” above, except it includes a spurious top-level directory `"/"`. ``` r # curl::curl_download( # "https://www.dropbox.com/sh/5qfvssimxf2ja58/AABz3zrpf-iPYgvQCgyjCVdKa?dl=1", # destfile = "foo-loose-dropbox.zip" # ) foo_loose_dropbox_files <- unzip("foo-loose-dropbox.zip", list = TRUE) with( foo_loose_dropbox_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) #> Name dirname basename #> 1 / / #> 2 file.txt . file.txt ``` Also note that, when unzipping with `unzip` in the shell, you get this result: Archive: foo-loose-dropbox.zip warning: stripped absolute path spec from / mapname: conversion of failed inflating: file.txt So this is a pretty odd ZIP packing strategy. But we need to plan for it. ## Subdirs only at top-level Let’s make sure we detect loose parts (or not) when the top-level has only directories, not files. Example based on the yo directory here: ``` bash tree yo #> yo #> ├── subdir1 #> │   └── file1.txt #> └── subdir2 #> └── file2.txt #> #> 2 directories, 2 files ``` ``` bash zip -r yo-not-loose.zip yo/ ``` ``` r (yo_not_loose_files <- unzip("yo-not-loose.zip", list = TRUE)) #> Name Length Date #> 1 yo/ 0 2018-01-11 15:48:00 #> 2 yo/subdir1/ 0 2018-01-11 15:48:00 #> 3 yo/subdir1/file1.txt 42 2018-01-11 15:48:00 #> 4 yo/subdir2/ 0 2018-01-11 15:49:00 #> 5 yo/subdir2/file2.txt 42 2018-01-11 15:49:00 top_directory(yo_not_loose_files$Name) #> [1] "yo/" ``` ``` bash cd yo zip -r ../yo-loose-regular.zip * cd .. ``` ``` r (yo_loose_regular_files <- unzip("yo-loose-regular.zip", list = TRUE)) #> Name Length Date #> 1 subdir1/ 0 2018-01-11 15:48:00 #> 2 subdir1/file1.txt 42 2018-01-11 15:48:00 #> 3 subdir2/ 0 2018-01-11 15:49:00 #> 4 subdir2/file2.txt 42 2018-01-11 15:49:00 top_directory(yo_loose_regular_files$Name) #> [1] NA ``` ``` r # curl::curl_download( # "https://www.dropbox.com/sh/afydxe6pkpz8v6m/AADHbMZAaW3IQ8zppH9mjNsga?dl=1", # destfile = "yo-loose-dropbox.zip" # ) (yo_loose_dropbox_files <- unzip("yo-loose-dropbox.zip", list = TRUE)) #> Name Length Date #> 1 / 0 2018-01-11 23:57:00 #> 2 subdir1/file1.txt 42 2018-01-11 23:57:00 #> 3 subdir2/file2.txt 42 2018-01-11 23:57:00 #> 4 subdir1/ 0 2018-01-11 23:57:00 #> 5 subdir2/ 0 2018-01-11 23:57:00 top_directory(yo_loose_dropbox_files$Name) #> [1] NA ``` usethis/tests/testthat/ref/yo-loose-regular.zip0000644000175000017500000000131613676400413021604 0ustar nileshnileshPK ~+Lsubdir1/UT WZJWZux PK ~+Lם=**subdir1/file1.txtUT WZWZux I am file1.txt, located below yo/subdir1/ PK !~+Lsubdir2/UT WZJWZux PK $~+Ld**subdir2/file2.txtUT WZWZux I am file2.txt, located below yo/subdir2/ PK ~+LAsubdir1/UTWZux PK ~+Lם=**Bsubdir1/file1.txtUTWZux PK !~+LAsubdir2/UTWZux PK $~+Ld**subdir2/file2.txtUTWZux PKJnusethis/tests/testthat/ref/foo-not-loose.zip0000644000175000017500000000053413676400413021100 0ustar nileshnileshPK Q+Lfoo/UT WZWZux PK Q+L&& foo/file.txtUT  WZWZux I am file.txt which lives inside foo/ PK Q+LAfoo/UTWZux PK Q+L&& >foo/file.txtUT WZux PKusethis/tests/testthat/ref/README.Rmd0000644000175000017500000000642414117743363017266 0ustar nileshnilesh--- title: "ZIP file structures" output: github_document --- ```{r setup, include=FALSE} knitr::opts_chunk$set(comment = "#>", collapse = TRUE) ``` ```{r} devtools::load_all("~/rrr/usethis") library(fs) ``` ## Different styles of ZIP file Examples based on foo folder found here. ```{bash} tree foo ``` ### Not Loose Parts, a.k.a. GitHub style This is the structure of ZIP files yielded by GitHub via links of the forms and . 
```{bash, eval = FALSE} zip -r foo-not-loose.zip foo/ ``` Notice that everything is packaged below one top-level directory. ```{r} foo_not_loose_files <- unzip("foo-not-loose.zip", list = TRUE) with( foo_not_loose_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` ### Loose Parts, the Regular Way This is the structure of many ZIP files I've seen, just in general. ```{bash, eval = FALSE} cd foo zip ../foo-loose-regular.zip * cd .. ``` All the files are packaged in the ZIP archive as "loose parts", i.e. there is no explicit top-level directory. ```{r} foo_loose_regular_files <- unzip("foo-loose-regular.zip", list = TRUE) with( foo_loose_regular_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` ### Loose Parts, the DropBox Way This is the structure of ZIP files yielded by DropBox via links of this form . I can't figure out how to even do this with zip locally, so I had to create an example on DropBox and download it. Jim Hester reports it is possible with `archive::archive_write_files()`. It's basically like the "loose parts" above, except it includes a spurious top-level directory `"/"`. ```{r} # curl::curl_download( # "https://www.dropbox.com/sh/5qfvssimxf2ja58/AABz3zrpf-iPYgvQCgyjCVdKa?dl=1", # destfile = "foo-loose-dropbox.zip" # ) foo_loose_dropbox_files <- unzip("foo-loose-dropbox.zip", list = TRUE) with( foo_loose_dropbox_files, data.frame(Name = Name, dirname = path_dir(Name), basename = path_file(Name)) ) ``` Also note that, when unzipping with `unzip` in the shell, you get this result: ``` Archive: foo-loose-dropbox.zip warning: stripped absolute path spec from / mapname: conversion of failed inflating: file.txt ``` So this is a pretty odd ZIP packing strategy. But we need to plan for it. ## Subdirs only at top-level Let's make sure we detect loose parts (or not) when the top-level has only directories, not files. Example based on the yo directory here: ```{bash} tree yo ``` ```{bash, eval = FALSE} zip -r yo-not-loose.zip yo/ ``` ```{r} (yo_not_loose_files <- unzip("yo-not-loose.zip", list = TRUE)) top_directory(yo_not_loose_files$Name) ``` ```{bash, eval = FALSE} cd yo zip -r ../yo-loose-regular.zip * cd .. 
``` ```{r} (yo_loose_regular_files <- unzip("yo-loose-regular.zip", list = TRUE)) top_directory(yo_loose_regular_files$Name) ``` ```{r} # curl::curl_download( # "https://www.dropbox.com/sh/afydxe6pkpz8v6m/AADHbMZAaW3IQ8zppH9mjNsga?dl=1", # destfile = "yo-loose-dropbox.zip" # ) (yo_loose_dropbox_files <- unzip("yo-loose-dropbox.zip", list = TRUE)) top_directory(yo_loose_dropbox_files$Name) ```
usethis/tests/testthat/ref/yo-loose-dropbox.zip0000644000175000017500000000124613676400413021622 0ustar nileshnilesh [binary ZIP fixture; contents omitted]
usethis/tests/testthat/ref/foo-loose-regular.zip0000644000175000017500000000031413676400413021735 0ustar nileshnilesh [binary ZIP fixture; contents omitted]
usethis/tests/testthat/ref/yo-not-loose.zip0000644000175000017500000000155413676400413020747 0ustar nileshnilesh [binary ZIP fixture; contents omitted]
usethis/tests/testthat/ref/foo-loose-dropbox.zip0000644000175000017500000000041213676400413021750 0ustar nileshnilesh [binary ZIP fixture; contents omitted]
usethis/tests/testthat/ref/yo/0000755000175000017500000000000014154505162016300 5ustar nileshnilesh
usethis/tests/testthat/ref/yo/subdir1/0000755000175000017500000000000013676400413017652 5ustar nileshnilesh
usethis/tests/testthat/ref/yo/subdir1/file1.txt0000644000175000017500000000005213676400413021410 0ustar nileshnileshI am file1.txt, located below yo/subdir1/ 
usethis/tests/testthat/ref/yo/subdir2/0000755000175000017500000000000013676400413017653 5ustar nileshnilesh
usethis/tests/testthat/ref/yo/subdir2/file2.txt0000644000175000017500000000005213676400413021412 0ustar nileshnileshI am file2.txt, located below yo/subdir2/ 
usethis/tests/testthat/teardown.R0000644000175000017500000000003213706406716017045 0ustar nileshnileshoptions(pre_test_options) 
usethis/tests/testthat/test-proj.R0000644000175000017500000001460614131645451017157 0ustar nileshnileshtest_that("proj_set() errors on non-existent path", { expect_usethis_error( proj_set("abcedefgihklmnopqrstuv"), "does not exist" ) }) test_that("proj_set() errors if no criteria are fulfilled", { tmpdir <- withr::local_tempdir(pattern = "i-am-not-a-project") expect_usethis_error( proj_set(tmpdir), "does not appear to be inside a project or package" ) }) test_that("proj_set() can be forced, even if no criteria are fulfilled", { tmpdir <- withr::local_tempdir(pattern = "i-am-not-a-project") expect_error_free(old <- proj_set(tmpdir, force = TRUE)) withr::defer(proj_set(old)) expect_identical(proj_get(), proj_path_prep(tmpdir)) }) test_that("is_package() detects package-hood", { create_local_package() expect_true(is_package()) create_local_project() expect_false(is_package()) }) test_that("check_is_package() errors for non-package", { create_local_project() expect_usethis_error(check_is_package(), "not an R package") }) test_that("check_is_package() can reveal who's asking", { create_local_project()
expect_usethis_error(check_is_package("foo"), "foo") }) test_that("proj_path() appends to the project path", { create_local_project() expect_equal( proj_path("a", "b", "c"), path(proj_get(), "a/b/c") ) expect_identical(proj_path("a", "b", "c"), proj_path("a/b/c")) }) test_that("proj_rel_path() returns path part below the project", { create_local_project() expect_equal(proj_rel_path(proj_path("a/b/c")), "a/b/c") }) test_that("proj_rel_path() returns path 'as is' if not in project", { create_local_project() expect_identical(proj_rel_path(path_temp()), path_temp()) }) test_that("proj_set() enforces proj path preparation policy", { # specifically: check that proj_get() returns realized path t <- withr::local_tempdir("proj-set-path-prep") # a/b/d and a/b2/d identify same directory a <- path_real(dir_create(path(t, "a"))) b <- dir_create(path(a, "b")) b2 <- link_create(b, path(a, "b2")) d <- dir_create(path(b, "d")) # input path includes symbolic link path_with_symlinks <- path(b2, "d") expect_equal(path_rel(path_with_symlinks, a), path("b2/d")) # force = TRUE local_project(path_with_symlinks, force = TRUE) expect_equal(path_rel(proj_get(), a), path("b/d")) # force = FALSE file_create(path(b, "d", ".here")) proj_set(path_with_symlinks, force = FALSE) expect_equal(path_rel(proj_get(), a), path("b/d")) }) test_that("proj_path_prep() passes NULL through", { expect_null(proj_path_prep(NULL)) }) test_that("is_in_proj() detects whether files are (or would be) in project", { create_local_package() ## file does not exist but would be in project if created expect_true(is_in_proj(proj_path("fiction"))) ## file exists in project expect_true(is_in_proj(proj_path("DESCRIPTION"))) ## file does not exist and would not be in project if created expect_false(is_in_proj(file_temp())) ## file exists and is not in project expect_false(is_in_proj(path_temp())) }) test_that("is_in_proj() does not activate a project", { pkg <- create_local_package() path <- proj_path("DESCRIPTION") expect_true(is_in_proj(path)) local_project(NULL) expect_false(is_in_proj(path)) expect_false(proj_active()) }) test_that("proj_sitrep() reports current working/project state", { pkg <- create_local_package() x <- proj_sitrep() expect_s3_class(x, "sitrep") expect_false(is.null(x[["working_directory"]])) expect_identical( fs::path_file(pkg), fs::path_file(x[["active_usethis_proj"]]) ) }) test_that("with_project() runs code in temp proj, restores (lack of) proj", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) temp_proj <- create_project( file_temp(pattern = "TEMPPROJ"), rstudio = FALSE, open = FALSE ) proj_set_(NULL) expect_identical(proj_get_(), NULL) res <- with_project(path = temp_proj, proj_get_()) expect_identical(res, temp_proj) expect_identical(proj_get_(), NULL) }) test_that("with_project() runs code in temp proj, restores original proj", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) host <- create_project( file_temp(pattern = "host"), rstudio = FALSE, open = FALSE ) guest <- create_project( file_temp(pattern = "guest"), rstudio = FALSE, open = FALSE ) proj_set(host) expect_identical(proj_get_(), host) res <- with_project(path = guest, proj_get_()) expect_identical(res, guest) expect_identical(proj_get(), host) }) test_that("with_project() works when temp proj == original proj", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) host <- create_project( file_temp(pattern = "host"), rstudio = FALSE, open = FALSE ) proj_set(host) expect_identical(proj_get_(), host) res <- 
with_project(path = host, proj_get_()) expect_identical(res, host) expect_identical(proj_get(), host) }) test_that("local_project() activates proj til scope ends", { old_project <- proj_get_() withr::defer(proj_set_(old_project)) new_proj <- file_temp(pattern = "localprojtest") create_project(new_proj, rstudio = FALSE, open = FALSE) proj_set_(NULL) foo <- function() { local_project(new_proj) proj_sitrep() } res <- foo() expect_identical( res[["active_usethis_proj"]], as.character(proj_path_prep(new_proj)) ) expect_null(proj_get_()) }) # https://github.com/r-lib/usethis/issues/954 test_that("proj_activate() works with relative path when RStudio is not detected", { sandbox <- path_real(dir_create(file_temp("sandbox"))) withr::defer(dir_delete(sandbox)) orig_proj <- proj_get_() withr::defer(proj_set(orig_proj, force = TRUE)) withr::local_dir(sandbox) rel_path_proj <- path_file(file_temp(pattern = "mno")) out_path <- create_project(rel_path_proj, rstudio = FALSE, open = FALSE) with_mock( # make sure we act as if not in RStudio rstudio_available = function(...) FALSE, expect_error_free( result <- proj_activate(rel_path_proj) ) ) expect_true(result) expect_equal(path_wd(), out_path) expect_equal(proj_get(), out_path) }) # https://github.com/r-lib/usethis/issues/1498 test_that("local_project()'s `quiet` argument works", { temp_proj <- create_project( file_temp(pattern = "TEMPPROJ"), rstudio = FALSE, open = FALSE ) withr::defer(dir_delete(temp_proj)) local_project(path = temp_proj, quiet = TRUE, force = TRUE, setwd = FALSE) expect_true(getOption("usethis.quiet")) }) usethis/tests/testthat/test-cran.R0000644000175000017500000000055514117743363017133 0ustar nileshnileshtest_that("use_cran_comments() requires a package", { create_local_project() expect_usethis_error(use_cran_comments(), "not an R package") }) test_that("use_cran_comments() creates and ignores the promised file", { create_local_package() use_cran_comments() expect_proj_file("cran-comments.md") expect_true(is_build_ignored("^cran-comments\\.md$")) }) usethis/tests/testthat/test-helpers.R0000644000175000017500000001077214132400710017633 0ustar nileshnileshtest_that("valid_package_name() enforces valid package names", { # Contain only ASCII letters, numbers, and '.' # Have at least two characters # Start with a letter # Not end with '.' 
expect_true(valid_package_name("aa")) expect_true(valid_package_name("a7")) expect_true(valid_package_name("a.2")) expect_false(valid_package_name("a")) expect_false(valid_package_name("a-2")) expect_false(valid_package_name("2fa")) expect_false(valid_package_name(".fa")) expect_false(valid_package_name("aa\u00C0")) # \u00C0 is a-grave expect_false(valid_package_name("a3.")) }) test_that("valid_file_name() enforces valid file names", { # Contain only ASCII letters, numbers, '-', and '_' expect_true(valid_file_name("aa.R")) expect_true(valid_file_name("a7.R")) expect_true(valid_file_name("a-2.R")) expect_true(valid_file_name("a_2.R")) expect_false(valid_file_name("aa\u00C0.R")) # \u00C0 is a-grave expect_false(valid_file_name("a?3.R")) }) # use_dependency ---------------------------------------------------------- test_that("we message for new type and are silent for same type", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message( use_dependency("crayon", "Imports"), "Adding 'crayon' to Imports field" ) expect_silent(use_dependency("crayon", "Imports")) }) test_that("we message for version change and are silent for same version", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message( use_dependency("crayon", "Imports"), "Adding 'crayon" ) expect_message( use_dependency("crayon", "Imports", min_version = "1.0.0"), "Increasing 'crayon'" ) expect_silent(use_dependency("crayon", "Imports", min_version = "1.0.0")) expect_message( use_dependency("crayon", "Imports", min_version = "2.0.0"), "Increasing 'crayon'" ) expect_silent(use_dependency("crayon", "Imports", min_version = "1.0.0")) }) ## https://github.com/r-lib/usethis/issues/99 test_that("use_dependency() upgrades a dependency", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message(use_dependency("usethis", "Suggests")) expect_match(desc::desc_get("Suggests", proj_get()), "usethis") expect_message(use_dependency("usethis", "Imports"), "Moving 'usethis'") expect_match(desc::desc_get("Imports", proj_get()), "usethis") expect_false(grepl("usethis", desc::desc_get("Suggests", proj_get()))) }) ## https://github.com/r-lib/usethis/issues/99 test_that("use_dependency() declines to downgrade a dependency", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message(use_dependency("usethis", "Imports")) expect_match(desc::desc_get("Imports", proj_get()), "usethis") expect_warning(use_dependency("usethis", "Suggests"), "no change") expect_match(desc::desc_get("Imports", proj_get()), "usethis") expect_false(grepl("usethis", desc::desc_get("Suggests", proj_get()))) }) test_that("can add LinkingTo dependency if other dependency already exists", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message(use_dependency("Rcpp", "Imports"), "Adding 'Rcpp'") expect_message(use_dependency("Rcpp", "LinkingTo"), "Adding 'Rcpp'") expect_message(use_dependency("Rcpp", "LinkingTo"), "Adding 'Rcpp'") expect_message(use_dependency("Rcpp", "Import"), "Adding 'Rcpp'") }) # use_system_requirement ------------------------------------------------ test_that("we message for new requirements and are silent for existing requirements", { create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message( use_system_requirement("C++11"), "Adding 
'C++11' to SystemRequirements field in DESCRIPTION", fixed = TRUE ) expect_silent(use_system_requirement("C++11")) }) test_that("we can add multiple requirements with repeated calls", { pkg <- create_local_package() withr::local_options(list(usethis.quiet = FALSE, crayon.enabled = FALSE)) expect_message( use_system_requirement("C++11"), "Adding 'C++11' to SystemRequirements field in DESCRIPTION", fixed = TRUE ) expect_message( use_system_requirement("libxml2"), "Adding 'libxml2' to SystemRequirements field in DESCRIPTION", fixed = TRUE ) expect_equal( unname(desc::desc_get("SystemRequirements", pkg)), "C++11, libxml2" ) }) usethis/tests/testthat/test-ci.R0000644000175000017500000000223514117743363016600 0ustar nileshnileshtest_that("use_circleci() configures CircleCI", { skip_if_no_git_user() local_interactive(FALSE) create_local_package() use_git() with_mock( target_repo_spec = function(...) "OWNER/REPO", use_circleci(browse = FALSE) ) expect_true(is_build_ignored("^\\.circleci$")) expect_proj_dir(".circleci") expect_proj_file(".circleci/config.yml") yml <- yaml::yaml.load_file(proj_path(".circleci", "config.yml")) expect_identical( yml$jobs$build$steps[[7]]$store_artifacts$path, paste0(project_name(), ".Rcheck/") ) # use_circleci() properly formats keys for cache expect_identical( yml$jobs$build$steps[[1]]$restore_cache$keys, c("r-pkg-cache-{{ arch }}-{{ .Branch }}", "r-pkg-cache-{{ arch }}-") ) expect_identical( yml$jobs$build$steps[[8]]$save_cache$key, "r-pkg-cache-{{ arch }}-{{ .Branch }}" ) dir_delete(proj_path(".circleci")) docker <- "rocker/r-ver:3.5.3" with_mock( target_repo_spec = function(...) "OWNER/REPO", use_circleci(browse = FALSE, image = docker) ) yml <- yaml::yaml.load_file(proj_path(".circleci", "config.yml")) expect_identical(yml$jobs$build$docker[[1]]$image, docker) }) usethis/tests/testthat/test-make.R0000644000175000017500000000021414117743363017115 0ustar nileshnileshtest_that("use_make() creates a Makefile at project root", { pkg <- create_local_package() use_make() expect_proj_file("Makefile") }) usethis/tests/testthat/test-r.R0000644000175000017500000000367014117743363016452 0ustar nileshnileshtest_that("use_r() creates a .R file below R/", { create_local_package() use_r("foo") expect_proj_file("R/foo.R") }) test_that("use_test() creates a test file", { create_local_package() use_test("foo", open = FALSE) expect_proj_file("tests", "testthat", "test-foo.R") }) test_that("can use use_test() in a project", { create_local_project() expect_error(use_test("foofy"), NA) }) # rename_files ------------------------------------------------------------ test_that("renames R and test and snapshot files", { create_local_package() git_init() use_r("foo", open = FALSE) rename_files("foo", "bar") expect_proj_file("R/bar.R") use_test("foo", open = FALSE) rename_files("foo", "bar") expect_proj_file("tests/testthat/test-bar.R") dir_create(proj_path("tests", "testthat", "_snaps")) write_utf8(proj_path("tests", "testthat", "_snaps", "foo.md"), "abc") rename_files("foo", "bar") expect_proj_file("tests/testthat/_snaps/bar.md") }) test_that("strips context from test file", { create_local_package() git_init() use_testthat() write_utf8( proj_path("tests", "testthat", "test-foo.R"), c( "context('bar')", "", "a <- 1" ) ) rename_files("foo", "bar") lines <- read_utf8(proj_path("tests", "testthat", "test-bar.R")) expect_equal(lines, "a <- 1") }) test_that("rename paths in test file", { create_local_package() git_init() use_testthat() write_utf8(proj_path("tests", "testthat", "test-foo.txt"), 
"10") write_utf8(proj_path("tests", "testthat", "test-foo.R"), "test-foo.txt") rename_files("foo", "bar") expect_proj_file("tests/testthat/test-bar.txt") lines <- read_utf8(proj_path("tests", "testthat", "test-bar.R")) expect_equal(lines, "test-bar.txt") }) # helpers ----------------------------------------------------------------- test_that("check_file_name() requires single string", { expect_usethis_error(check_file_name(c("a", "b")), "single string") }) usethis/tests/testthat/test-course.R0000644000175000017500000002043214153502006017467 0ustar nileshnilesh## download_url ---- test_that("download_url() retry logic works as advertised", { faux_download <- function(n_failures) { i <- 0 function(url, destfile, quiet, mode, handle) { i <<- i + 1 if (i <= n_failures) simpleError(paste0("try ", i)) else "success" } } withr::local_options(list(usethis.quiet = FALSE)) # succeed on first try with_mock( try_download = faux_download(0), expect_snapshot(out <- download_url(url = "URL", destfile = "destfile")) ) expect_s3_class(out, "curl_handle") # fail, then succeed with_mock( try_download = faux_download(1), expect_snapshot(out <- download_url(url = "URL", destfile = "destfile")) ) expect_s3_class(out, "curl_handle") # fail, fail, then succeed (default n_tries = 3, so should allow) with_mock( try_download = faux_download(2), expect_snapshot(out <- download_url(url = "URL", destfile = "destfile")) ) expect_s3_class(out, "curl_handle") # fail, fail, fail (exceed n_failures > n_tries = 3) with_mock( try_download = faux_download(5), expect_snapshot( out <- download_url(url = "URL", destfile = "destfile", n_tries = 3), error = TRUE ) ) # fail, fail, fail, succeed (make sure n_tries is adjustable) with_mock( try_download = faux_download(3), expect_snapshot(out <- download_url(url = "URL", destfile = "destfile", n_tries = 10)) ) expect_s3_class(out, "curl_handle") }) ## tidy_download ---- test_that("tidy_download() errors early if destdir is not a directory", { tmp <- fs::path_temp("I_am_just_a_file") withr::defer(fs::file_delete(tmp)) expect_usethis_error(tidy_download("URL", destdir = tmp), "does not exist") fs::file_create(tmp) expect_usethis_error(tidy_download("URL", destdir = tmp), "not a directory") }) test_that("tidy_download() works", { skip_on_cran() skip_if_offline("github.com") tmp <- withr::local_tempdir(pattern = "tidy-download-test-") gh_url <- "https://github.com/r-lib/rematch2/archive/main.zip" expected <- fs::path(tmp, "rematch2-main.zip") capture.output( out <- tidy_download(gh_url, destdir = tmp) ) expect_true(fs::file_exists(expected)) expect_identical(out, expected, ignore_attr = TRUE) expect_identical(attr(out, "content-type"), "application/zip") # refuse to overwrite when non-interactive expect_error(capture.output( tidy_download(gh_url, destdir = tmp) )) }) ## tidy_unzip ---- test_that("tidy_unzip() deals with loose parts, reports unpack destination", { tmp <- file_temp(ext = ".zip") fs::file_copy(test_file("yo-loose-regular.zip"), tmp) dest <- tidy_unzip(tmp) loose_regular_files <- fs::path_file(fs::dir_ls(dest, recurse = TRUE)) fs::dir_delete(dest) tmp <- file_temp(ext = ".zip") fs::file_copy(test_file("yo-loose-dropbox.zip"), tmp) dest <- tidy_unzip(tmp) loose_dropbox_files <- fs::path_file(fs::dir_ls(dest, recurse = TRUE)) fs::dir_delete(dest) tmp <- file_temp(ext = ".zip") fs::file_copy(test_file("yo-not-loose.zip"), tmp) dest <- tidy_unzip(tmp) not_loose_files <- fs::path_file(fs::dir_ls(dest, recurse = TRUE)) fs::dir_delete(dest) expect_identical(loose_regular_files, 
loose_dropbox_files) expect_identical(loose_dropbox_files, not_loose_files) }) ## helpers ---- test_that("create_download_url() works", { expect_equal( create_download_url("https://rstudio.com"), "https://rstudio.com" ) expect_equal( create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz"), "https://drive.google.com/uc?export=download&id=123456789xxyyyzzz" ) expect_equal( create_download_url( "https://drive.google.com/file/d/123456789xxxyyyzzz/view" ), "https://drive.google.com/uc?export=download&id=123456789xxxyyyzzz" ) expect_equal( create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0"), "https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1" ) # GitHub usethis_url <- "https://github.com/r-lib/usethis/zipball/HEAD" expect_equal( create_download_url("https://github.com/r-lib/usethis"), usethis_url ) expect_equal( create_download_url("https://github.com/r-lib/usethis/issues"), usethis_url ) expect_equal( create_download_url("https://github.com/r-lib/usethis#readme"), usethis_url ) }) test_that("normalize_url() prepends https:// (or not)", { expect_error(normalize_url(1), "is\\.character.*not TRUE") expect_identical(normalize_url("http://bit.ly/abc"), "http://bit.ly/abc") expect_identical(normalize_url("bit.ly/abc"), "https://bit.ly/abc") expect_identical( normalize_url("https://github.com/r-lib/rematch2/archive/main.zip"), "https://github.com/r-lib/rematch2/archive/main.zip" ) expect_identical( normalize_url("https://rstd.io/usethis-src"), "https://rstd.io/usethis-src" ) expect_identical( normalize_url("rstd.io/usethis-src"), "https://rstd.io/usethis-src" ) }) test_that("shortlinks pass through", { url1 <- "bit.ly/usethis-shortlink-example" url2 <- "rstd.io/usethis-shortlink-example" expect_equal(normalize_url(url1), paste0("https://", url1)) expect_equal(normalize_url(url2), paste0("https://", url2)) expect_equal(normalize_url(paste0("https://", url1)), paste0("https://", url1)) expect_equal(normalize_url(paste0("http://", url1)), paste0("http://", url1)) }) test_that("github links get expanded", { expect_equal( normalize_url("OWNER/REPO"), "https://github.com/OWNER/REPO/zipball/HEAD" ) }) test_that("conspicuous_place() returns a writeable directory", { skip_on_cran_macos() # even $HOME is not writeable on CRAN macOS builder expect_error_free(x <- conspicuous_place()) expect_true(is_dir(x)) expect_true(file_access(x, mode = "write")) }) test_that("conspicuous_place() uses `usethis.destdir` when set", { destdir <- withr::local_tempdir(pattern = "destdir_temp") withr::local_options(list(usethis.destdir = destdir)) expect_error_free(x <- conspicuous_place()) expect_equal(path_tidy(destdir), x) }) test_that("use_course() errors if MIME type is not 'application/zip'", { skip_on_cran() skip_if_offline() path <- withr::local_tempdir() expect_usethis_error( use_course("https://httpbin.org/get", destdir = path), "does not have MIME type" ) }) test_that("parse_content_disposition() parses Content-Description", { ## typical DropBox expect_identical( parse_content_disposition( "attachment; filename=\"foo.zip\"; filename*=UTF-8''foo.zip\"" ), c( "filename" = "\"foo.zip\"", "filename*" = "UTF-8''foo.zip\"" ) ) ## typical GitHub expect_identical( parse_content_disposition("attachment; filename=foo-main.zip"), c("filename" = "foo-main.zip") ) }) test_that("parse_content_disposition() errors on ill-formed `content-disposition` header", { expect_usethis_error( parse_content_disposition("aa;bb=cc;dd"), "doesn't start with" ) }) test_that("make_filename() gets name from 
`content-disposition` header", { ## DropBox expect_identical( make_filename( c( "filename" = "\"usethis-test.zip\"", "filename*" = "UTF-8''usethis-test.zip\"" ) ), "usethis-test.zip" ) ## GitHub expect_identical( make_filename(c("filename" = "buzzy-main.zip")), "buzzy-main.zip" ) }) test_that("make_filename() uses fallback if no `content-disposition` header", { expect_match(make_filename(NULL), "^file[0-9a-z]+$") }) test_that("keep_lgl() keeps and drops correct files", { keepers <- c("foo", ".gitignore", "a/.gitignore", "foo.Rproj", ".here") expect_true(all(keep_lgl(keepers))) droppers <- c( ".git", "/.git", "/.git/", ".git/", "foo/.git", ".git/config", ".git/objects/06/3d3gysle", ".Rproj.user", ".Rproj.user/123jkl/persistent-state", ".Rhistory", ".RData" ) expect_false(any(keep_lgl(droppers))) }) test_that("top_directory() identifies a unique top directory (or not)", { ## there is >= 1 file at top-level or >1 directories expect_identical(top_directory("a"), NA_character_) expect_identical(top_directory(c("a/", "b")), NA_character_) expect_identical(top_directory(c("a/", "b/")), NA_character_) ## there are no files at top-level and exactly 1 directory expect_identical(top_directory("a/"), "a/") expect_identical(top_directory(c("a/", "a/b")), "a/") expect_identical(top_directory(c("a/", "a/b", "a/c")), "a/") }) usethis/tests/testthat/test-template.R0000644000175000017500000000157614117743363020027 0ustar nileshnileshtest_that("can leave existing file unchanged, without an error", { create_local_package() desc_lines_before <- read_utf8(proj_path("DESCRIPTION")) expect_error_free( use_template("NEWS.md", "DESCRIPTION") ) desc_lines_after <- read_utf8(proj_path("DESCRIPTION")) expect_identical(desc_lines_before, desc_lines_after) }) # helpers ----------------------------------------------------------------- test_that("find_template errors if template missing", { expect_usethis_error(find_template("xxx"), "Could not find template") }) test_that("find_template can find templates for tricky Rbuildignored files", { expect_match(find_template("travis.yml"), "travis\\.yml$") expect_match(find_template("codecov.yml"), "codecov\\.yml$") expect_match(find_template("cran-comments.md"), "cran-comments\\.md$") expect_match(find_template("template.Rproj"), "template\\.Rproj$") }) usethis/tests/testthat/test-block.R0000644000175000017500000000101214117743363017267 0ustar nileshnileshtest_that("block_append() only writes unique lines", { path <- withr::local_tempfile() writeLines(block_create(), path) block_append("---", c("x", "y"), path) block_append("---", c("y", "x"), path) expect_equal(block_show(path), c("x", "y")) }) test_that("block_append() can sort, if requested", { path <- withr::local_tempfile() writeLines(block_create(), path) block_append("---", c("z", "y"), path) block_append("---", "x", path, sort = TRUE) expect_equal(block_show(path), c("x", "y", "z")) }) usethis/tests/testthat/test-browse.R0000644000175000017500000001050014135602150017464 0ustar nileshnileshtest_that("github_url() errors if no project", { withr::local_dir(path_temp()) local_project(NULL, force = TRUE, setwd = TRUE) expect_usethis_error(github_url(), "not.*inside a valid project") }) test_that("github_url() works on active project", { create_local_project() local_interactive(FALSE) use_git() expect_usethis_error(github_url(), "no DESCRIPTION") expect_usethis_error(github_url(), "no GitHub remotes") use_description() use_description_field("URL", "https://example.com") expect_usethis_error(github_url(), "no GitHub remotes") 
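  # With no GitHub remote configured yet, github_url() falls back to DESCRIPTION:
  # a GitHub-style BugReports URL is enough, and the trailing /issues is dropped.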
issues <- "https://github.com/OWNER/REPO_BUGREPORTS/issues" use_description_field("BugReports", issues) expect_equal(github_url(), "https://github.com/OWNER/REPO_BUGREPORTS") origin <- "https://github.com/OWNER/REPO_ORIGIN" use_git_remote("origin", origin) expect_equal(github_url(), "https://github.com/OWNER/REPO_ORIGIN") }) test_that("github_url() strips everything after USER/REPO", { expect_equal(github_url("usethis"), "https://github.com/r-lib/usethis") expect_equal(github_url("gh"), "https://github.com/r-lib/gh") }) test_that("github_url() has fall back for CRAN packages", { expect_warning(out <- github_url("utils"), "CRAN mirror") expect_equal(out, "https://github.com/cran/utils") }) test_that("github_url() errors for nonexistent package", { expect_usethis_error(github_url("1234"), "Can't find") }) test_that("cran_home() produces canonical URL", { pkg <- create_local_package(file_temp("abc")) expect_match(cran_home(), "https://cran.r-project.org/package=abc") expect_match(cran_home("bar"), "https://cran.r-project.org/package=bar") }) test_that("desc_urls() returns NULL if no project", { withr::local_dir(path_temp()) local_project(NULL, force = TRUE, setwd = TRUE) expect_null(desc_urls()) }) test_that("desc_urls() returns NULL if no DESCRIPTION", { create_local_project() expect_null(desc_urls()) }) test_that("desc_urls() returns empty data frame if no URLs", { create_local_project() use_description() expect_equal( desc_urls(), data.frame( url = character(), desc_field = character(), is_github = logical(), stringsAsFactors = FALSE ) ) }) test_that("desc_urls() returns data frame for locally installed package", { out <- desc_urls("curl") expect_true(nrow(out) > 1) }) test_that("desc_urls() returns data frame for an uninstalled package", { skip_on_cran() skip_if_offline() pkg <- "devoid" if (requireNamespace(pkg, quietly = TRUE)) { skip(paste0(pkg, " is installed locally")) } out <- desc_urls(pkg) expect_true(nrow(out) > 1) }) test_that("desc_urls() returns NULL for an nonexistent package", { skip_on_cran() skip_if_offline() expect_null(desc_urls("1234")) }) test_that("browse_XXX() goes to correct URL", { local_interactive(FALSE) g <- function(x) paste0("https://github.com/", x) expect_equal(browse_github("gh"), g("r-lib/gh")) expect_match(browse_github_issues("gh"), g("r-lib/gh/issues")) expect_equal(browse_github_issues("gh", 1), g("r-lib/gh/issues/1")) expect_equal(browse_github_issues("gh", "new"), g("r-lib/gh/issues/new")) expect_match(browse_github_pulls("gh"), g("r-lib/gh/pulls")) expect_equal(browse_github_pulls("gh", 1), g("r-lib/gh/pull/1")) expect_match(browse_github_actions("gh"), g("r-lib/gh/actions")) expect_equal(browse_travis("usethis"), "https://travis-ci.com/r-lib/usethis") expect_equal(browse_travis("usethis", ext = "org"), "https://travis-ci.org/r-lib/usethis") expect_equal(browse_cran("usethis"), "https://cran.r-project.org/package=usethis") }) test_that("browse_package() errors if no project", { withr::local_dir(path_temp()) local_project(NULL, force = TRUE, setwd = TRUE) expect_usethis_error(browse_project(), "not.*inside a valid project") }) test_that("browse_package() returns URLs", { create_local_project() use_git() expect_equal(browse_package(), character()) origin <- "https://github.com/OWNER/REPO" use_git_remote("origin", origin) foofy <- "https://github.com/SOMEONE_ELSE/REPO" use_git_remote("foofy", foofy) use_description() pkgdown <- "https://example.com" use_description_field("URL", pkgdown) issues <- "https://github.com/OWNER/REPO/issues" 
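  # browse_package() should surface every candidate URL it can find:
  # both git remotes plus the URL and BugReports fields from DESCRIPTION.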
use_description_field("BugReports", issues) out <- browse_package() expect_setequal(out, c(origin, foofy, pkgdown, issues)) }) usethis/tests/testthat/test-lifecycle.R0000644000175000017500000000054014131622147020131 0ustar nileshnileshtest_that("use_lifecycle() imports badges", { create_local_package(fs::path_temp("test_lifecycle")) use_package_doc() withr::local_options(usethis.quiet = FALSE) expect_snapshot({ use_lifecycle() }) expect_proj_file("man", "figures", "lifecycle-stable.svg") expect_equal(roxygen_ns_show(), "#' @importFrom lifecycle deprecated") }) usethis/tests/testthat/test-data-table.R0000644000175000017500000000176714131622147020204 0ustar nileshnileshtest_that("use_data_table() requires a package", { create_local_project() expect_usethis_error(use_data_table(), "not an R package") }) test_that("use_data_table() Imports data.table", { create_local_package() use_package_doc() with_mock( check_installed = function(pkg) TRUE, roxygen_update_ns = function(...) NULL, check_functions_exist = function(...) TRUE, use_data_table() ) expect_match(desc::desc_get("Imports"), "data.table") expect_snapshot(roxygen_ns_show()) }) test_that("use_data_table() blocks use of Depends", { create_local_package() use_package_doc() desc::desc_set_dep("data.table", "Depends") with_mock( check_installed = function(pkg) TRUE, roxygen_update_ns = function(...) NULL, check_functions_exist = function(...) TRUE, expect_warning( use_data_table(), "data.table should be in Imports or Suggests, not Depends" ) ) expect_match(desc::desc_get("Imports"), "data.table") expect_snapshot(roxygen_ns_show()) }) usethis/tests/testthat/test-latest-dependencies.R0000644000175000017500000000147013764577255022141 0ustar nileshnileshtest_that("use_tidy_versions() specifies a version for dependencies", { pkg <- create_local_package() use_package("usethis") use_package("desc") use_package("withr", "Suggests") use_package("gh", "Suggests") use_latest_dependencies() desc <- read_utf8(proj_path("DESCRIPTION")) desc <- grep("usethis|desc|withr|gh", desc, value = TRUE) expect_true(all(grepl("\\(>= [0-9.]+\\)", desc))) }) test_that("use_tidy_versions() does nothing for a base package", { ## if we ever depend on a recommended package, could beef up this test a bit pkg <- create_local_package() use_package("tools") use_package("stats", "Suggests") use_latest_dependencies() desc <- read_utf8(proj_path("DESCRIPTION")) desc <- grep("tools|stats", desc, value = TRUE) expect_false(any(grepl("\\(>= [0-9.]+\\)", desc))) }) usethis/tests/testthat/test-create.R0000644000175000017500000001130414117743363017445 0ustar nileshnileshtest_that("create_package() creates a package", { dir <- create_local_package() expect_true(possibly_in_proj(dir)) expect_true(is_package(dir)) }) test_that("create_project() creates a non-package project", { dir <- create_local_project() expect_true(possibly_in_proj(dir)) expect_false(is_package(dir)) }) test_that("create_*(open = FALSE) returns path to new proj, restores active proj", { path <- file_temp() cur_proj <- proj_get_() out_path <- create_package(path, open = FALSE) expect_equal(proj_get_(), cur_proj) expect_equal(proj_path_prep(path), out_path) dir_delete(out_path) out_path <- create_project(path, open = FALSE) expect_equal(proj_get_(), cur_proj) expect_equal(proj_path_prep(path), out_path) dir_delete(out_path) }) test_that("nested package is disallowed, by default", { dir <- create_local_package() expect_usethis_error(create_package(path(dir, "abcde")), "anyway") }) test_that("nested project is disallowed, 
by default", { dir <- create_local_project() expect_usethis_error(create_project(path(dir, "abcde")), "anyway") }) test_that("nested package can be created if user really, really wants to", { parent <- create_local_package() with_mock( # since user can't approve interactively, use the backdoor allow_nested_project = function() TRUE, child <- create_package(path(parent, "fghijk")) ) expect_true(possibly_in_proj(child)) expect_true(is_package(child)) }) test_that("nested project can be created if user really, really wants to", { parent <- create_local_project() with_mock( # since user can't approve interactively, use the backdoor allow_nested_project = function() TRUE, child <- create_project(path(parent, "fghijk")) ) expect_true(possibly_in_proj(child)) expect_false(is_package(child)) }) test_that("can create package in current directory (literally in '.')", { target_path <- dir_create(file_temp("mypackage")) withr::defer(dir_delete(target_path)) withr::local_dir(target_path) orig_proj <- proj_get_() orig_wd <- path_wd() expect_error_free( out_path <- create_package(".", open = FALSE) ) expect_equal(path_wd(), orig_wd) expect_equal(proj_get_(), orig_proj) }) ## https://github.com/r-lib/usethis/issues/227 test_that("create_* works w/ non-existing rel path, open = FALSE case", { sandbox <- path_real(dir_create(file_temp("sandbox"))) orig_proj <- proj_get_() orig_wd <- path_wd() withr::defer(dir_delete(sandbox)) withr::defer(proj_set(orig_proj, force = TRUE)) withr::local_dir(sandbox) rel_path_pkg <- path_file(file_temp(pattern = "abc")) expect_error_free( out_path <- create_package(rel_path_pkg, open = FALSE) ) expect_true(dir_exists(rel_path_pkg)) expect_equal(out_path, proj_path_prep(rel_path_pkg)) expect_equal(proj_get_(), orig_proj) expect_equal(path_wd(), sandbox) rel_path_proj <- path_file(file_temp(pattern = "def")) expect_error_free( out_path <- create_project(rel_path_proj, open = FALSE) ) expect_true(dir_exists(rel_path_proj)) expect_equal(out_path, proj_path_prep(rel_path_proj)) expect_equal(proj_get_(), orig_proj) expect_equal(path_wd(), sandbox) }) # https://github.com/r-lib/usethis/issues/1122 test_that("create_*() works w/ non-existing rel path, open = TRUE, not in RStudio", { sandbox <- path_real(dir_create(file_temp("sandbox"))) orig_proj <- proj_get_() withr::defer(dir_delete(sandbox)) withr::defer(proj_set(orig_proj, force = TRUE)) withr::local_dir(sandbox) # package rel_path_pkg <- path_file(file_temp(pattern = "ghi")) with_mock( # make sure we act as if not in RStudio rstudio_available = function(...) FALSE, expect_error_free( out_path <- create_package(rel_path_pkg, open = TRUE) ) ) exp_path_pkg <- path(sandbox, rel_path_pkg) expect_equal(out_path, exp_path_pkg) expect_equal(path_wd(), out_path) expect_equal(proj_get(), out_path) setwd(sandbox) # project rel_path_proj <- path_file(file_temp(pattern = "jkl")) with_mock( # make sure we act as if not in RStudio rstudio_available = function(...) 
FALSE, expect_error_free( out_path <- create_project(rel_path_proj, open = TRUE) ) ) exp_path_proj <- path(sandbox, rel_path_proj) expect_equal(out_path, exp_path_proj) expect_equal(path_wd(), out_path) expect_equal(proj_get(), out_path) }) test_that("we discourage project creation in home directory", { local_interactive(FALSE) expect_usethis_error(create_package(path_home()), "create anyway") expect_usethis_error(create_project(path_home()), "create anyway") if (is_windows()) { expect_usethis_error(create_package(path_home_r()), "create anyway") expect_usethis_error(create_project(path_home_r()), "create anyway") } }) usethis/tests/testthat/test-tidy-upkeep.R0000644000175000017500000000015514132445166020441 0ustar nileshnileshtest_that("upkeep bullets don't change accidentally", { expect_snapshot(writeLines(upkeep_checklist())) }) usethis/tests/testthat/setup.R0000644000175000017500000000006614131622147016360 0ustar nileshnileshpre_test_options <- options( usethis.quiet = TRUE ) usethis/tests/testthat/test-github-actions.R0000644000175000017500000000733314140054263021117 0ustar nileshnileshtest_that("use_github_action() allows for custom urls", { skip_on_cran() skip_if_no_git_user() skip_if_offline() local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") withr::local_options(usethis.quiet = FALSE) expect_snapshot( use_github_action( url = "https://raw.githubusercontent.com/r-lib/actions/v1/examples/check-full.yaml", readme = "https://github.com/r-lib/actions/blob/v1/examples/README.md" ) ) expect_proj_dir(".github") expect_proj_dir(".github/workflows") expect_proj_file(".github/workflows/check-full.yaml") }) test_that("use_github_action() appends yaml in name if missing", { skip_on_cran() skip_if_no_git_user() skip_if_offline() local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_github_action("check-full") expect_proj_dir(".github") expect_proj_dir(".github/workflows") expect_proj_file(".github/workflows/check-full.yaml") }) test_that("uses_github_actions() reports usage of GitHub Actions", { skip_on_cran() skip_if_no_git_user() skip_if_offline() local_interactive(FALSE) create_local_package() expect_false(uses_github_actions()) use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") with_mock( use_github_actions_badge = function(name, repo_spec) NULL, use_github_actions() ) expect_true(uses_github_actions()) }) test_that("check_uses_github_actions() can throw error", { create_local_package() withr::local_options(list(crayon.enabled = FALSE)) expect_snapshot( check_uses_github_actions(), error = TRUE, transform = scrub_testpkg ) }) test_that("use_github_actions() configures the basic check action", { skip_on_cran() skip_if_no_git_user() skip_if_offline() local_interactive(FALSE) create_local_package() use_git() use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_readme_md() use_github_actions() expect_proj_dir(".github") expect_proj_dir(".github/workflows") expect_proj_file(".github/workflows/R-CMD-check.yaml") yml <- yaml::yaml.load_file(proj_path(".github/workflows/R-CMD-check.yaml")) expect_identical(yml$name, "R-CMD-check") expect_identical(names(yml$jobs), "R-CMD-check") readme_lines <- read_utf8(proj_path("README.md")) expect_true(any(grepl("R-CMD-check", readme_lines))) # .github has been Rbuildignored expect_true(is_build_ignored("^\\.github$")) }) 
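# A sketch of the interactive workflow the preceding test mirrors (not run as
# part of the test suite; assumes a package project whose 'origin' remote
# already points at GitHub and that the default arguments are acceptable):
#   usethis::use_git()
#   usethis::use_github_actions()                    # writes .github/workflows/R-CMD-check.yaml
#   usethis::use_github_actions_badge("R-CMD-check") # adds the badge to README.md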
test_that("use_tidy_github_actions() configures the full check and pr commands", { skip_on_cran() skip_if_no_git_user() skip_if_offline() local_interactive(FALSE) create_local_package() use_git() gert::git_add(".gitignore", repo = git_repo()) gert::git_commit("a commit, so we are not on an unborn branch", repo = git_repo()) use_git_remote(name = "origin", url = "https://github.com/OWNER/REPO") use_readme_md() use_tidy_github_actions() expect_proj_file(".github/workflows/R-CMD-check.yaml") yml <- yaml::yaml.load_file(proj_path(".github/workflows/R-CMD-check.yaml")) expect_identical(yml$name, "R-CMD-check") expect_identical(names(yml$jobs), "R-CMD-check") size_build_matrix <- length(yml[["jobs"]][["R-CMD-check"]][["strategy"]][["matrix"]][["config"]]) expect_true(size_build_matrix >= 6) # release, r-devel, 4 previous versions expect_proj_file(".github/workflows/pkgdown.yaml") expect_proj_file(".github/workflows/test-coverage.yaml") expect_proj_file(".github/workflows/pr-commands.yaml") readme_lines <- read_utf8(proj_path("README.md")) expect_true(any(grepl("R-CMD-check", readme_lines))) expect_true(any(grepl("test coverage", readme_lines))) }) usethis/tests/testthat/test-revdep.R0000644000175000017500000000055414117743363017474 0ustar nileshnileshtest_that("use_revdep() requires a package", { create_local_project() expect_usethis_error(use_revdep(), "not an R package") }) test_that("use_revdep() creates and ignores files/dirs", { create_local_package() use_revdep() expect_proj_file("revdep", "email.yml") expect_proj_file("revdep", ".gitignore") expect_true(is_build_ignored("^revdep$")) }) usethis/tests/testthat/test-utils.R0000644000175000017500000000221314132400710017320 0ustar nileshnileshtest_that("check_is_named_list() works", { l <- list(a = "a", b = 2, c = letters) expect_identical(l, check_is_named_list(l)) expect_usethis_error(check_is_named_list(NULL), "must be a list") expect_usethis_error(check_is_named_list(c(a = "a", b = "b")), "must be a list") expect_usethis_error(check_is_named_list(list("a", b = 2)), "Names of .+ must be") }) test_that("asciify() substitutes non-ASCII but respects case", { expect_identical(asciify("aB!d$F+_h"), "aB-d-F-_h") }) test_that("slug() sets file extension, iff 'ext' not aleady the extension", { expect_equal(slug("abc", "R"), "abc.R") expect_equal(slug("abc.R", "R"), "abc.R") expect_equal(slug("abc.r", "R"), "abc.r") expect_equal(slug("abc.R", "txt"), "abc.txt") }) test_that("path_first_existing() works", { create_local_project() all_3_files <- proj_path(c("alfa", "bravo", "charlie")) expect_null(path_first_existing(all_3_files)) write_utf8(proj_path("charlie"), "charlie") expect_equal(path_first_existing(all_3_files), proj_path("charlie")) write_utf8(proj_path("bravo"), "bravo") expect_equal(path_first_existing(all_3_files), proj_path("bravo")) }) usethis/tests/testthat/test-version.R0000644000175000017500000000447114117743363017676 0ustar nileshnileshtest_that("bump_version() presents all possible incremented versions", { expect_identical( bump_version("1.1.1.9000"), c(major = "2.0.0", minor = "1.2.0", patch = "1.1.2", dev = "1.1.1.9001") ) }) test_that("use_version() and use_dev_version() require a package", { create_local_project() expect_usethis_error(use_version("major"), "not an R package") expect_usethis_error(use_dev_version(), "not an R package") }) test_that("use_version() errors for invalid `which`", { create_local_package() expect_error(use_version("1.2.3"), "should be one of") }) test_that("use_version() increments version in 
DESCRIPTION, edits NEWS", { create_local_package() use_description_field( name = "Version", value = "1.1.1.9000", overwrite = TRUE ) use_news_md() use_version("major") expect_identical( as.character(desc::desc_get_version(proj_get())), "2.0.0" ) expect_match( read_utf8(proj_path("NEWS.md"), n = 1), "2.0.0" ) }) test_that("use_dev_version() appends .9000 to Version, exactly once", { create_local_package() use_description_field(name = "Version", value = "0.0.1", overwrite = TRUE) use_dev_version() expect_identical( as.character(desc::desc_get_version(proj_get())), "0.0.1.9000" ) use_dev_version() expect_identical( as.character(desc::desc_get_version(proj_get())), "0.0.1.9000" ) }) test_that("use_version() updates (development version) directly", { create_local_package() use_description_field(name = "Version", value = "0.0.1", overwrite = TRUE) use_news_md() # bump to dev to set (development version) use_dev_version() # directly overwrite development header use_version("patch") expect_match( read_utf8(proj_path("NEWS.md"), n = 1), "0[.]0[.]2" ) expect_match( read_utf8(proj_path("NEWS.md"), n = 3)[3], "0[.]0[.]1" ) }) test_that("use_version() updates version.c", { create_local_package() use_description_field(name = "Version", value = "1.0.0", overwrite = TRUE) name <- project_name() src_path <- proj_path("src") ver_path <- path(src_path, "version.c") dir_create(src_path) write_utf8(ver_path, glue(' foo; const char {name}_version = "1.0.0"; bar;')) use_dev_version() lines <- read_utf8(ver_path) expect_true(grepl("1.0.0.9000", lines, fixed = TRUE)[[2]]) }) usethis/tests/testthat/test-utils-github.R0000644000175000017500000001363614153502006020617 0ustar nileshnileshtest_that("parse_github_remotes() works, on named list or named character", { urls <- list( https = "https://github.com/OWNER/REPO.git", ghe = "https://github.acme.com/OWNER/REPO.git", browser = "https://github.com/OWNER/REPO", ssh1 = "git@github.com:OWNER/REPO.git", ssh2 = "ssh://git@github.com/OWNER/REPO.git", gitlab1 = "https://gitlab.com/OWNER/REPO.git", gitlab2 = "git@gitlab.com:OWNER/REPO.git", bitbucket1 = "https://bitbucket.org/OWNER/REPO.git", bitbucket2 = "git@bitbucket.org:OWNER/REPO.git" ) parsed <- parse_github_remotes(urls) expect_equal(parsed$name, names(urls)) expect_equal(unique(parsed$repo_owner), "OWNER") expect_equal( parsed$host, c("github.com", "github.acme.com", "github.com", "github.com", "github.com", "gitlab.com", "gitlab.com", "bitbucket.org", "bitbucket.org") ) expect_equal(unique(parsed$repo_name), "REPO") expect_equal( parsed$protocol, c("https", "https", "https", "ssh", "ssh", "https", "ssh", "https", "ssh") ) parsed2 <- parse_github_remotes(unlist(urls)) expect_equal(parsed, parsed2) }) test_that("parse_github_remotes() works on edge cases", { parsed <- parse_github_remotes("https://github.com/HenrikBengtsson/R.rsp") expect_equal(parsed$repo_owner, "HenrikBengtsson") expect_equal(parsed$repo_name, "R.rsp") }) test_that("parse_github_remotes() works for length zero input", { expect_error_free( parsed <- parse_github_remotes(character()) ) expect_equal(nrow(parsed), 0) expect_setequal( names(parsed), c("name", "url", "host", "repo_owner", "repo_name", "protocol") ) }) test_that("parse_repo_url() passes a naked repo spec through", { out <- parse_repo_url("OWNER/REPO") expect_equal( out, list(repo_spec = "OWNER/REPO", host = NULL) ) }) test_that("parse_repo_url() handles GitHub remote URLs", { urls <- list( https = "https://github.com/OWNER/REPO.git", ghe = "https://github.acme.com/OWNER/REPO.git", browser 
= "https://github.com/OWNER/REPO", ssh = "git@github.com:OWNER/REPO.git" ) out <- map(urls, parse_repo_url) expect_match(map_chr(out, "repo_spec"), "OWNER/REPO", fixed = TRUE) out_host <- map_chr(out, "host") expect_match( out_host[c("https", "browser", "ssh")], "https://github.com", fixed = TRUE ) expect_equal(out_host[["ghe"]], "https://github.acme.com") }) test_that("parse_repo_url() errors for non-GitHub remote URLs", { urls <- list( gitlab1 = "https://gitlab.com/OWNER/REPO.git", gitlab2 = "git@gitlab.com:OWNER/REPO.git", bitbucket1 = "https://bitbucket.org/OWNER/REPO.git", bitbucket2 = "git@bitbucket.org:OWNER/REPO.git" ) safely_parse_repo_url <- purrr::safely(parse_repo_url) out <- map(urls, safely_parse_repo_url) out_result <- map(out, "result") expect_true(all(map_lgl(out_result, is.null))) }) test_that("github_remote_list() works", { create_local_project() use_git() use_git_remote("origin", "https://github.com/OWNER/REPO.git") use_git_remote("upstream", "https://github.com/THEM/REPO.git") use_git_remote("foofy", "https://github.com/OTHERS/REPO.git") use_git_remote("gitlab", "https://gitlab.com/OTHERS/REPO.git") use_git_remote("bitbucket", "git@bitbucket.org:OWNER/REPO.git") grl <- github_remote_list() expect_setequal(grl$remote, c("origin", "upstream")) expect_setequal(grl$repo_spec, c("OWNER/REPO", "THEM/REPO")) grl <- github_remote_list(c("upstream", "foofy")) expect_setequal(grl$remote, c("upstream", "foofy")) nms <- names(grl) grl <- github_remote_list(c("gitlab", "bitbucket")) expect_equal(nrow(grl), 0) expect_named(grl, nms) }) test_that("github_remotes(), github_remote_list() accept explicit 0-row input", { x <- data.frame(name = character(), url = character(), stringsAsFactors = FALSE) grl <- github_remote_list(x = x) expect_equal(nrow(grl), 0) expect_true(all(map_lgl(grl, is.character))) gr <- github_remotes(x = x) expect_equal(nrow(grl), 0) }) test_that("github_remotes() works", { skip_on_cran() skip_if_offline("github.com") skip_if_no_git_user() create_local_project() use_git() # no git remotes = 0-row edge case expect_error_free( grl <- github_remotes() ) # a public remote = no token necessary to get github info use_git_remote("origin", "https://github.com/r-lib/usethis.git") expect_error_free( grl <- github_remotes() ) expect_false(grl$is_fork) expect_true(is.na(grl$parent_repo_owner)) # no git remote by this name = 0-row edge case expect_error_free( grl <- github_remotes("foofy") ) # gh::gh() call should fail, so we should get no info from github use_git_remote("origin", "https://github.com/r-lib/DOESNOTEXIST.git", overwrite = TRUE) expect_error_free( grl <- github_remotes() ) expect_true(is.na(grl$is_fork)) }) # GitHub remote configuration -------------------------------------------------- # very sparse, but you have to start somewhere! test_that("fork_upstream_is_not_origin_parent is detected", { # We've already encountered this in the wild. Here's how it happens: # 1. r-pkgs/gh is created # 2. user forks and clones: origin = USER/gh, upstream = r-pkgs/gh # 3. parent repo becomes r-lib/gh, due to transfer or ownership or owner # name change # Now upstream looks like it does not point to fork parent. 
local_interactive(FALSE) create_local_project() use_git() use_git_remote("origin", "https://github.com/jennybc/gh.git") use_git_remote("upstream", "https://github.com/r-pkgs/gh.git") gr <- github_remotes(github_get = FALSE) gr$github_got <- TRUE gr$is_fork <- c(TRUE, FALSE) gr$can_push <- TRUE gr$perm_known <- TRUE gr$parent_repo_owner <- c("r-lib", NA) with_mock( github_remotes = function(...) gr, cfg <- github_remote_config() ) expect_equal(cfg$type, "fork_upstream_is_not_origin_parent") expect_snapshot(error = TRUE, stop_bad_github_remote_config(cfg)) }) usethis/tests/testthat/test-jenkins.R0000644000175000017500000000030614117743363017643 0ustar nileshnileshtest_that("use_jenkins() creates a Makefile AND a Jenkinsfile at project root", { pkg <- create_local_package() use_jenkins() expect_proj_file("Makefile") expect_proj_file("Jenkinsfile") }) usethis/tests/testthat/test-cpp11.R0000644000175000017500000000227114117743363017131 0ustar nileshnileshtest_that("use_cpp11() requires a package", { create_local_project() expect_usethis_error(use_cpp11(), "not an R package") }) test_that("use_cpp11() creates files/dirs, edits DESCRIPTION and .gitignore", { create_local_package() use_roxygen_md() local_interactive(FALSE) with_mock( # Required to pass the check re: whether cpp11 is installed check_installed = function(pkg) TRUE, check_cpp_register_deps = function() invisible(), use_cpp11() ) expect_match(desc::desc_get("LinkingTo"), "cpp11") expect_proj_dir("src") ignores <- read_utf8(proj_path("src", ".gitignore")) expect_true(all(c("*.o", "*.so", "*.dll") %in% ignores)) }) test_that("check_cpp_register_deps is silent if all installed, emits todo if not", { withr::local_options(list(usethis.quiet = FALSE)) with_mock( get_cpp_register_deps = function() c("brio", "decor", "vctrs"), is_installed = function(pkg) TRUE, expect_silent( check_cpp_register_deps() ) ) with_mock( get_cpp_register_deps = function() c("brio", "decor", "vctrs"), is_installed = function(pkg) pkg == "brio", expect_message( check_cpp_register_deps(), "Now install" ) ) }) usethis/tests/testthat/_snaps/0000755000175000017500000000000014153502006016351 5ustar nileshnileshusethis/tests/testthat/_snaps/release.md0000644000175000017500000001041214153757527020335 0ustar nileshnilesh# release bullets don't change accidentally Code writeLines(release_checklist("0.1.0", on_cran = FALSE)) Output First release: * [ ] `usethis::use_cran_comments()` * [ ] Update (aspirational) install instructions in README * [ ] Proofread `Title:` and `Description:` * [ ] Check that all exported functions have `@return` and `@examples` * [ ] Check that `Authors@R:` includes a copyright holder (role 'cph') * [ ] Check [licensing of included files](https://r-pkgs.org/license.html#code-you-bundle) * [ ] Review Prepare for release: * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `rhub::check_for_cran()` * [ ] Draft blog post Submit to CRAN: * [ ] `usethis::use_version('minor')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... 
* [ ] Accepted :tada: * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version()` * [ ] `usethis::use_news_md()` * [ ] Finish blog post * [ ] Tweet * [ ] Add link to blog post in pkgdown news menu --- Code writeLines(release_checklist("0.0.1", on_cran = TRUE)) Output Prepare for release: * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `rhub::check_for_cran()` * [ ] `revdepcheck::revdep_check(num_workers = 4)` * [ ] Update `cran-comments.md` Submit to CRAN: * [ ] `usethis::use_version('patch')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... * [ ] Accepted :tada: * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version()` * [ ] `usethis::use_news_md()` --- Code writeLines(release_checklist("1.0.0", on_cran = TRUE)) Output Prepare for release: * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `rhub::check_for_cran()` * [ ] `revdepcheck::revdep_check(num_workers = 4)` * [ ] Update `cran-comments.md` * [ ] Draft blog post Submit to CRAN: * [ ] `usethis::use_version('major')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... * [ ] Accepted :tada: * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version()` * [ ] `usethis::use_news_md()` * [ ] Finish blog post * [ ] Tweet * [ ] Add link to blog post in pkgdown news menu # RStudio-ness detection works Code writeLines(release_checklist("1.0.0", on_cran = TRUE)) Output Prepare for release: * [ ] Check [current CRAN check results](https://cran.rstudio.org/web/checks/check_results_{TESTPKG}.html) * [ ] [Polish NEWS](https://style.tidyverse.org/news.html#news-release) * [ ] `urlchecker::url_check()` * [ ] `devtools::check(remote = TRUE, manual = TRUE)` * [ ] `devtools::check_win_devel()` * [ ] `rhub::check_for_cran()` * [ ] `revdepcheck::cloud_check()` * [ ] Update `cran-comments.md` * [ ] Draft blog post * [ ] Ping Tracy Teal on Slack Submit to CRAN: * [ ] `usethis::use_version('major')` * [ ] `devtools::submit_cran()` * [ ] Approve email Wait for CRAN... * [ ] Accepted :tada: * [ ] `usethis::use_github_release()` * [ ] `usethis::use_dev_version()` * [ ] `usethis::use_news_md()` * [ ] Finish blog post * [ ] Tweet * [ ] Add link to blog post in pkgdown news menu usethis/tests/testthat/_snaps/readme.md0000644000175000017500000001136614153757526020162 0ustar nileshnilesh# use_readme_md() has expected form for a non-GitHub package Code writeLines(read_utf8("README.md")) Output # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} like so: ``` r # FILL THIS IN! HOW CAN PEOPLE INSTALL YOUR DEV PACKAGE? ``` ## Example This is a basic example which shows you how to solve a common problem: ``` r library({TESTPKG}) ## basic example code ``` # use_readme_md() has expected form for a GitHub package Code writeLines(read_utf8("README.md")) Output # {TESTPKG} The goal of {TESTPKG} is to ... 
## Installation You can install the development version of {TESTPKG} from [GitHub](https://github.com/) with: ``` r # install.packages("devtools") devtools::install_github("OWNER/TESTPKG") ``` ## Example This is a basic example which shows you how to solve a common problem: ``` r library({TESTPKG}) ## basic example code ``` # use_readme_rmd() has expected form for a non-GitHub package Code writeLines(read_utf8("README.Rmd")) Output --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>", fig.path = "man/figures/README-", out.width = "100%" ) ``` # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} like so: ``` r # FILL THIS IN! HOW CAN PEOPLE INSTALL YOUR DEV PACKAGE? ``` ## Example This is a basic example which shows you how to solve a common problem: ```{r example} library({TESTPKG}) ## basic example code ``` What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. You could also use GitHub Actions to re-render `README.Rmd` every time you push. An example workflow can be found here: . You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub and CRAN. # use_readme_rmd() has expected form for a GitHub package Code writeLines(read_utf8("README.Rmd")) Output --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>", fig.path = "man/figures/README-", out.width = "100%" ) ``` # {TESTPKG} The goal of {TESTPKG} is to ... ## Installation You can install the development version of {TESTPKG} from [GitHub](https://github.com/) with: ``` r # install.packages("devtools") devtools::install_github("OWNER/TESTPKG") ``` ## Example This is a basic example which shows you how to solve a common problem: ```{r example} library({TESTPKG}) ## basic example code ``` What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. You could also use GitHub Actions to re-render `README.Rmd` every time you push. An example workflow can be found here: . You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub and CRAN. usethis/tests/testthat/_snaps/vignette.md0000644000175000017500000000054514153757540020543 0ustar nileshnilesh# use_vignette() gives useful errors Code use_vignette() Error argument "name" is missing, with no default Code use_vignette("bad name") Error 'bad name' is not a valid filename for a vignette. It must: * Start with a letter. * Contain only letters, numbers, '_', and '-'. usethis/tests/testthat/_snaps/badge.md0000644000175000017500000000055014153757477017765 0ustar nileshnilesh# use_lifecycle_badge() handles bad and good input Code use_lifecycle_badge() Error argument "stage" is missing, with no default Code use_lifecycle_badge("eperimental") Error `stage` must be one of "experimental", "stable", "superseded", or "deprecated". Did you mean "experimental"? 
usethis/tests/testthat/_snaps/package.md0000644000175000017500000000075214153757520020307 0ustar nileshnilesh# use_package() guides new packages but not pre-existing ones Code use_package("withr") Message v Adding 'withr' to Imports field in DESCRIPTION * Refer to functions with `withr::fun()` Code use_package("withr") Message * Refer to functions with `withr::fun()` Code use_package("withr", "Suggests") Warning Package 'withr' is already listed in 'Imports' in DESCRIPTION, no change made. usethis/tests/testthat/_snaps/utils-github.md0000644000175000017500000000115514153757537021342 0ustar nileshnilesh# fork_upstream_is_not_origin_parent is detected Code stop_bad_github_remote_config(cfg) Error Unsupported GitHub remote configuration: 'fork_upstream_is_not_origin_parent' * Host = 'https://github.com' * origin = 'jennybc/gh' (can push) = fork of NA * upstream = 'r-pkgs/gh' (can push) * The 'origin' GitHub remote is a fork, but its parent is not configured as the 'upstream' remote. Read more about the GitHub remote configurations that usethis supports at: 'https://happygitwithr.com/common-remote-setups.html' usethis/tests/testthat/_snaps/github-actions.md0000644000175000017500000000152414153757514021635 0ustar nileshnilesh# use_github_action() allows for custom urls Code use_github_action(url = "https://raw.githubusercontent.com/r-lib/actions/v1/examples/check-full.yaml", readme = "https://github.com/r-lib/actions/blob/v1/examples/README.md") Message v Creating '.github/' v Adding '^\\.github$' to '.Rbuildignore' v Adding '*.html' to '.github/.gitignore' v Creating '.github/workflows/' v Saving 'r-lib/actions/examples/check-full.yaml@v1' to '.github/workflows/check-full.yaml' * Learn more at . # check_uses_github_actions() can throw error Code check_uses_github_actions() Error Cannot detect that package '{TESTPKG}' already uses GitHub Actions. Do you need to run `use_github_actions()`? usethis/tests/testthat/_snaps/course.md0000644000175000017500000000160514153757503020213 0ustar nileshnilesh# download_url() retry logic works as advertised Code out <- download_url(url = "URL", destfile = "destfile") --- Code out <- download_url(url = "URL", destfile = "destfile") Message i Retrying download ... attempt 2 --- Code out <- download_url(url = "URL", destfile = "destfile") Message i Retrying download ... attempt 2 i Retrying download ... attempt 3 --- Code out <- download_url(url = "URL", destfile = "destfile", n_tries = 3) Message i Retrying download ... attempt 2 i Retrying download ... attempt 3 Error try 3 --- Code out <- download_url(url = "URL", destfile = "destfile", n_tries = 10) Message i Retrying download ... attempt 2 i Retrying download ... attempt 3 i Retrying download ... attempt 4 usethis/tests/testthat/_snaps/git-default-branch.md0000644000175000017500000000132214153757510022345 0ustar nileshnilesh# git_default_branch_rename() surfaces files that smell fishy Code git_default_branch_rename() Message i Local branch 'master' appears to play the role of the default branch. v Moving local 'master' branch to 'main'. * Be sure to update files that refer to the default branch by name. Consider searching within your project for 'master'. 
x These GitHub Action files don't mention the new default branch 'main': - '.github/workflows/blah.yml' x Some badges may refer to the old default branch 'master': - 'README.md' x The bookdown configuration file may refer to the old default branch 'master': - 'whatever/foo/_bookdown.yaml' usethis/tests/testthat/_snaps/ui.md0000644000175000017500000000105214153757535017331 0ustar nileshnilesh# basic UI actions behave as expected Code ui_line("line") Message line Code ui_todo("to do") Message * to do Code ui_done("done") Message v done Code ui_oops("oops") Message x oops Code ui_info("info") Message i info Code ui_code_block(c("x <- 1", "y <- 2")) Message x <- 1 y <- 2 Code ui_warn("a warning") Warning a warning usethis/tests/testthat/_snaps/data-table.md0000644000175000017500000000203514153757505020711 0ustar nileshnilesh# use_data_table() Imports data.table Code roxygen_ns_show() Output [1] "#' @importFrom data.table .BY" [2] "#' @importFrom data.table .EACHI" [3] "#' @importFrom data.table .GRP" [4] "#' @importFrom data.table .I" [5] "#' @importFrom data.table .N" [6] "#' @importFrom data.table .NGRP" [7] "#' @importFrom data.table .SD" [8] "#' @importFrom data.table :=" [9] "#' @importFrom data.table data.table" # use_data_table() blocks use of Depends Code roxygen_ns_show() Output [1] "#' @importFrom data.table .BY" [2] "#' @importFrom data.table .EACHI" [3] "#' @importFrom data.table .GRP" [4] "#' @importFrom data.table .I" [5] "#' @importFrom data.table .N" [6] "#' @importFrom data.table .NGRP" [7] "#' @importFrom data.table .SD" [8] "#' @importFrom data.table :=" [9] "#' @importFrom data.table data.table" usethis/tests/testthat/_snaps/lifecycle.md0000644000175000017500000000111414153757517020652 0ustar nileshnilesh# use_lifecycle() imports badges Code use_lifecycle() Message v Adding 'lifecycle' to Imports field in DESCRIPTION * Refer to functions with `lifecycle::fun()` v Adding '@importFrom lifecycle deprecated' to 'R/test_lifecycle-package.R' v Writing 'NAMESPACE' v Creating 'man/figures/' v Copied SVG badges to 'man/figures/' * Add badges in documentation topics by inserting one of: #' `r lifecycle::badge('experimental')` #' `r lifecycle::badge('superseded')` #' `r lifecycle::badge('deprecated')` usethis/tests/testthat/_snaps/use_import_from.md0000644000175000017500000000113314153757536022126 0ustar nileshnilesh# use_import_from() adds one line for each function Code roxygen_ns_show() Output [1] "#' @importFrom tibble deframe" "#' @importFrom tibble enframe" [3] "#' @importFrom tibble tibble" # use_import_from() generates helpful errors Code use_import_from(1) Error `package` must be a single string Code use_import_from(c("tibble", "rlang")) Error `package` must be a single string Code use_import_from("tibble", "pool_noodle") Error Can't find `tibble::pool_noodle()` usethis/tests/testthat/_snaps/tidyverse.md0000644000175000017500000000125414153757534020735 0ustar nileshnilesh# use_tidy_dependencies() isn't overly informative Code use_tidy_dependencies() Message v Adding 'rlang' to Imports field in DESCRIPTION v Adding 'lifecycle' to Imports field in DESCRIPTION v Adding 'cli' to Imports field in DESCRIPTION v Adding 'glue' to Imports field in DESCRIPTION v Adding 'withr' to Imports field in DESCRIPTION v Adding '@import rlang' to 'R/tidydeps-package.R' v Adding '@importFrom glue glue' to 'R/tidydeps-package.R' v Adding '@importFrom lifecycle deprecated' to 'R/tidydeps-package.R' v Writing 'NAMESPACE' v Saving 'r-lib/rlang/R/compat-purrr.R' to 'R/compat-purrr.R' 
usethis/tests/testthat/_snaps/tidy-upkeep.md0000644000175000017500000000263014153757532021154 0ustar nileshnilesh# upkeep bullets don't change accidentally Code writeLines(upkeep_checklist()) Output Pre-history * [ ] `usethis::use_readme_rmd()` * [ ] `usethis::use_roxygen_md()` * [ ] `usethis::use_github_links()` * [ ] `usethis::use_pkgdown_github_pages()` * [ ] `usethis::use_tidy_labels()` * [ ] `usethis::use_tidy_style()` * [ ] `usethis::use_tidy_description()` * [ ] `urlchecker::url_check()` 2020 * [ ] `usethis::use_package_doc()` Consider letting usethis manage your `@importFrom` directives here. `usethis::use_import_from()` is handy for this. * [ ] `usethis::use_testthat(3)` and upgrade to 3e, [testthat 3e vignette](https://testthat.r-lib.org/articles/third-edition.html) * [ ] Align the names of `R/` files and `test/` files for workflow happiness. `usethis::rename_files()` can be helpful. 2021 * [ ] `usethis::use_tidy_dependencies()` * [ ] `usethis::use_tidy_github_actions()` and update artisanal actions to use `setup-r-dependencies` * [ ] Remove check environments section from `cran-comments.md` * [ ] Bump required R version in DESCRIPTION to 3.4 * [ ] Use lifecycle instead of artisanal deprecation messages, as described in [Communicate lifecycle changes in your functions](https://lifecycle.r-lib.org/articles/communicate.html) usethis/tests/testthat/_snaps/tibble.md0000644000175000017500000000046214153757531020155 0ustar nileshnilesh# use_tibble() Imports tibble Code use_tibble() Message v Adding 'tibble' to Imports field in DESCRIPTION v Adding '@importFrom tibble tibble' to 'R/mypackage-package.R' * Document a returned tibble like so: #' @return a [tibble][tibble::tibble-package] usethis/tests/testthat/test-test.R0000644000175000017500000000051014117743363017156 0ustar nileshnileshtest_that("check_edition() validates inputs", { expect_error(check_edition(20), "not available") expect_error(check_edition("x"), "single number") expect_equal(check_edition(1.5), 1) if (packageVersion("testthat") >= "2.99") { expect_equal(check_edition(), 3) } else { expect_equal(check_edition(), 2) } }) usethis/tests/testthat.R0000644000175000017500000000007213676400413015220 0ustar nileshnileshlibrary(testthat) library(usethis) test_check("usethis") usethis/tests/spelling.R0000644000175000017500000000021113676400413015170 0ustar nileshnileshif (requireNamespace("spelling", quietly = TRUE)) { spelling::spell_check_test(vignettes = TRUE, error = FALSE, skip_on_cran = TRUE) } usethis/R/0000755000175000017500000000000014153502006012265 5ustar nileshnileshusethis/R/logo.R0000644000175000017500000000364214153502006013355 0ustar nileshnilesh#' Use a package logo #' #' This function helps you use a logo in your package: #' * Enforces a specific size #' * Stores logo image file at `man/figures/logo.png` #' * Produces the markdown text you need in README to include the logo #' #' @param img The path to an existing image file #' @param geometry a [magick::geometry] string specifying size. The default #' assumes that you have a hex logo using spec from #' . #' @param retina `TRUE`, the default, scales the image on the README, #' assuming that geometry is double the desired size. 
#' #' @examples #' \dontrun{ #' use_logo("usethis.png") #' } #' @export use_logo <- function(img, geometry = "240x278", retina = TRUE) { check_is_package("use_logo()") logo_path <- proj_path("man", "figures", "logo", ext = path_ext(img)) create_directory(path_dir(logo_path)) if (!can_overwrite(logo_path)) { return(invisible(FALSE)) } if (path_ext(img) == "svg") { logo_path <- path("man", "figures", "logo.svg") file_copy(img, proj_path(logo_path)) ui_done("Copied {ui_path(img)} to {ui_path(logo_path)}") height <- as.integer(sub(".*x", "", geometry)) } else { check_installed("magick") img_data <- magick::image_read(img) img_data <- magick::image_resize(img_data, geometry) magick::image_write(img_data, logo_path) ui_done("Resized {ui_path(img)} to {geometry}") height <- magick::image_info(magick::image_read(logo_path))$height } pkg <- project_name() if (retina) { height <- round(height / 2) } ui_todo("Add logo to your README with the following html:") pd_link <- pkgdown_url(pedantic = TRUE) if (is.null(pd_link)) { ui_code_block("# {pkg} ") } else { ui_code_block("# {pkg} ") } } usethis/R/release.R0000644000175000017500000002725614133123420014041 0ustar nileshnilesh#' Create a release checklist in a GitHub issue #' #' @description #' When preparing to release a package to CRAN there are quite a few steps that #' need to be performed, and some of the steps can take multiple hours. This #' function creates a checklist in a GitHub issue to: #' #' * Help you keep track of where you are in the process #' * Feel a sense of satisfaction as you progress towards final submission #' * Help watchers of your package stay informed. #' #' The checklist contains a generic set of steps that we've found to be helpful, #' based on the type of release ("patch", "minor", or "major"). You're #' encouraged to edit the issue to customize this list to meet your needs. #' If you want to consistently add extra bullets for every release, you can #' include your own custom bullets by providing a (unexported) a #' `release_bullets()` function that returns a character vector. #' (For historical reasons, `release_questions()` is also supported). #' #' @param version Optional version number for release. If unspecified, you can #' make an interactive choice. 
#' @export #' @examples #' \dontrun{ #' use_release_issue("2.0.0") #' } use_release_issue <- function(version = NULL) { check_is_package("use_release_issue()") tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_line(" It is very unusual to open a release issue on a repo you can't push to: {ui_value(tr$repo_spec)}") if (ui_nope("Do you really want to do this?")) { ui_oops("Cancelling.") return(invisible()) } } version <- version %||% choose_version("What should the release version be?") if (is.null(version)) { return(invisible(FALSE)) } on_cran <- !is.null(cran_version()) checklist <- release_checklist(version, on_cran) gh <- gh_tr(tr) issue <- gh( "POST /repos/{owner}/{repo}/issues", title = glue("Release {project_name()} {version}"), body = paste0(checklist, "\n", collapse = "") ) view_url(issue$html_url) } release_checklist <- function(version, on_cran) { type <- release_type(version) cran_results <- cran_results_url() has_src <- dir_exists(proj_path("src")) has_news <- file_exists(proj_path("NEWS.md")) has_pkgdown <- uses_pkgdown() has_readme <- file_exists(proj_path("README.Rmd")) is_rstudio_pkg <- is_rstudio_pkg() c( if (!on_cran) c( "First release:", "", todo("`usethis::use_cran_comments()`"), todo("Update (aspirational) install instructions in README"), todo("Proofread `Title:` and `Description:`"), todo("Check that all exported functions have `@return` and `@examples`"), todo("Check that `Authors@R:` includes a copyright holder (role 'cph')"), todo("Check [licensing of included files](https://r-pkgs.org/license.html#code-you-bundle)"), todo("Review "), "" ), "Prepare for release:", "", todo("Check [current CRAN check results]({cran_results})", on_cran), todo("[Polish NEWS](https://style.tidyverse.org/news.html#news-release)", on_cran), todo("`devtools::build_readme()`", has_readme), todo("`urlchecker::url_check()`"), todo("`devtools::check(remote = TRUE, manual = TRUE)`"), todo("`devtools::check_win_devel()`"), todo("`rhub::check_for_cran()`"), todo("`rhub::check(platform = 'ubuntu-rchk')`", has_src), todo("`rhub::check_with_sanitizers()`", has_src), todo("`revdepcheck::revdep_check(num_workers = 4)`", on_cran && !is_rstudio_pkg), todo("`revdepcheck::cloud_check()`", on_cran && is_rstudio_pkg), todo("Update `cran-comments.md`", on_cran), todo("Review pkgdown reference index for, e.g., missing topics", has_pkgdown && type != "patch"), todo("Draft blog post", type != "patch"), todo("Ping Tracy Teal on Slack", type != "patch" && is_rstudio_pkg), release_extra(), "", "Submit to CRAN:", "", todo("`usethis::use_version('{type}')`"), todo("`devtools::submit_cran()`"), todo("Approve email"), "", "Wait for CRAN...", "", todo("Accepted :tada:"), todo("`usethis::use_github_release()`"), todo("`usethis::use_dev_version()`"), todo("`usethis::use_news_md()`", !has_news), todo("Finish blog post", type != "patch"), todo("Tweet", type != "patch"), todo("Add link to blog post in pkgdown news menu", type != "patch") ) } release_extra <- function(env = parent.env(globalenv())) { if (env_has(env, "release_bullets")) { paste0("* [ ] ", env$release_bullets()) } else if (env_has(env, "release_questions")) { # For backwards compatibility with devtools paste0("* [ ] ", env$release_questions()) } else { character() } } release_type <- function(version) { x <- unclass(numeric_version(version))[[1]] n <- length(x) if (n >= 3 && x[[3]] != 0L) { "patch" } else if (n >= 2 && x[[2]] != 0L) { "minor" } else { "major" } } #' Draft a GitHub release #' #' @description #' Creates a __draft__ GitHub 
release for the current package. Once you are #' satisfied that it is correct, you will need to publish the release from #' GitHub. The key pieces of info are which commit / SHA to tag, the associated #' package version, and the relevant NEWS entries. #' #' If you use `devtools::release()` or `devtools::submit_cran()` to submit to #' CRAN, information about the submitted state is captured in a CRAN-SUBMISSION #' or CRAN-RELEASE file. `use_github_release()` uses this info to populate the #' draft GitHub release and, after success, deletes the CRAN-SUBMISSION or #' CRAN-RELEASE file. #' #' In the absence of such a file, we must fall back to assuming the current #' state (SHA of `HEAD`, package version, NEWS) is the submitted state. #' #' @param host,auth_token `r lifecycle::badge("deprecated")`: No longer #' consulted now that usethis allows the gh package to lookup a token based on #' a URL determined from the current project's GitHub remotes. #' @export use_github_release <- function(host = deprecated(), auth_token = deprecated()) { check_is_package("use_github_release()") if (lifecycle::is_present(host)) { deprecate_warn_host("use_github_release") } if (lifecycle::is_present(auth_token)) { deprecate_warn_auth_token("use_github_release") } tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_stop(" You don't seem to have push access for {ui_value(tr$repo_spec)}, which \\ is required to draft a release.") } dat <- get_release_data(tr) news <- get_release_news(SHA = dat$SHA, tr = tr) release_name <- glue("{dat$Package} {dat$Version}") tag_name <- glue("v{dat$Version}") kv_line("Release name", release_name) kv_line("Tag name", tag_name) kv_line("SHA", dat$SHA) gh <- gh_tr(tr) release <- gh( "POST /repos/{owner}/{repo}/releases", name = release_name, tag_name = tag_name, target_commitish = dat$SHA, body = news, draft = TRUE ) if (!is.null(dat$file)) { ui_done("{ui_path(dat$file)} deleted") file_delete(dat$file) } view_url(release$html_url) ui_todo("Publish the release via \"Edit draft\" > \"Publish release\"") } get_release_data <- function(tr = target_repo(github_get = TRUE)) { package <- package_data() cran_submission <- path_first_existing(proj_path(c("CRAN-SUBMISSION", "CRAN-RELEASE"))) if (is.null(cran_submission)) { ui_done("Using current HEAD commit for the release") challenge_non_default_branch() check_branch_pushed() return(list( Package = package$Package, Version = package$Version, SHA = gert::git_info(repo = git_repo())$commit )) } if (path_file(cran_submission) == "CRAN-SUBMISSION") { # new style ---- # Version: 2.4.2 # Date: 2021-10-13 20:40:36 UTC # SHA: fbe18b5a22be8ebbb61fa7436e826ba8d7f485a9 out <- as.list(read.dcf(cran_submission)[1, ]) } if (path_file(cran_submission) == "CRAN-RELEASE") { gh <- gh_tr(tr) # old style ---- # This package was submitted to CRAN on 2021-10-13. # Once it is accepted, delete this file and tag the release (commit e10658f5). 
lines <- read_utf8(cran_submission) str_extract <- function(marker, pattern) { re_match(grep(marker, lines, value = TRUE), pattern)$.match } date <- str_extract("submitted.*on", "[0-9]{4}-[0-9]{2}-[0-9]{2}") sha <- str_extract("commit", "[[:xdigit:]]{7,40}") if (nchar(sha) != 40) { # the release endpoint requires the full sha sha <- gh("/repos/{owner}/{repo}/commits/{commit_sha}", commit_sha = sha)$sha } HEAD <- gert::git_info(repo = git_repo())$commit if (HEAD == sha) { version <- package$Version } else { tf <- glue("{package_data()$Package}-DESCRIPTION-{substr(sha, 1, 7)}-") tf <- withr::local_tempfile(pattern = tf) gh( "/repos/{owner}/{repo}/contents/{path}", path = "DESCRIPTION", ref = sha, .destfile = tf, .accept = "application/vnd.github.v3.raw" ) version <- desc::desc(file = tf)$get_version() } out <- list( Version = version, Date = Sys.Date(), SHA = sha ) } out$Package <- package$Package out$file <- cran_submission ui_done(" {ui_path(out$file)} file found, from a submission on {as.Date(out$Date)}") out } get_release_news <- function(SHA = gert::git_info(repo = git_repo())$commit, tr = target_repo(github_get = TRUE)) { package <- package_data() HEAD <- gert::git_info(repo = git_repo())$commit if (HEAD == SHA) { news_path <- proj_path("NEWS.md") } else { news_path <- glue("{package_data()$Package}-NEWS-{substr(SHA, 1, 7)}-") news_path <- withr::local_tempfile(pattern = news_path) gh <- purrr::possibly(gh_tr(tr), otherwise = NULL) gh( "/repos/{owner}/{repo}/contents/{path}", path = "NEWS.md", ref = SHA, .destfile = news_path, .accept = "application/vnd.github.v3.raw" ) } if (file_exists(news_path)) { news <- news_latest(read_utf8(news_path)) } else { news <- "Initial release" } news } cran_version <- function(package = project_name(), available = utils::available.packages()) { idx <- available[, "Package"] == package if (any(idx)) { as.package_version(available[package, "Version"]) } else { NULL } } cran_results_url <- function(package = project_name()) { glue("https://cran.rstudio.org/web/checks/check_results_{package}.html") } news_latest <- function(lines) { headings <- which(grepl("^#\\s+", lines)) if (length(headings) == 0) { ui_stop("No top-level headings found in {ui_value('NEWS.md')}") } else if (length(headings) == 1) { news <- lines[seq2(headings + 1, length(lines))] } else { news <- lines[seq2(headings[[1]] + 1, headings[[2]] - 1)] } # Remove leading and trailing empty lines text <- which(news != "") if (length(text) == 0) { return("") } news <- news[text[[1]]:text[[length(text)]]] paste0(news, "\n", collapse = "") } is_rstudio_pkg <- function() { is_rstudio_funded() || is_in_rstudio_org() } is_rstudio_funded <- function() { if (!is_package()) { return(FALSE) } desc <- desc::desc(file = proj_get()) funders <- unclass(desc$get_author("fnd")) purrr::some(funders, ~ .x$given == "RStudio") } is_in_rstudio_org <- function() { if (!is_package()) { return(FALSE) } desc <- desc::desc(file = proj_get()) urls <- desc$get_urls() dat <- parse_github_remotes(urls) dat <- dat[dat$host == "github.com", ] purrr::some(dat$repo_owner, ~ .x %in% rstudio_orgs()) } rstudio_orgs <- function() { c( "tidyverse", "r-lib", "tidymodels", "rstudio" ) } todo <- function(x, cond = TRUE) { x <- glue(x, .envir = parent.frame()) if (cond) { paste0("* [ ] ", x) } } usethis/R/cran.R0000644000175000017500000000133713737204645013356 0ustar nileshnilesh#' CRAN submission comments #' #' Creates `cran-comments.md`, a template for your communications with CRAN when #' submitting a package. 
The goal is to clearly communicate the steps you have #' taken to check your package on a wide range of operating systems. If you are #' submitting an update to a package that is used by other packages, you also #' need to summarize the results of your [reverse dependency #' checks][use_revdep]. #' #' @export #' @inheritParams use_template use_cran_comments <- function(open = rlang::is_interactive()) { check_is_package("use_cran_comments()") use_template( "cran-comments.md", data = list(rversion = glue("{version$major}.{version$minor}")), ignore = TRUE, open = open ) } usethis/R/rmarkdown.R0000644000175000017500000000341413676400475014436 0ustar nileshnilesh#' Add an RMarkdown Template #' #' Adds files and directories necessary to add a custom rmarkdown template to #' RStudio. It creates: #' * `inst/rmarkdown/templates/{{template_dir}}`. Main directory. #' * `skeleton/skeleton.Rmd`. Your template Rmd file. #' * `template.yml` with basic information filled in. #' #' @param template_name The name as printed in the template menu. #' @param template_dir Name of the directory the template will live in within #' `inst/rmarkdown/templates`. If none is provided by the user, it will be #' created from `template_name`. #' @param template_description Sets the value of `description` in #' `template.yml`. #' @param template_create_dir Sets the value of `create_dir` in `template.yml`. #' #' @export #' @examples #' \dontrun{ #' use_rmarkdown_template() #' } use_rmarkdown_template <- function(template_name = "Template Name", template_dir = NULL, template_description = "A description of the template", template_create_dir = FALSE) { # Process some of the inputs template_dir <- template_dir %||% tolower(asciify(template_name)) template_create_dir <- as.character(template_create_dir) template_dir <- path("inst", "rmarkdown", "templates", template_dir) # Scaffold files use_directory(path(template_dir, "skeleton")) use_template( "rmarkdown-template.yml", data = list( template_dir = template_dir, template_name = template_name, template_description = template_description, template_create_dir = template_create_dir ), save_as = path(template_dir, "template.yaml") ) use_template( "rmarkdown-template.Rmd", path(template_dir, "skeleton", "skeleton.Rmd") ) invisible(TRUE) } usethis/R/vignette.R0000644000175000017500000000551714117743363014262 0ustar nileshnilesh#' Create a vignette or article #' #' Creates a new vignette or article in `vignettes/`. Articles are a special #' type of vignette that appear on pkgdown websites, but are not included #' in the package itself (because they are added to `.Rbuildignore` #' automatically). #' #' @section General setup: #' * Adds needed packages to `DESCRIPTION`. #' * Adds `inst/doc` to `.gitignore` so built vignettes aren't tracked. #' * Adds `vignettes/*.html` and `vignettes/*.R` to `.gitignore` so #' you never accidentally track rendered vignettes. #' @param name Base for file name to use for new vignette. Should consist only #' of numbers, letters, `_` and `-`. Lower case is recommended. #' @param title The title of the vignette. #' @seealso The [vignettes chapter](https://r-pkgs.org/vignettes.html) of #' [R Packages](https://r-pkgs.org). 
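#'
#' @section Vignette file names:
#' For illustration, hypothetical names as judged by the internal
#' `valid_vignette_name()` helper defined at the bottom of this file:
#'
#' ```
#' valid_vignette_name("getting-started")  # TRUE
#' valid_vignette_name("2-setup")          # FALSE: must start with a letter
#' valid_vignette_name("intro stuff")      # FALSE: spaces are not allowed
#' ```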
#' @export #' @examples #' \dontrun{ #' use_vignette("how-to-do-stuff", "How to do stuff") #' } use_vignette <- function(name, title = name) { check_is_package("use_vignette()") check_vignette_name(name) use_dependency("knitr", "Suggests") use_description_field("VignetteBuilder", "knitr", overwrite = TRUE) use_git_ignore("inst/doc") use_vignette_template("vignette.Rmd", name, title) invisible() } #' @export #' @rdname use_vignette use_article <- function(name, title = name) { check_is_package("use_article()") use_vignette_template("article.Rmd", name, title, subdir = "articles") use_build_ignore("vignettes/articles") invisible() } use_vignette_template <- function(template, name, title, subdir = NULL) { stopifnot(is_string(name)) stopifnot(is_string(title)) use_directory("vignettes") if (!is.null(subdir)) { use_directory(path("vignettes", subdir)) } use_git_ignore(c("*.html", "*.R"), directory = "vignettes") use_dependency("rmarkdown", "Suggests") if (!is.null(subdir)) { path <- path("vignettes", subdir, asciify(name), ext = "Rmd") } else { path <- path("vignettes", asciify(name), ext = "Rmd") } data <- list( Package = project_name(), vignette_title = title, braced_vignette_title = glue("{{{title}}}") ) use_template(template, save_as = path, data = data, open = TRUE ) path } check_vignette_name <- function(name) { if (!valid_vignette_name(name)) { ui_stop(c( "{ui_value(name)} is not a valid filename for a vignette. It must:", "* Start with a letter.", "* Contain only letters, numbers, '_', and '-'." )) } } # https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Writing-package-vignettes # "To ensure that they can be accessed from a browser (as an HTML index is # provided), the file names should start with an ASCII letter and be comprised # entirely of ASCII letters or digits or hyphen or underscore." valid_vignette_name <- function(x) { grepl("^[[:alpha:]][[:alnum:]_-]*$", x) } usethis/R/utils-gh.R0000644000175000017500000000310614117743363014161 0ustar nileshnilesh# Functions that are in a grey area between usethis and gh gh_tr <- function(tr) { force(tr) function(endpoint, ...) 
{ gh::gh( endpoint, ..., owner = tr$repo_owner, repo = tr$repo_name, .api_url = tr$api_url ) } } # Functions inlined from gh ---- get_baseurl <- function(url) { # https://github.uni.edu/api/v3/ if (!any(grepl("^https?://", url))) { stop("Only works with HTTP(S) protocols") } prot <- sub("^(https?://).*$", "\\1", url) # https:// rest <- sub("^https?://(.*)$", "\\1", url) # github.uni.edu/api/v3/ host <- sub("/.*$", "", rest) # github.uni.edu paste0(prot, host) # https://github.uni.edu } # https://api.github.com --> https://github.com # api.github.com --> github.com normalize_host <- function(x) { sub("api[.]github[.]com", "github.com", x) } get_hosturl <- function(url) { url <- get_baseurl(url) normalize_host(url) } # (almost) the inverse of get_hosturl() # https://github.com --> https://api.github.com # https://github.uni.edu --> https://github.uni.edu/api/v3 get_apiurl <- function(url) { host_url <- get_hosturl(url) prot_host <- strsplit(host_url, "://", fixed = TRUE)[[1]] if (is_github_dot_com(host_url)) { paste0(prot_host[[1]], "://api.github.com") } else { paste0(host_url, "/api/v3") } } is_github_dot_com <- function(url) { url <- get_baseurl(url) url <- normalize_host(url) grepl("^https?://github.com", url) } default_api_url <- function() { Sys.getenv("GITHUB_API_URL", unset = "https://api.github.com") } usethis/R/badge.R0000644000175000017500000001071514131645451013466 0ustar nileshnilesh#' README badges #' #' These helpers produce the markdown text you need in your README to include #' badges that report information, such as the CRAN version or test coverage, #' and link out to relevant external resources. To add badges automatically #' ensure your badge block starts with a line containing only #' `` and ends with a line containing only #' ``. #' #' @details #' #' * `use_badge()`: a general helper used in all badge functions #' * `use_bioc_badge()`: badge indicates [BioConductor build #' status](https://bioconductor.org/developers/) #' * `use_cran_badge()`: badge indicates what version of your package is #' available on CRAN, powered by #' * `use_lifecycle_badge()`: badge declares the developmental stage of a #' package according to . #' #' * `use_binder_badge()`: badge indicates that your repository can be launched #' in an executable environment on #' #' @param badge_name Badge name. Used in error message and alt text #' @param href,src Badge link and image src #' @param stage Stage of the package lifecycle. One of "experimental", #' "stable", "superseded", or "deprecated". #' @seealso Functions that configure continuous integration, such as #' [use_github_actions()], also create badges. #' #' @name badges #' @examples #' \dontrun{ #' use_cran_badge() #' use_lifecycle_badge("stable") #' } NULL #' @rdname badges #' @export use_badge <- function(badge_name, href, src) { path <- find_readme() if (is.null(path)) { ui_oops(" Can't find a README for the current project. See {ui_code('usethis::use_readme_rmd()')} for help creating this file. Badge link can only be printed to screen. 
") path <- "README" } changed <- block_append( glue("{ui_field(badge_name)} badge"), glue("[![{badge_name}]({src})]({href})"), path = path, block_start = badge_start, block_end = badge_end ) if (changed && path_ext(path) == "Rmd") { ui_todo("Re-knit {ui_path(path)}") } invisible(changed) } #' @rdname badges #' @export use_cran_badge <- function() { check_is_package("use_cran_badge()") pkg <- project_name() src <- glue("https://www.r-pkg.org/badges/version/{pkg}") href <- glue("https://CRAN.R-project.org/package={pkg}") use_badge("CRAN status", href, src) invisible(TRUE) } #' @rdname badges #' @export use_bioc_badge <- function() { check_is_package("use_bioc_badge()") pkg <- project_name() src <- glue("http://www.bioconductor.org/shields/build/release/bioc/{pkg}.svg") href <- glue("https://bioconductor.org/checkResults/release/bioc-LATEST/{pkg}") use_badge("BioC status", href, src) invisible(TRUE) } #' @rdname badges #' @export use_lifecycle_badge <- function(stage) { check_is_package("use_lifecycle_badge()") pkg <- project_name() stage <- tolower(stage) stage <- arg_match0(stage, names(stages)) colour <- stages[[stage]] src <- glue("https://img.shields.io/badge/lifecycle-{stage}-{colour}.svg") href <- glue("https://lifecycle.r-lib.org/articles/stages.html#{stage}") use_badge(paste0("Lifecycle: ", stage), href, src) invisible(TRUE) } stages <- c( experimental = "orange", stable = "brightgreen", superseded = "blue", deprecated = "orange" ) #' @rdname badges #' @param ref A Git branch, tag, or SHA #' @param urlpath An optional `urlpath` component to add to the link, e.g. #' `"rstudio"` to open an RStudio IDE instead of a Jupyter notebook. See the #' [binder #' documentation](https://mybinder.readthedocs.io/en/latest/howto/user_interface.html) #' for additional examples. #' @export use_binder_badge <- function(ref = git_default_branch(), urlpath = NULL) { repo_spec <- target_repo_spec() if (is.null(urlpath)) { urlpath <- "" } else { urlpath <- glue("?urlpath={urlpath}") } url <- glue("https://mybinder.org/v2/gh/{repo_spec}/{ref}{urlpath}") img <- "https://mybinder.org/badge_logo.svg" use_badge("Launch binder", url, img) invisible(TRUE) } has_badge <- function(href) { readme_path <- proj_path("README.md") if (!file_exists(readme_path)) { return(FALSE) } readme <- read_utf8(readme_path) any(grepl(href, readme, fixed = TRUE)) } # Badge data structure ---------------------------------------------------- badge_start <- "" badge_end <- "" find_readme <- function() { path_first_existing(proj_path(c("README.Rmd", "README.md"))) } usethis/R/lifecycle.R0000644000175000017500000000251414131622147014356 0ustar nileshnilesh#' Use lifecycle badges #' #' @description #' This helper: #' #' * Adds lifecycle as a dependency. #' * Imports [lifecycle::deprecated()] for use in function arguments. #' * Copies the lifecycle badges into `man/figures`. #' * Reminds you how to use the badge syntax. 
#' #' Learn more at #' #' @seealso [use_lifecycle_badge()] to signal the #' [lifecycle stage](https://lifecycle.r-lib.org/articles/stages.html) of #' your package as whole #' @export use_lifecycle <- function() { check_is_package("use_lifecycle()") check_uses_roxygen("use_lifecycle()") if (!uses_roxygen_md()) { ui_stop(" Turn on roxygen2 markdown support {ui_code('use_roxygen_md()')}") } use_package("lifecycle") use_import_from("lifecycle", "deprecated") dest_dir <- proj_path("man", "figures") create_directory(dest_dir) templ_dir <- path_package("usethis", "templates") templ_files <- dir_ls(templ_dir, glob = "*/lifecycle-*.svg") purrr::walk(templ_files, file_copy, dest_dir, overwrite = TRUE) ui_done("Copied SVG badges to {ui_path(dest_dir)}") ui_todo(c( "Add badges in documentation topics by inserting one of:", "#' `r lifecycle::badge('experimental')`", "#' `r lifecycle::badge('superseded')`", "#' `r lifecycle::badge('deprecated')`" )) invisible(TRUE) } usethis/R/coverage.R0000644000175000017500000000330514132400710014200 0ustar nileshnilesh#' Test coverage #' #' Adds test coverage reporting to a package, using either Codecov #' (`https://codecov.io`) or Coveralls (`https://coveralls.io`). #' #' @param type Which web service to use. #' @eval param_repo_spec() #' @export use_coverage <- function(type = c("codecov", "coveralls"), repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() use_dependency("covr", "Suggests") type <- match.arg(type) if (type == "codecov") { new <- use_template("codecov.yml", ignore = TRUE) if (!new) { return(invisible(FALSE)) } } else if (type == "coveralls") { ui_todo("Turn on coveralls for this repo at https://coveralls.io/repos/new") } switch( type, codecov = use_codecov_badge(repo_spec), coveralls = use_coveralls_badge(repo_spec) ) ui_todo(" Call {ui_code('use_github_action(\"test-coverage\")')} to continuously \\ monitor test coverage.") invisible(TRUE) } #' @export #' @rdname use_coverage #' @param files Character vector of file globs. use_covr_ignore <- function(files) { use_build_ignore(".covrignore") write_union(proj_path(".covrignore"), files) } use_codecov_badge <- function(repo_spec) { default_branch <- git_default_branch() url <- glue("https://app.codecov.io/gh/{repo_spec}?branch={default_branch}") img <- glue("https://codecov.io/gh/{repo_spec}/branch/{default_branch}/graph/badge.svg") use_badge("Codecov test coverage", url, img) } use_coveralls_badge <- function(repo_spec) { default_branch <- git_default_branch() url <- glue("https://coveralls.io/r/{repo_spec}?branch={default_branch}") img <- glue("https://coveralls.io/repos/github/{repo_spec}/badge.svg") use_badge("Coveralls test coverage", url, img) } usethis/R/pkgdown.R0000644000175000017500000001410114153502006014056 0ustar nileshnilesh#' Use pkgdown #' #' @description #' [pkgdown](https://pkgdown.r-lib.org) makes it easy to turn your package into #' a beautiful website. usethis provides two functions to help you use pkgdown: #' #' * `use_pkgdown()`: creates a pkgdown config file and adds relevant files or #' directories to `.Rbuildignore` and `.gitignore`. 
#' #' * `use_pkgdown_github_pages()`: implements the GitHub setup needed to #' automatically publish your pkgdown site to GitHub pages: #' #' - (first, it calls `use_pkgdown()`) #' - [use_github_pages()] prepares to publish the pkgdown site from the #' `github-pages` branch #' - [`use_github_action("pkgdown")`][use_github_action()] configures a #' GitHub Action to automatically build the pkgdown site and deploy it via #' GitHub Pages #' - The pkgdown site's URL is added to the pkgdown configuration file, #' to the URL field of DESCRIPTION, and to the GitHub repo. #' - Packages owned by certain GitHub organizations (tidyverse, r-lib, and #' tidymodels) get some special treatment, in terms of anticipating the #' (eventual) site URL and the use of a pkgdown template. #' #' `use_pkgdown_travis()` is deprecated; we no longer recommend that you use #' Travis-CI. #' #' @seealso #' @param config_file Path to the pkgdown yaml config file #' @param destdir Target directory for pkgdown docs #' @export use_pkgdown <- function(config_file = "_pkgdown.yml", destdir = "docs") { check_is_package("use_pkgdown()") check_installed("pkgdown") use_build_ignore(c(config_file, destdir, "pkgdown")) use_git_ignore(destdir) config <- pkgdown_config(destdir) config_path <- proj_path(config_file) write_over(config_path, yaml::as.yaml(config)) edit_file(config_path) invisible(TRUE) } pkgdown_config <- function(destdir) { config <- list( url = NULL ) if (pkgdown_version() >= "1.9000") { config$template <- list(bootstrap = 5L) } if (!identical(destdir, "docs")) { config$destination <- destdir } config } # wrapping because I need to be able to mock this in tests pkgdown_version <- function() { utils::packageVersion("pkgdown") } #' @rdname use_pkgdown #' @export use_pkgdown_github_pages <- function() { tr <- target_repo(github_get = TRUE) use_pkgdown() site <- use_github_pages() use_github_action("pkgdown") site_url <- tidyverse_url(url = site$html_url, tr = tr) use_pkgdown_url(url = site_url, tr = tr) if (tr$repo_owner %in% c("tidyverse", "tidymodels", "r-lib")) { ui_done(" Adding {ui_value('tidyverse/tidytemplate')} to \\ {ui_field('Config/Needs/website')}") use_description_list("Config/Needs/website", "tidyverse/tidytemplate") } } # helpers ---------------------------------------------------------------------- use_pkgdown_url <- function(url, tr = NULL) { tr <- tr %||% target_repo(github_get = TRUE) config_path <- pkgdown_config_path() ui_done(" Recording {ui_value(url)} as site's {ui_field('url')} in \\ {ui_path(config_path)}") config <- pkgdown_config_meta() if (has_name(config, "url")) { config$url <- url } else { config <- c(url = url, config) } write_utf8(config_path, yaml::as.yaml(config)) ui_done("Adding {ui_value(url)} to {ui_field('URL')} field in DESCRIPTION") desc <- desc::desc(file = proj_get()) desc$add_urls(url) desc$write() gh <- gh_tr(tr) homepage <- gh("GET /repos/{owner}/{repo}")[["homepage"]] if (is.null(homepage) || homepage != url) { ui_done("Setting {ui_value(url)} as homepage of GitHub repo \\ {ui_value(tr$repo_spec)}") gh("PATCH /repos/{owner}/{repo}", homepage = url) } invisible() } tidyverse_url <- function(url, tr = NULL) { tr <- tr %||% target_repo(github_get = TRUE) if (!is_interactive() || !tr$repo_owner %in% c("tidyverse", "r-lib", "tidymodels")) { return(url) } custom_url <- glue("https://{tr$repo_name}.{tr$repo_owner}.org") if (grepl(glue("{custom_url}/?"), url)) { return(url) } if (ui_yeah(" {ui_value(tr$repo_name)} is owned by the {ui_value(tr$repo_owner)} GitHub \\ organization. 
Shall we configure {ui_value(custom_url)} as the (eventual) \\ pkgdown URL?")) { custom_url } else { url } } pkgdown_config_path <- function() { path_first_existing( proj_path( c( "_pkgdown.yml", "_pkgdown.yaml", "pkgdown/_pkgdown.yml", "pkgdown/_pkgdown.yaml", "inst/_pkgdown.yml", "inst/_pkgdown.yaml" ) ) ) } uses_pkgdown <- function() { !is.null(pkgdown_config_path()) } pkgdown_config_meta <- function() { if (!uses_pkgdown()) { return(list()) } path <- pkgdown_config_path() yaml::read_yaml(path) %||% list() } pkgdown_url <- function(pedantic = FALSE) { if (!uses_pkgdown()) { return(NULL) } meta <- pkgdown_config_meta() url <- meta$url if (!is.null(url)) { return(url) } if (pedantic) { ui_warn(" pkgdown config does not specify the site's {ui_field('url')}, \\ which is optional but recommended") } NULL } # travis ---- #' @export #' @rdname use_pkgdown use_pkgdown_travis <- function() { lifecycle::deprecate_soft( when = "2.0.0", what = "usethis::use_pkgdown_travis()", details = 'We recommend `use_github_action("pkgdown")` for new pkgdown setups.' ) check_installed("pkgdown") if (!uses_pkgdown()) { ui_stop(" Package doesn't use pkgdown. Do you need to call {ui_code('use_pkgdown()')}?") } tr <- target_repo(github_get = TRUE) use_build_ignore(c("docs/", "pkgdown")) use_git_ignore("docs/") # TODO: suggest git rm -r --cache docs/ # Can't currently detect if git known files in that directory ui_todo(" Set up deploy keys by running {ui_code('travis::use_travis_deploy()')}") ui_todo("Insert the following code in {ui_path('.travis.yml')}") ui_code_block( " before_cache: Rscript -e 'remotes::install_cran(\"pkgdown\")' deploy: provider: script script: Rscript -e 'pkgdown::deploy_site_github()' skip_cleanup: true " ) use_github_pages() invisible() } usethis/R/license.R0000644000175000017500000001410514117743363014050 0ustar nileshnilesh#' License a package #' #' @description #' Adds the necessary infrastructure to declare your package as licensed #' with one of these popular open source licenses: #' #' Permissive: #' * [MIT](https://choosealicense.com/licenses/mit/): simple and permissive. #' * [Apache 2.0](https://choosealicense.com/licenses/apache-2.0/): MIT + #' provides patent protection. #' #' Copyleft: #' * [GPL v2](https://choosealicense.com/licenses/gpl-2.0/): requires sharing #' of improvements. #' * [GPL v3](https://choosealicense.com/licenses/gpl-3.0/): requires sharing #' of improvements. #' * [AGPL v3](https://choosealicense.com/licenses/agpl-3.0/): requires sharing #' of improvements. #' * [LGPL v2.1](https://choosealicense.com/licenses/lgpl-2.1/): requires sharing #' of improvements. #' * [LGPL v3](https://choosealicense.com/licenses/lgpl-3.0/): requires sharing #' of improvements. #' #' Creative commons licenses appropriate for data packages: #' * [CC0](https://creativecommons.org/publicdomain/zero/1.0/): dedicated #' to public domain. #' * [CC-BY](https://creativecommons.org/licenses/by/4.0/): Free to share and #' adapt, must give appropriate credit. #' #' See for more details and other options. #' #' Alternatively, for code that you don't want to share with others, #' `use_proprietary_license()` makes it clear that all rights are reserved, #' and the code is not open source. #' #' @details #' CRAN does not permit you to include copies of standard licenses in your #' package, so these functions save the license as `LICENSE.md` and add it #' to `.Rbuildignore`. #' #' @name licenses #' @param copyright_holder Name of the copyright holder or holders. 
This #' defaults to "{package name} authors"; you should only change this if you #' use a CLA to assign copyright to a single entity. #' @param version License version. This defaults to latest version all licenses. #' @param include_future If `TRUE`, will license your package under the current #' and any potential future versions of the license. This is generally #' considered to be good practice because it means your package will #' automatically include "bug" fixes in licenses. #' @seealso For more details, refer to the the #' [license chapter](https://r-pkgs.org/license.html) in _R Packages_. #' @aliases NULL NULL #' @rdname licenses #' @export use_mit_license <- function(copyright_holder = NULL) { data <- list( year = format(Sys.Date(), "%Y"), copyright_holder = copyright_holder %||% glue("{project_name()} authors") ) if (is_package()) { use_description_field("License", "MIT + file LICENSE", overwrite = TRUE) use_template("year-copyright.txt", save_as = "LICENSE", data = data) } use_license_template("mit", data) } #' @rdname licenses #' @export use_gpl_license <- function(version = 3, include_future = TRUE) { version <- check_license_version(version, 2:3) if (is_package()) { abbr <- license_abbr("GPL", version, include_future) use_description_field("License", abbr, overwrite = TRUE) } use_license_template(glue("GPL-{version}")) } #' @rdname licenses #' @export use_agpl_license <- function(version = 3, include_future = TRUE) { version <- check_license_version(version, 3) if (is_package()) { abbr <- license_abbr("AGPL", version, include_future) use_description_field("License", abbr, overwrite = TRUE) } use_license_template(glue("AGPL-{version}")) } #' @rdname licenses #' @export use_lgpl_license <- function(version = 3, include_future = TRUE) { version <- check_license_version(version, c(2.1, 3)) if (is_package()) { abbr <- license_abbr("LGPL", version, include_future) use_description_field("License", abbr, overwrite = TRUE) } use_license_template(glue("LGPL-{version}")) } #' @rdname licenses #' @export use_apache_license <- function(version = 2, include_future = TRUE) { version <- check_license_version(version, 2) if (is_package()) { abbr <- license_abbr("Apache License", version, include_future) use_description_field("License", abbr, overwrite = TRUE) } use_license_template(glue("apache-{version}")) } #' @rdname licenses #' @export use_cc0_license <- function() { if (is_package()) { use_description_field("License", "CC0", overwrite = TRUE) } use_license_template("cc0") } #' @rdname licenses #' @export use_ccby_license <- function() { if (is_package()) { use_description_field("License", "CC BY 4.0", overwrite = TRUE) } use_license_template("ccby-4") } #' @rdname licenses #' @export use_proprietary_license <- function(copyright_holder) { data <- list( year = year(), copyright_holder = copyright_holder ) if (is_package()) { use_description_field("License", "file LICENSE", overwrite = TRUE) } use_template("license-proprietary.txt", save_as = "LICENSE", data = data) } # Fallbacks --------------------------------------------------------------- #' @rdname licenses #' @export #' @usage NULL use_gpl3_license <- function() { use_gpl_license(3) } #' @rdname licenses #' @export #' @usage NULL use_agpl3_license <- function() { use_agpl_license(3) } #' @rdname licenses #' @export #' @usage NULL use_apl2_license <- function() { use_apache_license(2) } # Helpers ----------------------------------------------------------------- use_license_template <- function(license, data = list()) { license_template 
<- glue("license-{license}.md") use_template(license_template, save_as = "LICENSE.md", data = data, ignore = TRUE ) } check_license_version <- function(version, possible) { version <- as.double(version) if (!version %in% possible) { possible <- glue_collapse(possible, sep = ", ", last = ", or ") ui_stop("`version` must be {possible}") } version } license_abbr <- function(name, version, include_future) { if (include_future) { glue_chr("{name} (>= {version})") } else { if (name %in% c("GPL", "LGPL", "AGPL")) { # Standard abbreviations listed at # https://cran.rstudio.com/doc/manuals/r-devel/R-exts.html#Licensing glue_chr("{name}-{version}") } else { glue_chr("{name} (== {version})") } } } usethis/R/proj.R0000644000175000017500000002125114131645451013373 0ustar nileshnileshproj <- new.env(parent = emptyenv()) proj_get_ <- function() proj$cur proj_set_ <- function(path) { old <- proj$cur proj$cur <- path invisible(old) } #' Utility functions for the active project #' #' Most `use_*()` functions act on the **active project**. If it is #' unset, usethis uses [rprojroot](https://rprojroot.r-lib.org) to #' find the project root of the current working directory. It establishes the #' project root by looking for a `.here` file, an RStudio Project, a package #' `DESCRIPTION`, Git infrastructure, a `remake.yml` file, or a `.projectile` #' file. It then stores the active project for use for the remainder of the #' session. #' #' In general, end user scripts should not contain direct calls to #' `usethis::proj_*()` utility functions. They are internal functions that are #' exported for occasional interactive use or use in packages that extend #' usethis. End user code should call functions in #' [rprojroot](https://rprojroot.r-lib.org) or its simpler companion, #' [here](https://here.r-lib.org), to programmatically detect a project and #' build paths within it. #' #' @name proj_utils #' @family project functions #' @examples #' \dontrun{ #' ## see the active project #' proj_get() #' #' ## manually set the active project #' proj_set("path/to/target/project") #' #' ## build a path within the active project (both produce same result) #' proj_path("R/foo.R") #' proj_path("R", "foo", ext = "R") #' #' ## build a path within SOME OTHER project #' with_project("path/to/some/other/project", proj_path("blah.R")) #' #' ## convince yourself that with_project() temporarily changes the project #' with_project("path/to/some/other/project", print(proj_sitrep())) #' } NULL #' @describeIn proj_utils Retrieves the active project and, if necessary, #' attempts to set it in the first place. #' @export proj_get <- function() { # Called for first time so try working directory if (!proj_active()) { proj_set(".") } proj_get_() } #' @describeIn proj_utils Sets the active project. #' @param path Path to set. This `path` should exist or be `NULL`. #' @param force If `TRUE`, use this path without checking the usual criteria for #' a project. Use sparingly! The main application is to solve a temporary #' chicken-egg problem: you need to set the active project in order to add #' project-signalling infrastructure, such as initialising a Git repo or #' adding a `DESCRIPTION` file. 
#' @export proj_set <- function(path = ".", force = FALSE) { if (dir_exists(path %||% "") && is_in_proj(path)) { return(invisible(proj_get_())) } path <- proj_path_prep(path) if (is.null(path) || force) { proj_string <- if (is.null(path)) "" else path ui_done("Setting active project to {ui_value(proj_string)}") return(proj_set_(path)) } check_path_is_directory(path) new_project <- proj_find(path) if (is.null(new_project)) { ui_stop( "Path {ui_path(path)} does not appear to be inside a project or package." ) } proj_set(path = new_project, force = TRUE) } #' @describeIn proj_utils Builds a path within the active project returned by #' `proj_get()`. Thin wrapper around [fs::path()]. #' @inheritParams fs::path #' @export proj_path <- function(..., ext = "") { path_norm(path(proj_get(), ..., ext = ext)) } #' @describeIn proj_utils Runs code with a temporary active project and, #' optionally, working directory. It is an example of the `with_*()` functions #' in [withr](https://withr.r-lib.org). #' @param code Code to run with temporary active project #' @param setwd Whether to also temporarily set the working directory to the #' active project, if it is not `NULL` #' @param quiet Whether to suppress user-facing messages, while operating in the #' temporary active project #' @export with_project <- function(path = ".", code, force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE)) { local_project(path = path, force = force, setwd = setwd, quiet = quiet) force(code) } #' @describeIn proj_utils Sets an active project and, optionally, working #' directory until the current execution environment goes out of scope, e.g. #' the end of the current function or test. It is an example of the #' `local_*()` functions in [withr](https://withr.r-lib.org). #' @param .local_envir The environment to use for scoping. Defaults to current #' execution environment. #' @export local_project <- function(path = ".", force = FALSE, setwd = TRUE, quiet = getOption("usethis.quiet", default = FALSE), .local_envir = parent.frame()) { withr::local_options(usethis.quiet = quiet, .local_envir = .local_envir) old_project <- proj_get_() # this could be `NULL`, i.e. 
no active project withr::defer(proj_set(path = old_project, force = TRUE), envir = .local_envir) proj_set(path = path, force = force) temp_proj <- proj_get_() # this could be `NULL` if (isTRUE(setwd) && !is.null(temp_proj)) { withr::local_dir(temp_proj, .local_envir = .local_envir) } } ## usethis policy re: preparation of the path to active project proj_path_prep <- function(path) { if (is.null(path)) { return(path) } path <- path_abs(path) if (file_exists(path)) { path_real(path) } else { path } } ## usethis policy re: preparation of user-provided path to a resource on user's ## file system user_path_prep <- function(path) { ## usethis uses fs's notion of home directory ## this ensures we are consistent about that path_expand(path) } proj_rel_path <- function(path) { if (is_in_proj(path)) { as.character(path_rel(path, start = proj_get())) } else { path } } proj_crit <- function() { rprojroot::has_file(".here") | rprojroot::is_rstudio_project | rprojroot::is_r_package | rprojroot::is_git_root | rprojroot::is_remake_project | rprojroot::is_projectile_project } proj_find <- function(path = ".") { tryCatch( rprojroot::find_root(proj_crit(), path = path), error = function(e) NULL ) } possibly_in_proj <- function(path = ".") !is.null(proj_find(path)) is_package <- function(base_path = proj_get()) { res <- tryCatch( rprojroot::find_package_root_file(path = base_path), error = function(e) NULL ) !is.null(res) } check_is_package <- function(whos_asking = NULL) { if (is_package()) { return(invisible()) } message <- "Project {ui_value(project_name())} is not an R package." if (!is.null(whos_asking)) { message <- c( "{ui_code(whos_asking)} is designed to work with packages.", message ) } ui_stop(message) } check_is_project <- function() { if (!possibly_in_proj()) { ui_stop(" We do not appear to be inside a valid project or package Read more in the help for {ui_code(\"proj_get()\")}") } } proj_active <- function() !is.null(proj_get_()) is_in_proj <- function(path) { if (!proj_active()) { return(FALSE) } identical( proj_get(), ## use path_abs() in case path does not exist yet path_common(c(proj_get(), path_expand(path_abs(path)))) ) } package_data <- function(base_path = proj_get()) { desc <- desc::description$new(base_path) as.list(desc$get(desc$fields())) } project_name <- function(base_path = proj_get()) { ## escape hatch necessary to solve this chicken-egg problem: ## create_package() calls use_description(), which calls project_name() ## to learn package name from the path, in order to make DESCRIPTION ## and DESCRIPTION is how we recognize a package as a usethis project if (!possibly_in_proj(base_path)) { return(path_file(base_path)) } if (is_package(base_path)) { package_data(base_path)$Package } else { path_file(base_path) } } #' Activate a project #' #' Activates a project in usethis, R session, and (if relevant) RStudio senses. #' If you are in RStudio, this will open a new RStudio session. If not, it will #' change the working directory and [active project][proj_set()]. #' #' @param path Project directory #' @return Single logical value indicating if current session is modified. 
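#' @examples
#' \dontrun{
#' ## hypothetical path to an existing project
#' proj_activate("path/to/some/project")
#' }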
#' @export proj_activate <- function(path) { check_path_is_directory(path) path <- user_path_prep(path) if (rstudio_available() && rstudioapi::hasFun("openProject")) { ui_done("Opening {ui_path(path, base = NA)} in new RStudio session") rstudioapi::openProject(path, newSession = TRUE) invisible(FALSE) } else { proj_set(path) rel_path <- path_rel(proj_get(), path_wd()) if (rel_path != ".") { ui_done("Changing working directory to {ui_path(path, base = NA)}") setwd(proj_get()) } invisible(TRUE) } } usethis/R/template.R0000644000175000017500000000562314117743363014246 0ustar nileshnilesh#' Use a usethis-style template #' #' Creates a file from data and a template found in a package. Provides control #' over file name, the addition to `.Rbuildignore`, and opening the file for #' inspection. #' #' This function can be used as the engine for a templating function in other #' packages. The `template` argument is used along with the `package` argument #' to derive the path to your template file; it will be expected at #' `fs::path_package(package = package, "templates", template)`. We use #' `fs::path_package()` instead of `base::system.file()` so that path #' construction works even in a development workflow, e.g., works with #' `devtools::load_all()` or `pkgload::load_all()`. *Note this describes the #' behaviour of `fs::path_package()` in fs v1.2.7.9001 and higher.* #' #' To interpolate your data into the template, supply a list using #' the `data` argument. Internally, this function uses #' [whisker::whisker.render()] to combine your template file with your data. #' #' @param template Path to template file relative to `templates/` directory #' within `package`; see details. #' @param save_as Path of file to create, relative to root of active project. #' Defaults to `template` #' @param data A list of data passed to the template. #' @param ignore Should the newly created file be added to `.Rbuildignore`? #' @param open Open the newly created file for editing? Happens in RStudio, if #' applicable, or via [utils::file.edit()] otherwise. #' @param package Name of the package where the template is found. #' @return A logical vector indicating if file was modified. #' @export #' @examples #' \dontrun{ #' # Note: running this will write `NEWS.md` to your working directory #' use_template( #' template = "NEWS.md", #' data = list(Package = "acme", Version = "1.2.3"), #' package = "usethis" #' ) #' } use_template <- function(template, save_as = template, data = list(), ignore = FALSE, open = FALSE, package = "usethis") { template_contents <- render_template(template, data, package = package) new <- write_over(proj_path(save_as), template_contents) if (ignore) { use_build_ignore(save_as) } if (open && new) { edit_file(proj_path(save_as)) } invisible(new) } render_template <- function(template, data = list(), package = "usethis") { template_path <- find_template(template, package = package) strsplit(whisker::whisker.render(read_utf8(template_path), data), "\n")[[1]] } find_template <- function(template_name, package = "usethis") { check_installed(package) path <- tryCatch( path_package(package = package, "templates", template_name), error = function(e) "" ) if (identical(path, "")) { ui_stop( "Could not find template {ui_value(template_name)} \\ in package {ui_value(package)}." 
) } path } usethis/R/utils-rematch2.R0000644000175000017500000000213714117743363015273 0ustar nileshnilesh# inlined from # https://github.com/r-lib/rematch2/commit/aab858e3411810fa107d20db6f936c6b10cbdf3f # EXCEPT I don't return a tibble re_match <- function(text, pattern, perl = TRUE, ...) { stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern)) text <- as.character(text) match <- regexpr(pattern, text, perl = perl, ...) start <- as.vector(match) length <- attr(match, "match.length") end <- start + length - 1L matchstr <- substring(text, start, end) matchstr[ start == -1 ] <- NA_character_ res <- data.frame( stringsAsFactors = FALSE, .text = text, .match = matchstr ) if (!is.null(attr(match, "capture.start"))) { gstart <- attr(match, "capture.start") glength <- attr(match, "capture.length") gend <- gstart + glength - 1L groupstr <- substring(text, gstart, gend) groupstr[ gstart == -1 ] <- NA_character_ dim(groupstr) <- dim(gstart) res <- cbind(groupstr, res, stringsAsFactors = FALSE) } names(res) <- c(attr(match, "capture.names"), ".text", ".match") #class(res) <- c("tbl_df", "tbl", class(res)) res } usethis/R/rcpp.R0000644000175000017500000000646014117743363013377 0ustar nileshnilesh#' Use C, C++, RcppArmadillo, or RcppEigen #' #' Adds infrastructure commonly needed when using compiled code: #' * Creates `src/` #' * Adds required packages to `DESCRIPTION` #' * May create an initial placeholder `.c` or `.cpp` file #' * Creates `Makevars` and `Makevars.win` files (`use_rcpp_armadillo()` only) #' #' @param name If supplied, creates and opens `src/name.{c,cpp}`. #' #' @details #' #' When using compiled code, please note that there must be at least one file #' inside the `src/` directory prior to building the package. As a result, #' if an empty `src/` directory is detected, either a `.c` or `.cpp` file will #' be added. 
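#'
#' @examples
#' \dontrun{
#' ## hypothetical call from within a package project
#' use_rcpp("utils")  # creates and opens src/utils.cpp
#' }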
#' #' @export use_rcpp <- function(name = NULL) { check_is_package("use_rcpp()") check_uses_roxygen("use_rcpp()") use_src() use_dependency("Rcpp", "LinkingTo") use_dependency("Rcpp", "Imports") roxygen_ns_append("@importFrom Rcpp sourceCpp") && roxygen_remind() use_src_example_script(name, "cpp") invisible() } #' @rdname use_rcpp #' @export use_rcpp_armadillo <- function(name = NULL) { use_rcpp(name) use_dependency("RcppArmadillo", "LinkingTo") makevars_settings <- list( "CXX_STD" = "CXX11", "PKG_CXXFLAGS" = "$(SHLIB_OPENMP_CXXFLAGS)", "PKG_LIBS" = "$(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS)" ) use_makevars(makevars_settings) invisible() } #' @rdname use_rcpp #' @export use_rcpp_eigen <- function(name = NULL) { use_rcpp(name) use_dependency("RcppEigen", "LinkingTo") roxygen_ns_append("@import RcppEigen") && roxygen_remind() invisible() } #' @rdname use_rcpp #' @export use_c <- function(name = NULL) { check_is_package("use_c()") check_uses_roxygen("use_c()") use_src() use_src_example_script(name, "c") invisible(TRUE) } use_src <- function() { use_directory("src") use_git_ignore(c("*.o", "*.so", "*.dll"), "src") roxygen_ns_append(glue("@useDynLib {project_name()}, .registration = TRUE")) && roxygen_remind() invisible() } use_makevars <- function(settings = NULL) { use_directory("src") settings_list <- settings %||% list() check_is_named_list(settings_list) makevars_entries <- vapply(settings_list, glue_collapse, character(1)) makevars_content <- glue("{names(makevars_entries)} = {makevars_entries}") makevars_path <- proj_path("src", "Makevars") makevars_win_path <- proj_path("src", "Makevars.win") if (!file_exists(makevars_path) && !file_exists(makevars_win_path)) { write_utf8(makevars_path, makevars_content) file_copy(makevars_path, makevars_win_path) ui_done("Created {ui_path(makevars_path)} and {ui_path(makevars_win_path)} \\ with requested compilation settings.") } else { ui_todo("Ensure the following Makevars compilation settings are set for both \\ {ui_path(makevars_path)} and {ui_path(makevars_win_path)}:") ui_code_block( makevars_content ) edit_file(makevars_path) edit_file(makevars_win_path) } } use_src_example_script <- function(name = NULL, src_type = c("cpp", "c")) { src_type <- match.arg(src_type) if (!directory_has_files(path("src"))) { name <- name %||% "code" } if (!is.null(name)) { name <- slug(name, src_type) check_file_name(name) use_template( slug("code", src_type), path("src", name), open = is_interactive() ) } } usethis/R/description.R0000644000175000017500000001631314132400710014733 0ustar nileshnilesh#' Create or modify a DESCRIPTION file #' #' @description #' #' `use_description()` creates a `DESCRIPTION` file. Although mostly associated #' with R packages, a `DESCRIPTION` file can also be used to declare #' dependencies for a non-package project. Within such a project, #' `devtools::install_deps()` can then be used to install all the required #' packages. Note that, by default, `use_decription()` checks for a #' CRAN-compliant package name. You can turn this off with `check_name = FALSE`. #' #' usethis consults the following sources, in this order, to set `DESCRIPTION` #' fields: #' * `fields` argument of [create_package()] or [use_description()] #' * `getOption("usethis.description")` #' * Defaults built into usethis #' #' The fields discovered via options or the usethis package can be viewed with #' `use_description_defaults()`. 
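#'
#' For illustration, a hypothetical precedence scenario:
#'
#' ```
#' options(usethis.description = list(License = "MIT + file LICENSE"))
#' use_description(fields = list(License = "GPL (>= 3)"))
#' # the `fields` argument is consulted first, so DESCRIPTION ends up with
#' # License: GPL (>= 3)
#' ```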
#' #' If you create a lot of packages, consider storing personalized defaults as a #' named list in an option named `"usethis.description"`. Here's an example of #' code to include in `.Rprofile`, which can be opened via [edit_r_profile()]: #' #' ``` #' options( #' usethis.description = list( #' `Authors@R` = 'person("Jane", "Doe", email = "jane@example.com", #' role = c("aut", "cre"), #' comment = c(ORCID = "YOUR-ORCID-ID"))', #' License = "MIT + file LICENSE", #' Language = "es" #' ) #' ) #' ``` #' #' Prior to usethis v2.0.0, `getOption("devtools.desc")` was consulted for #' backwards compatibility, but now only the `"usethis.description"` option is #' supported. #' #' @param fields A named list of fields to add to `DESCRIPTION`, potentially #' overriding default values. See [use_description()] for how you can set #' personalized defaults using package options. #' @param check_name Whether to check if the name is valid for CRAN and throw an #' error if not. #' @param roxygen If `TRUE`, sets `RoxygenNote` to current roxygen2 version #' @seealso The [description chapter](https://r-pkgs.org/description.html) #' of [R Packages](https://r-pkgs.org) #' @export #' @examples #' \dontrun{ #' use_description() #' #' use_description(fields = list(Language = "es")) #' #' use_description_defaults() #' } use_description <- function(fields = list(), check_name = TRUE, roxygen = TRUE) { name <- project_name() if (check_name) { check_package_name(name) } desc <- build_description(name, roxygen = roxygen, fields = fields) tf <- withr::local_tempfile(pattern = glue("use_description-{name}-")) desc$write(file = tf) tf_contents <- read_utf8(tf) write_over(proj_path("DESCRIPTION"), tf_contents) # explicit check of "usethis.quiet" since I'm not doing the printing if (!getOption("usethis.quiet", default = FALSE)) { desc$print() } } #' @rdname use_description #' @param package Package name #' @export use_description_defaults <- function(package = NULL, roxygen = TRUE, fields = list()) { fields <- fields %||% list() check_is_named_list(fields) usethis <- list( Package = package %||% "valid.package.name.goes.here", Version = "0.0.0.9000", Title = "What the Package Does (One Line, Title Case)", Description = "What the package does (one paragraph).", "Authors@R" = 'person("First", "Last", , "first.last@example.com", c("aut", "cre"), comment = c(ORCID = "YOUR-ORCID-ID"))', License = "`use_mit_license()`, `use_gpl3_license()` or friends to pick a license", Encoding = "UTF-8" ) if (roxygen) { if (is_installed("roxygen2")) { roxygen_note <- utils::packageVersion("roxygen2") } else { roxygen_note <- "7.0.0" # version doesn't really matter } usethis$Roxygen <- "list(markdown = TRUE)" usethis$RoxygenNote <- roxygen_note } options <- getOption("usethis.description") %||% list() # A `person` object in Authors@R is not patched in by modifyList() modify_this <- function(orig, patch) { out <- utils::modifyList(orig, patch) if (inherits(patch$`Authors@R`, "person")) { #if (has_name(patch, "Authors@R")) { out$`Authors@R` <- patch$`Authors@R` } out } defaults <- modify_this(usethis, options) defaults <- modify_this(defaults, fields) # Ensure each element is a single string if (inherits(defaults$`Authors@R`, "person")) { defaults$`Authors@R` <- format(defaults$`Authors@R`, style = "R") defaults$`Authors@R` <- paste0(defaults$`Authors@R`, collapse = "\n") } defaults <- lapply(defaults, paste, collapse = "") compact(defaults) } build_description <- function(package, roxygen = TRUE, fields = list()) { fields <- 
use_description_defaults(package, roxygen = roxygen, fields) desc <- desc::desc(text = glue("{names(fields)}: {fields}")) tidy_desc(desc) desc } check_package_name <- function(name) { if (!valid_package_name(name)) { ui_stop(c( "{ui_value(name)} is not a valid package name. To be allowed on CRAN, it should:", "* Contain only ASCII letters, numbers, and '.'", "* Have at least two characters", "* Start with a letter", "* Not end with '.'" )) } } valid_package_name <- function(x) { grepl("^[a-zA-Z][a-zA-Z0-9.]+$", x) && !grepl("\\.$", x) } tidy_desc <- function(desc) { desc$set("Encoding" = "UTF-8") # Normalize all fields (includes reordering) # Wrap in a try() so it always succeeds, even if user options are malformed try(desc$normalize(), silent = TRUE) } # 2021-10-10, while adding use_description_list(), I moved this helper here # # this helper feels out-of-sync with current usethis practices around active # project and how overwrite is handled # # I won't change use_description_field() now, but use_description_list() is # implemented differently, more in keeping with our current style use_description_field <- function(name, value, overwrite = FALSE) { # account for `value`s produced via `glue::glue()` value <- as.character(value) curr <- desc::desc_get(name, file = proj_get())[[1]] curr <- gsub("^\\s*|\\s*$", "", curr) if (identical(curr, value)) { return(invisible()) } if (!is.na(curr) && !overwrite) { ui_stop( "{ui_field(name)} has a different value in DESCRIPTION. \\ Use {ui_code('overwrite = TRUE')} to overwrite." ) } ui_done("Setting {ui_field(name)} field in DESCRIPTION to {ui_value(value)}") desc::desc_set(name, value, file = proj_get()) invisible() } use_description_list <- function(key, values, append = TRUE, desc = NULL) { desc_provided <- !is.null(desc) desc <- desc %||% desc::desc(file = proj_get()) check_string(key) stopifnot(is.character(values)) if (append) { values <- unique(c(desc$get_list(key, default = ""), values)) } # formatting needs some improvements # https://github.com/r-lib/desc/issues/117 desc$set_list(key, values, sep = ",\n") if (desc_provided) { return(invisible()) } tf <- withr::local_tempfile( pattern = glue("use_description_list-{project_name()}-{path_sanitize(key, '-')}") ) desc$write(file = tf) tf_contents <- read_utf8(tf) write_over(proj_path("DESCRIPTION"), tf_contents) } usethis/R/documentation.R0000644000175000017500000000216114117743363015276 0ustar nileshnilesh#' Package-level documentation #' #' Adds a dummy `.R` file that will prompt roxygen to generate basic #' package-level documentation. If your package is named "foo", this will make #' help available to the user via `?foo` or `package?foo`. Once you call #' `devtools::document()`, roxygen will flesh out the `.Rd` file using data from #' the `DESCRIPTION`. That ensures you don't need to repeat the same information #' in multiple places. This `.R` file is also a good place for roxygen #' directives that apply to the whole package (vs. a specific function), such as #' global namespace tags like `@importFrom`. 
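#'
#' @examples
#' \dontrun{
#' use_package_doc()
#' }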
#' #' @seealso The [documentation chapter](https://r-pkgs.org/man.html) of [R #' Packages](https://r-pkgs.org) #' @inheritParams use_template #' @export use_package_doc <- function(open = rlang::is_interactive()) { check_is_package("use_package_doc()") use_template( "packagename-package.R", package_doc_path(), open = open ) } package_doc_path <- function() { path("R", paste0(project_name(), "-package"), ext = "R") } has_package_doc <- function() { file_exists(proj_path(package_doc_path())) } usethis/R/tidy-upkeep.R0000644000175000017500000000652114132450520014654 0ustar nileshnilesh#' @export #' @rdname tidyverse #' @param year Approximate year when you last touched this package. If `NULL`, #' the default, will give you a full set of actions to perform. use_tidy_upkeep_issue <- function(year = NULL) { check_is_package("use_tidy_upkeep_issue()") tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_line(" It is very unusual to open an upkeep issue on a repo you can't push to: {ui_value(tr$repo_spec)}") if (ui_nope("Do you really want to do this?")) { ui_oops("Cancelling.") return(invisible()) } } checklist <- upkeep_checklist(year) gh <- gh_tr(tr) issue <- gh( "POST /repos/{owner}/{repo}/issues", title = glue("Upkeep for {project_name()}"), body = paste0(checklist, "\n", collapse = "") ) view_url(issue$html_url) } upkeep_checklist <- function(year = NULL) { year <- year %||% 2000 is_rstudio_funded <- is_rstudio_funded() is_in_rstudio_org <- is_in_rstudio_org() bullets <- c() if (year <= 2000) { bullets <- c(bullets, "Pre-history", "", todo("`usethis::use_readme_rmd()`"), todo("`usethis::use_roxygen_md()`"), todo("`usethis::use_github_links()`"), todo("`usethis::use_pkgdown_github_pages()`"), todo("`usethis::use_tidy_labels()`"), todo("`usethis::use_tidy_style()`"), todo("`usethis::use_tidy_description()`"), todo("`urlchecker::url_check()`"), "" ) } if (year <= 2020) { bullets <- c(bullets, "2020", "", todo(" `usethis::use_package_doc()` Consider letting usethis manage your `@importFrom` directives here. `usethis::use_import_from()` is handy for this."), todo(" `usethis::use_testthat(3)` and upgrade to 3e, \\ [testthat 3e vignette](https://testthat.r-lib.org/articles/third-edition.html)"), todo(" Align the names of `R/` files and `test/` files for workflow happiness. 
`usethis::rename_files()` can be helpful."), "" ) } if (year <= 2021) { bullets <- c(bullets, "2021", "", todo("`usethis::use_tidy_description()`", year > 2000), todo("`usethis::use_tidy_dependencies()`"), todo(" `usethis::use_tidy_github_actions()` and update artisanal actions to \\ use `setup-r-dependencies`"), todo("Remove check environments section from `cran-comments.md`"), todo("Bump required R version in DESCRIPTION to {tidy_minimum_r_version()}"), todo(" Use lifecycle instead of artisanal deprecation messages, as described \\ in [Communicate lifecycle changes in your functions](https://lifecycle.r-lib.org/articles/communicate.html)"), todo(" Add RStudio to DESCRIPTION as funder, if appropriate", !is_rstudio_funded && is_in_rstudio_org), "" ) } bullets } # https://www.tidyverse.org/blog/2019/04/r-version-support/ tidy_minimum_r_version <- function() { con <- curl::curl("https://api.r-hub.io/rversions/r-oldrel/4") withr::defer(close(con)) # I do not want a failure here to make use_tidy_upkeep_issue() fail json <- tryCatch(readLines(con, warn = FALSE), error = function(e) NULL) if (is.null(json)) { oldrel_4 <- "3.4" } else { version <- jsonlite::fromJSON(json)$version oldrel_4 <- re_match(version, "[0-9]+[.][0-9]+")$.match } oldrel_4 } usethis/R/usethis-defunct.R0000644000175000017500000001635414131645451015543 0ustar nileshnilesh#' Defunct and deprecated functions in usethis #' #' These functions have either been deprecated or removed from usethis. #' #' @name usethis-defunct #' @keywords internal NULL #' @section `pr_pull_upstream()`: #' This function has been replaced by [pr_merge_main()]. #' @rdname usethis-defunct #' @export pr_pull_upstream <- function() { lifecycle::deprecate_stop( when = "2.0.0", what = "pr_pull_upstream()", with = "pr_merge_main()", ) } #' @section `pr_sync()`: #' Bundling these operations together did not seem justified, in terms of how #' rarely this comes up and, when it does, how likely merge conflicts are. #' Users of `pr_sync()` should implement these steps "by hand": #' * (Check you are on a PR branch) #' * `pr_pull()` #' * `pr_merge_main()`, deal with any merge conflicts, if any #' * `pr_push()` #' @export #' @rdname usethis-defunct pr_sync <- function() { details <- glue(" Sync a PR with: * {ui_code('pr_pull()')} * {ui_code('pr_merge_main()')} * {ui_code('pr_push()')}") lifecycle::deprecate_stop( when = "2.0.0", what = "pr_sync()", details = details ) } #' @section `browse_github_token()`, `browse_github_pat()`: #' These functions have been replaced by [create_github_token()]. #' @rdname usethis-defunct #' @export browse_github_token <- function(...) { lifecycle::deprecate_stop( when = "2.0.0", what = "browse_github_token()", with = "create_github_token()" ) } #' @rdname usethis-defunct #' @export browse_github_pat <- function(...) { lifecycle::deprecate_stop( "2.0.0", what = "browse_github_pat()", with = "create_github_token()" ) } #' @section `github_token()`: #' All implicit and explicit token discovery routes through [gh::gh_token()] #' now. #' @rdname usethis-defunct #' @export github_token <- function() { details <- glue(" Call {ui_code('gh::gh_token()')} to retrieve a GitHub personal access token Call {ui_code('gh_token_help()')} if you need help getting or configuring \\ your token") lifecycle::deprecate_stop( "2.0.0", what = "github_token()", details = details ) } #' @section `git_branch_default()`: #' Please call [git_default_branch()] instead. In hindsight, that is a better #' name for this function. 
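#'
#' For illustration (hypothetical interactive session):
#'
#' ```
#' git_branch_default()   # soft-deprecated alias; may emit a lifecycle warning
#' git_default_branch()   # preferred replacement
#' ```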
#' @export #' @rdname usethis-defunct git_branch_default <- function() { lifecycle::deprecate_soft("2.1.0", "git_branch_default()", "git_default_branch()") git_default_branch() } #' @section `use_tidy_labels()`: #' Please call [use_tidy_github_labels()] instead. In hindsight, that is a #' better name for this function. #' @export #' @rdname usethis-defunct use_tidy_labels <- function() { lifecycle::deprecate_warn("2.1.0", "use_tidy_labels()", "use_tidy_github_labels()") git_default_branch() } #' @export #' @rdname usethis-defunct use_tidy_ci <- function(...) { lifecycle::deprecate_stop("2.1.0", "use_tidy_ci()", "use_tidy_github_actions()") } #' @export #' @rdname usethis-defunct use_github_action_check_full <- function(save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE, repo_spec = NULL) { details <- glue(" It is overkill for the vast majority of R packages. The \"check-full\" workflow is among those configured by \\ {ui_code('use_tidy_github_actions()')}. If you really want it, request it by name with \\ {ui_code('use_github_action()')}.") lifecycle::deprecate_stop( "2.1.0", "use_github_action_check_full()", details = details ) } # git2r ------------------------------------------------------------------------ git2r_explanation <- glue(" usethis now uses the gert package for Git operations, instead of git2r, and gert relies on the credentials package for auth. Therefore git2r credentials are no longer accepted.") deprecate_warn_credentials <- function(whos_asking, details = NULL) { whos_asking <- sub("[()]+$", "", whos_asking) what <- glue("{whos_asking}(credentials = )") lifecycle::deprecate_warn( "2.0.0", what, details = details %||% git2r_explanation ) } #' Produce or register credentials for git2r #' #' #' @description #' #' `r lifecycle::badge("deprecated")` #' #' In usethis v2.0.0, usethis switched from git2r to gert (+ credentials) for #' all Git operations. This pair of packages (gert + credentials) is designed to #' discover and use the same credentials as command line Git. As a result, a #' great deal of credential-handling assistance has been removed from usethis, #' primarily around SSH keys. #' #' If you have credential problems, focus your troubleshooting on getting the #' credentials package to find your credentials. The [introductory #' vignette](https://docs.ropensci.org/credentials/articles/intro.html) #' is a good place to start. #' #' If you use the HTTPS protocol (which we recommend), a GitHub personal access #' token will satisfy all auth needs, for both Git and the GitHub API, and is #' therefore the easiest approach to get working. See [gh_token_help()] for #' more. #' #' @param protocol Deprecated. #' @param auth_token Deprecated. #' @param credentials Deprecated. #' #' @return These functions raise a warning and return an invisible `NULL`. 
#' @export git_credentials <- function(protocol = deprecated(), auth_token = deprecated()) { lifecycle::deprecate_warn( "2.0.0", "git_credentials()", details = git2r_explanation ) invisible() } #' @rdname git_credentials #' @export use_git_credentials <- function(credentials = deprecated()) { lifecycle::deprecate_warn( "2.0.0", "use_git_credentials()", details = git2r_explanation ) invisible() } # repo_spec, host, auth_token -------------------------------------------------- deprecate_warn_host <- function(whos_asking, details = NULL) { whos_asking <- sub("[()]+$", "", whos_asking) what <- glue("{whos_asking}(host = )") host_explanation <- glue(" usethis now determines the {ui_code('host')} from the current project's \\ Git remotes. The {ui_code('host')} argument is ignored and will eventually be removed.") lifecycle::deprecate_warn( "2.0.0", what, details = details %||% host_explanation ) } deprecate_warn_auth_token <- function(whos_asking, details = NULL) { whos_asking <- sub("[()]+$", "", whos_asking) what <- glue("{whos_asking}(auth_token = )") auth_token_explanation <- glue(" usethis now delegates token lookup to the gh package, which retrieves \\ credentials based on the targeted host URL. This URL is determined by the current project's Git remotes. The {ui_code('auth_token')} argument is ignored and will eventually be \\ removed.") lifecycle::deprecate_warn( "2.0.0", what, details = details %||% auth_token_explanation ) } deprecate_warn_repo_spec <- function(whos_asking, details = NULL) { whos_asking <- sub("[()]+$", "", whos_asking) what <- glue("{whos_asking}(repo_spec = )") repo_spec_explanation <- glue(" usethis now consults the current project's Git remotes to determine the \\ target repo. The {ui_code('repo_spec')} argument is ignored and will eventually be \\ removed.") lifecycle::deprecate_warn( "2.0.0", what, details = details %||% repo_spec_explanation ) } usethis/R/utils-git.R0000644000175000017500000002546514153502006014345 0ustar nileshnilesh# gert ------------------------------------------------------------------------- gert_shush <- function(expr, regexp) { stopifnot(is.character(regexp)) withCallingHandlers( gertMessage = function(cnd) { m <- map_lgl(regexp, ~ grepl(.x, cnd_message(cnd), perl = TRUE)) if (any(m)) { cnd_muffle(cnd) } }, expr ) } # Repository ------------------------------------------------------------------- git_repo <- function() { check_uses_git() proj_get() } uses_git <- function() { repo <- tryCatch( gert::git_find(proj_get()), error = function(e) NULL ) !is.null(repo) } check_uses_git <- function() { if (uses_git()) { return(invisible()) } ui_stop(c( "Cannot detect that project is already a Git repository.", "Do you need to run {ui_code('use_git()')}?" )) } git_init <- function() { gert::git_init(proj_get()) } # Config ----------------------------------------------------------------------- # `where = "de_facto"` means look at the values that are "in force", i.e. where # local repo variables override global user-level variables, when both are # defined # # `where = "local"` is strict, i.e. 
it only returns a value that is in the local # config git_cfg_get <- function(name, where = c("de_facto", "local", "global")) { where <- match.arg(where) if (where == "global" || !uses_git()) { dat <- gert::git_config_global() } else { dat <- gert::git_config(repo = git_repo()) } if (where == "local") { dat <- dat[dat$level == "local", ] } out <- dat$value[tolower(dat$name) == tolower(name)] if (length(out) > 0) out else NULL } # ensures that core.excludesFile is configured # if configured, leave well enough alone # if not, check for existence of one of the Usual Suspects; if found, configure # otherwise, configure as path_home(".gitignore") ensure_core_excludesFile <- function() { path <- git_ignore_path(scope = "user") if (!is.null(path)) { return(invisible()) } # .gitignore is most common, but .gitignore_global appears in prominent # places --> so we allow the latter, but prefer the former path <- path_first_existing(path_home(c(".gitignore", ".gitignore_global"))) %||% path_home(".gitignore") if (!is_windows()) { # express path relative to user's home directory, except on Windows path <- path("~", path_rel(path, path_home())) } ui_done("Configuring {ui_field('core.excludesFile')}: {ui_path(path)}") gert::git_config_global_set("core.excludesFile", path) invisible() } # Status------------------------------------------------------------------------ git_status <- function(untracked) { stopifnot(is_true(untracked) || is_false(untracked)) st <- gert::git_status(repo = git_repo()) if (!untracked) { st <- st[st$status != "new", ] } st } # Commit ----------------------------------------------------------------------- git_ask_commit <- function(message, untracked, paths = NULL) { if (!is_interactive() || !uses_git()) { return(invisible()) } # this is defined here to encourage all commits to route through this function git_commit <- function(paths, message) { repo <- git_repo() ui_done("Adding files") gert::git_add(paths, repo = repo) ui_done("Making a commit with message {ui_value(message)}") gert::git_commit(message, repo = repo) } uncommitted <- git_status(untracked)$file if (is.null(paths)) { paths <- uncommitted } else { paths <- intersect(paths, uncommitted) } n <- length(paths) if (n == 0) { return(invisible()) } paths <- sort(paths) ui_paths <- map_chr(paths, ui_path) if (n > 10) { ui_paths <- c(ui_paths[1:10], "...") } if (n == 1) { file_hint <- "There is 1 uncommitted file:" } else { file_hint <- "There are {n} uncommitted files:" } ui_line(c( file_hint, paste0("* ", ui_paths) )) if (ui_yeah("Is it ok to commit {if (n == 1) 'it' else 'them'}?")) { git_commit(paths, message) } invisible() } git_uncommitted <- function(untracked = FALSE) { nrow(git_status(untracked)) > 0 } challenge_uncommitted_changes <- function(untracked = FALSE, msg = NULL) { if (!uses_git()) { return(invisible()) } if (rstudioapi::hasFun("documentSaveAll")) { rstudioapi::documentSaveAll() } default_msg <- " There are uncommitted changes, which may cause problems or be lost when \\ we push, pull, switch, or compare branches" msg <- glue(msg %||% default_msg) if (git_uncommitted(untracked = untracked)) { if (ui_yeah("{msg}\nDo you want to proceed anyway?")) { return(invisible()) } else { ui_stop("Uncommitted changes. 
Please commit before continuing.") } } } git_conflict_report <- function() { st <- git_status(untracked = FALSE) conflicted <- st$file[st$status == "conflicted"] n <- length(conflicted) if (n == 0) { return(invisible()) } conflicted_paths <- map_chr(conflicted, ui_path) ui_line(c( "There are {n} conflicted files:", paste0("* ", conflicted_paths) )) msg <- glue(" Are you ready to sort this out? If so, we will open the conflicted files for you to edit.") yes <- "Yes, I'm ready to resolve the merge conflicts." no <- "No, I want to abort this merge." if (ui_yeah(msg, yes = yes, no = no, shuffle = FALSE)) { ui_silence(purrr::walk(conflicted, edit_file)) ui_stop(" Please fix each conflict, save, stage, and commit. To back out of this merge, run {ui_code('gert::git_merge_abort()')} \\ (in R) or {ui_code('git merge --abort')} (in the shell).") } else { gert::git_merge_abort(repo = git_repo()) ui_stop("Abandoning the merge, since it will cause merge conflicts") } } # Remotes ---------------------------------------------------------------------- ## remref --> remote, branch git_parse_remref <- function(remref) { regex <- paste0("^", names(git_remotes()), collapse = "|") regex <- glue("({regex})/(.*)") list(remote = sub(regex, "\\1", remref), branch = sub(regex, "\\2", remref)) } remref_remote <- function(remref) git_parse_remref(remref)$remote remref_branch <- function(remref) git_parse_remref(remref)$branch # Pull ------------------------------------------------------------------------- # Pull from remref or upstream tracking. If neither given/exists, do nothing. # Therefore, this does less than `git pull`. git_pull <- function(remref = NULL, verbose = TRUE) { repo <- git_repo() branch <- git_branch() remref <- remref %||% git_branch_tracking(branch) if (is.na(remref)) { if (verbose) { ui_done("No remote branch to pull from for {ui_value(branch)}.") } return(invisible()) } stopifnot(is_string(remref)) if (verbose) { ui_done("Pulling from {ui_value(remref)}.") } gert::git_fetch( remote = remref_remote(remref), refspec = remref_branch(remref), repo = repo, verbose = FALSE ) # this is pretty brittle, because I've hard-wired these messages # https://github.com/r-lib/gert/blob/main/R/merge.R # but at time of writing, git_merge() offers no verbosity control gert_shush( regexp = c( "Already up to date, nothing to merge", "Performing fast-forward merge, no commit needed" ), gert::git_merge(remref, repo = repo) ) st <- git_status(untracked = TRUE) if (any(st$status == "conflicted")) { git_conflict_report() } invisible() } # Branch ------------------------------------------------------------------ git_branch <- function() { info <- gert::git_info(repo = git_repo()) branch <- info$shorthand if (identical(branch, "HEAD")) { ui_stop("Detached head; can't continue") } if (is.na(branch)) { ui_stop("On an unborn branch -- do you need to make an initial commit?") } branch } git_branch_tracking <- function(branch = git_branch()) { repo <- git_repo() if (!gert::git_branch_exists(branch, local = TRUE, repo = repo)) { ui_stop("There is no local branch named {ui_value(branch)}") } gbl <- gert::git_branch_list(local = TRUE, repo = repo) sub("^refs/remotes/", "", gbl$upstream[gbl$name == branch]) } git_branch_compare <- function(branch = git_branch(), remref = NULL) { remref <- remref %||% git_branch_tracking(branch) gert::git_fetch( remote = remref_remote(remref), refspec = remref_branch(remref), repo = git_repo(), verbose = FALSE ) out <- gert::git_ahead_behind(upstream = remref, ref = branch, repo = git_repo()) 
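# gert reports "ahead" (commits only on the local branch) and "behind" (commits
# only on remref); rename them for check_branch_up_to_date() below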
list(local_only = out$ahead, remote_only = out$behind) } # Checks ------------------------------------------------------------------ check_current_branch <- function(is = NULL, is_not = NULL, message = NULL) { gb <- git_branch() if (!is.null(is)) { check_string(is) if (gb == is) { return(invisible()) } else { msg <- message %||% "Must be on branch {ui_value(is)}, not {ui_value(gb)}." ui_stop(msg) } } if (!is.null(is_not)) { check_string(is_not) if (gb != is_not) { return(invisible()) } else { msg <- message %||% "Can't be on branch {ui_value(gb)}." ui_stop(msg) } } invisible() } # examples of remref: upstream/main, origin/foofy check_branch_up_to_date <- function(direction = c("pull", "push"), remref = NULL, use = NULL) { direction <- match.arg(direction) branch <- git_branch() remref <- remref %||% git_branch_tracking(branch) use <- use %||% switch(direction, pull = "git pull", push = "git push") if (is.na(remref)) { ui_done("Local branch {ui_value(branch)} is not tracking a remote branch.") return(invisible()) } if (direction == "pull") { ui_done(" Checking that local branch {ui_value(branch)} has the changes \\ in {ui_value(remref)}") } else { ui_done(" Checking that remote branch {ui_value(remref)} has the changes \\ in {ui_value(branch)}") } comparison <- git_branch_compare(branch, remref) # TODO: properly pluralize "commit(s)" when I switch to cli if (direction == "pull") { if (comparison$remote_only == 0) { return(invisible()) } else { ui_stop(" Local branch {ui_value(branch)} is behind {ui_value(remref)} by \\ {comparison$remote_only} commit(s). Please use {ui_code(use)} to update.") } } else { if (comparison$local_only == 0) { return(invisible()) } else { # TODO: consider offering to push for them? ui_stop(" Local branch {ui_value(branch)} is ahead of {ui_value(remref)} by \\ {comparison$local_only} commit(s). Please use {ui_code(use)} to update.") } } } check_branch_pulled <- function(remref = NULL, use = NULL) { check_branch_up_to_date(direction = "pull", remref = remref, use = use) } check_branch_pushed <- function(remref = NULL, use = NULL) { check_branch_up_to_date(direction = "push", remref = remref, use = use) } usethis/R/edit.R0000644000175000017500000001634314153502006013344 0ustar nileshnilesh#' Open file for editing #' #' Opens a file for editing in RStudio, if that is the active environment, or #' via [utils::file.edit()] otherwise. If the file does not exist, it is #' created. If the parent directory does not exist, it is also created. #' `edit_template()` specifically opens templates in `inst/templates` for use #' with [use_template()]. #' #' @param path Path to target file. #' @param open Whether to open the file for interactive editing. #' @return Target path, invisibly. #' @export #' @keywords internal #' #' @examples #' \dontrun{ #' edit_file("DESCRIPTION") #' edit_file("~/.gitconfig") #' } edit_file <- function(path, open = rlang::is_interactive()) { open <- open && is_interactive() path <- user_path_prep(path) create_directory(path_dir(path)) file_create(path) if (!open) { ui_todo("Edit {ui_path(path)}") return(invisible(path)) } ui_todo("Modify {ui_path(path)}") if (rstudio_available() && rstudioapi::hasFun("navigateToFile")) { rstudioapi::navigateToFile(path) } else { utils::file.edit(path) } invisible(path) } #' @param template The target template file. If not specified, existing template #' files are offered for interactive selection. 
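#'   For example (illustrative template name):
#'   ```
#'   edit_template("my-template.R")  # opens/creates inst/templates/my-template.R
#'   ```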
#' @export #' @rdname edit_file edit_template <- function(template = NULL, open = rlang::is_interactive()) { check_is_package("edit_template()") if (is.null(template)) { ui_info("No template specified... checking {ui_path('inst/templates')}") template <- choose_template() } if (is_empty(template)) { return(invisible()) } path <- proj_path("inst", "templates", template) edit_file(path, open) } choose_template <- function() { if (!is_interactive()) { return(character()) } templates <- path_file(dir_ls(proj_path("inst", "templates"), type = "file")) if (is_empty(templates)) { return(character()) } choice <- utils::menu( choices = templates, title = "Which template do you want to edit? (0 to exit)" ) templates[choice] } #' Open configuration files #' #' * `edit_r_profile()` opens `.Rprofile` #' * `edit_r_environ()` opens `.Renviron` #' * `edit_r_makevars()` opens `.R/Makevars` #' * `edit_git_config()` opens `.gitconfig` or `.git/config` #' * `edit_git_ignore()` opens global (user-level) gitignore file and ensures #' its path is declared in your global Git config. #' * `edit_pkgdown_config` opens the pkgdown YAML configuration file for the #' current Project. #' * `edit_rstudio_snippets()` opens RStudio's snippet config for the given type. #' * `edit_rstudio_prefs()` opens RStudio's preference file. #' #' The `edit_r_*()` functions consult R's notion of user's home directory. #' The `edit_git_*()` functions (and \pkg{usethis} in general) inherit home #' directory behaviour from the \pkg{fs} package, which differs from R itself #' on Windows. The \pkg{fs} default is more conventional in terms of the #' location of user-level Git config files. See [fs::path_home()] for more #' details. #' #' Files created by `edit_rstudio_snippets()` will *mask*, not supplement, #' the built-in default snippets. If you like the built-in snippets, copy them #' and include with your custom snippets. #' #' @return Path to the file, invisibly. #' #' @param scope Edit globally for the current __user__, or locally for the #' current __project__ #' @name edit NULL #' @export #' @rdname edit edit_r_profile <- function(scope = c("user", "project")) { path <- scoped_path_r(scope, ".Rprofile", envvar = "R_PROFILE_USER") edit_file(path) ui_todo("Restart R for changes to take effect") invisible(path) } #' @export #' @rdname edit edit_r_environ <- function(scope = c("user", "project")) { path <- scoped_path_r(scope, ".Renviron", envvar = "R_ENVIRON_USER") edit_file(path) ui_todo("Restart R for changes to take effect") invisible(path) } #' @export #' @rdname edit edit_r_buildignore <- function() { check_is_package("edit_r_buildignore()") edit_file(proj_path(".Rbuildignore")) } #' @export #' @rdname edit edit_r_makevars <- function(scope = c("user", "project")) { path <- scoped_path_r(scope, ".R", "Makevars") edit_file(path) } #' @export #' @rdname edit #' @param type Snippet type (case insensitive text). 
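#'   For example (illustrative calls; input is lower-cased before matching):
#'   ```
#'   edit_rstudio_snippets("markdown")
#'   edit_rstudio_snippets("Markdown")  # equivalent
#'   ```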
edit_rstudio_snippets <- function(type = c( "r", "markdown", "c_cpp", "css", "html", "java", "javascript", "python", "sql", "stan", "tex" )) { type <- tolower(type) type <- match.arg(type) file <- path_ext_set(type, "snippets") # Snippet location changed in 1.3: # https://blog.rstudio.com/2020/02/18/rstudio-1-3-preview-configuration/ new_rstudio <- !rstudioapi::isAvailable() || rstudioapi::getVersion() >= "1.3.0" old_path <- path_home_r(".R", "snippets", file) new_path <- rstudio_config_path("snippets", file) # Mimic RStudio behaviour: copy to new location if you edit if (new_rstudio && file_exists(old_path) && !file_exists(new_path)) { create_directory(path_dir(new_path)) file_copy(old_path, new_path) ui_done("Copying snippets file to {ui_path(new_path)}") } path <- if (new_rstudio) new_path else old_path if (!file_exists(path)) { ui_done("New snippet file at {ui_path(path)}") ui_info(c( "This masks the default snippets for {ui_field(type)}.", "Delete this file and restart RStudio to restore the default snippets." )) } edit_file(path) } #' @export #' @rdname edit edit_rstudio_prefs <- function() { path <- rstudio_config_path("rstudio-prefs.json") edit_file(path) ui_todo("Restart RStudio for changes to take effect") invisible(path) } scoped_path_r <- function(scope = c("user", "project"), ..., envvar = NULL) { scope <- match.arg(scope) # Try environment variable in user scopes if (scope == "user" && !is.null(envvar)) { env <- Sys.getenv(envvar, unset = "") if (!identical(env, "")) { return(user_path_prep(env)) } } root <- switch(scope, user = path_home_r(), project = proj_get() ) path(root, ...) } # git paths --------------------------------------------------------------- # Note that on windows R's definition of ~ is in a nonstandard place, # so it is important to use path_home(), not path_home_r() #' @export #' @rdname edit edit_git_config <- function(scope = c("user", "project")) { scope <- match.arg(scope) path <- switch( scope, user = path_home(".gitconfig"), project = proj_path(".git", "config") ) invisible(edit_file(path)) } #' @export #' @rdname edit edit_git_ignore <- function(scope = c("user", "project")) { scope <- match.arg(scope) if (scope == "user") { ensure_core_excludesFile() } file <- git_ignore_path(scope) if (scope == "user" && !file_exists(file)) { git_vaccinate() } invisible(edit_file(file)) } git_ignore_path <- function(scope = c("user", "project")) { scope <- match.arg(scope) switch( scope, user = git_cfg_get("core.excludesFile", where = "global"), project = proj_path(".gitignore") ) } # pkgdown --------------------------------------------------------------- #' @export #' @rdname edit edit_pkgdown_config <- function() { path <- pkgdown_config_path() if (is.null(path)) { ui_oops("No pkgdown config file found in current Project.") } else { invisible(edit_file(path)) } } usethis/R/roxygen.R0000644000175000017500000000537414153502006014114 0ustar nileshnilesh#' Use roxygen2 with markdown #' #' If you are already using roxygen2, but not with markdown, you'll need to use #' [roxygen2md](https://roxygen2md.r-lib.org) to convert existing Rd #' expressions to markdown. The conversion is not perfect, so make sure #' to check the results. 
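#'
#' A sketch of the DESCRIPTION fields this sets on first use (the version shown
#' is illustrative; `RoxygenNote` records whatever roxygen2 you have installed):
#' ```
#' Roxygen: list(markdown = TRUE)
#' RoxygenNote: 7.1.2
#' ```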
#' #' @export use_roxygen_md <- function() { check_installed("roxygen2") if (!uses_roxygen()) { roxy_ver <- as.character(utils::packageVersion("roxygen2")) use_description_field("Roxygen", "list(markdown = TRUE)") use_description_field("RoxygenNote", roxy_ver) ui_todo("Run {ui_code('devtools::document()')}") } else if (!uses_roxygen_md()) { use_description_field("Roxygen", "list(markdown = TRUE)") if (!uses_git()) { ui_todo("Use git to ensure that you don't lose any data") } check_installed("roxygen2md") ui_todo( "Run {ui_code('roxygen2md::roxygen2md()')} to convert existing Rd commands to RMarkdown" ) ui_todo("Run {ui_code('devtools::document()')} when you're done.") } invisible() } uses_roxygen_md <- function() { if (!desc::desc_has_fields("Roxygen", file = proj_get())) { return(FALSE) } roxygen <- desc::desc_get("Roxygen", file = proj_get())[[1]] value <- tryCatch( { eval(parse(text = roxygen)) }, error = function(e) { NULL } ) isTRUE(value$markdown) } uses_roxygen <- function() { desc::desc_has_fields("RoxygenNote", file = proj_get()) } roxygen_ns_append <- function(tag) { block_append( glue("{ui_value(tag)}"), glue("#' {tag}"), path = proj_path(package_doc_path()), block_start = "## usethis namespace: start", block_end = "## usethis namespace: end", block_suffix = "NULL", sort = TRUE ) } roxygen_ns_show <- function() { block_show( path = proj_path(package_doc_path()), block_start = "## usethis namespace: start", block_end = "## usethis namespace: end" ) } roxygen_remind <- function() { ui_todo("Run {ui_code('devtools::document()')} to update {ui_path('NAMESPACE')}") TRUE } roxygen_update_ns <- function(load = is_interactive()) { ui_done("Writing {ui_path('NAMESPACE')}") utils::capture.output( suppressMessages(roxygen2::roxygenise(proj_get(), "namespace")) ) if (load) { ui_done("Loading {project_name()}") pkgload::load_all(path = proj_get(), quiet = TRUE) } TRUE } # Checkers ---------------------------------------------------------------- check_uses_roxygen <- function(whos_asking) { force(whos_asking) if (uses_roxygen()) { return(invisible()) } ui_stop( " Project {ui_value(project_name())} does not use roxygen2. {ui_code(whos_asking)} can not work without it. You might just need to run {ui_code('devtools::document()')} once, then try again. " ) } usethis/R/jenkins.R0000644000175000017500000000116513764577255014105 0ustar nileshnilesh#' Create Jenkinsfile for Jenkins CI Pipelines #' #' `use_jenkins()` adds a basic Jenkinsfile for R packages to the project root #' directory. The Jenkinsfile stages take advantage of calls to `make`, and so #' calling this function will also run `use_make()` if a Makefile does not #' already exist at the project root. #' #' @seealso The [documentation on Jenkins #' Pipelines](https://www.jenkins.io/doc/book/pipeline/jenkinsfile/). #' @seealso [use_make()] #' @export use_jenkins <- function() { use_make() use_template( "Jenkinsfile", data = list(name = project_name()) ) use_build_ignore("Jenkinsfile") } usethis/R/browse.R0000644000175000017500000002025714132400710013713 0ustar nileshnilesh#' Visit important project-related web pages #' #' These functions take you to various web pages associated with a project #' (often, an R package) and return the target URL(s) invisibly. To form #' these URLs we consult: #' * Git remotes configured for the active project that appear to be hosted on #' a GitHub deployment #' * DESCRIPTION file for the active project or the specified `package`. 
The #' DESCRIPTION file is sought first in the local package library and then #' on CRAN. #' * Fixed templates: #' - Travis CI: `https://travis-ci.{EXT}/{OWNER}/{PACKAGE}` #' - Circle CI: `https://circleci.com/gh/{OWNER}/{PACKAGE}` #' - CRAN landing page: `https://cran.r-project.org/package={PACKAGE}` #' - GitHub mirror of a CRAN package: `https://github.com/cran/{PACKAGE}` #' Templated URLs aren't checked for existence, so there is no guarantee #' there will be content at the destination. #' #' @details #' * `browse_package()`: Assembles a list of URLs and lets user choose one to #' visit in a web browser. In a non-interactive session, returns all #' discovered URLs. #' * `browse_project()`: Thin wrapper around `browse_package()` that always #' targets the active usethis project. #' * `browse_github()`: Visits a GitHub repository associated with the project. #' In the case of a fork, you might be asked to specify if you're interested #' in the source repo or your fork. #' * `browse_github_issues()`: Visits the GitHub Issues index or one specific #' issue. #' * `browse_github_pulls()`: Visits the GitHub Pull Request index or one #' specific pull request. #' * `browse_travis()`: Visits the project's page on #' [Travis CI](https://www.travis-ci.com/). #' * `browse_circleci()`: Visits the project's page on #' [Circle CI](https://circleci.com). #' * `browse_cran()`: Visits the package on CRAN, via the canonical URL. #' #' @param package Name of package. If `NULL`, the active project is targeted, #' regardless of whether it's an R package or not. #' @param number Optional, to specify an individual GitHub issue or pull #' request. Can be a number or `"new"`. #' #' @examples #' # works on the active project #' # browse_project() #' #' browse_package("httr") #' browse_github("gh") #' browse_github_issues("fs") #' browse_github_issues("fs", 1) #' browse_github_pulls("curl") #' browse_github_pulls("curl", 183) #' browse_travis("gert", ext = "org") #' browse_cran("MASS") #' @name browse-this NULL #' @export #' @rdname browse-this browse_package <- function(package = NULL) { stopifnot(is.null(package) || is_string(package)) if (is.null(package)) { check_is_project() } urls <- character() details <- list() if (is.null(package) && uses_git()) { grl <- github_remote_list(these = NULL) ord <- c( which(grl$remote == "origin"), which(grl$remote == "upstream"), which(!grl$remote %in% c("origin", "upstream")) ) grl <- grl[ord, ] grl <- set_names(grl$url, nm = grl$remote) parsed <- parse_github_remotes(grl) urls <- c(urls, glue_data(parsed, "https://{host}/{repo_owner}/{repo_name}")) details <- c(details, map(parsed$name, ~ glue("{ui_value(.x)} remote"))) } desc_urls_dat <- desc_urls(package, include_cran = TRUE) urls <- c(urls, desc_urls_dat$url) details <- c( details, map( desc_urls_dat$desc_field, ~ if (is.na(.x)) "CRAN" else glue("{ui_field(.x)} field in DESCRIPTION") ) ) if (length(urls) == 0) { ui_oops("Can't find any URLs") return(invisible(character())) } if (!is_interactive()) { return(invisible(urls)) } prompt <- "Which URL do you want to visit? 
(0 to exit)" pretty <- purrr::map2( format(urls, justify = "left"), details, ~ glue("{.x} ({.y})") ) choice <- utils::menu(title = prompt, choices = pretty) if (choice == 0) { return(invisible(character())) } view_url(urls[choice]) } #' @export #' @rdname browse-this browse_project <- function() browse_package(NULL) #' @export #' @rdname browse-this browse_github <- function(package = NULL) { view_url(github_url(package)) } #' @export #' @rdname browse-this browse_github_issues <- function(package = NULL, number = NULL) { view_url(github_url(package), "issues", number) } #' @export #' @rdname browse-this browse_github_pulls <- function(package = NULL, number = NULL) { pull <- if (is.null(number)) "pulls" else "pull" view_url(github_url(package), pull, number) } #' @export #' @rdname browse-this browse_github_actions <- function(package = NULL) { view_url(github_url(package), "actions") } #' @export #' @rdname browse-this #' @param ext Version of travis to use. browse_travis <- function(package = NULL, ext = c("com", "org")) { gh <- github_url(package) ext <- arg_match(ext) travis_url <- glue("travis-ci.{ext}") view_url(sub("github.com", travis_url, gh)) } #' @export #' @rdname browse-this browse_circleci <- function(package = NULL) { gh <- github_url(package) circle_url <- "circleci.com/gh" view_url(sub("github.com", circle_url, gh)) } #' @export #' @rdname browse-this browse_cran <- function(package = NULL) { view_url(cran_home(package)) } # Try to get a GitHub repo spec from these places: # 1. Remotes associated with GitHub (active project) # 2. BugReports/URL fields of DESCRIPTION (active project or arbitrary # installed package) github_url <- function(package = NULL) { stopifnot(is.null(package) || is_string(package)) if (is.null(package)) { check_is_project() url <- github_url_from_git_remotes() if (!is.null(url)) { return(url) } } desc_urls_dat <- desc_urls(package) if (is.null(desc_urls_dat)) { if (is.null(package)) { ui_stop(" Project {ui_value(project_name())} has no DESCRIPTION file and \\ has no GitHub remotes configured No way to discover URLs") } else { ui_stop(" Can't find DESCRIPTION for package {ui_value(package)} locally \\ or on CRAN No way to discover URLs") } } desc_urls_dat <- desc_urls_dat[desc_urls_dat$is_github, ] if (nrow(desc_urls_dat) > 0) { parsed <- parse_github_remotes(desc_urls_dat$url[[1]]) return(glue_data_chr(parsed, "https://{host}/{repo_owner}/{repo_name}")) } if (is.null(package)) { ui_stop(" Project {ui_value(project_name())} has no GitHub remotes configured \\ and has no GitHub URLs in DESCRIPTION") } ui_warn(" Package {ui_value(package)} has no GitHub URLs in DESCRIPTION Trying the GitHub CRAN mirror") glue_chr("https://github.com/cran/{package}") } cran_home <- function(package = NULL) { package <- package %||% project_name() glue_chr("https://cran.r-project.org/package={package}") } # returns NULL, if no DESCRIPTION found # returns 0-row data frame, if DESCRIPTION holds no URLs # returns data frame, if successful # include_cran whether to include CRAN landing page, if we consult it desc_urls <- function(package = NULL, include_cran = FALSE, desc = NULL) { maybe_desc <- purrr::possibly(desc::desc, otherwise = NULL) desc_from_cran <- FALSE if (is.null(desc)) { if (is.null(package)) { desc <- maybe_desc(file = proj_get()) if (is.null(desc)) { return() } } else { desc <- maybe_desc(package = package) if (is.null(desc)) { cran_desc_url <- glue("https://cran.rstudio.com/web/packages/{package}/DESCRIPTION") suppressWarnings( desc <- maybe_desc(text = 
readLines(cran_desc_url)) ) if (is.null(desc)) { return() } desc_from_cran <- TRUE } } } url <- desc$get_urls() bug_reports <- desc$get_field("BugReports", default = character()) cran <- if (include_cran && desc_from_cran) cran_home(package) else character() dat <- data.frame( desc_field = c( rep_len("URL", length.out = length(url)), rep_len("BugReports", length.out = length(bug_reports)), rep_len(NA, length.out = length(cran)) ), url = c(url, bug_reports, cran), stringsAsFactors = FALSE ) dat <- cbind(dat, re_match(dat$url, github_remote_regex)) # TODO: could have a more sophisticated understanding of GitHub deployments dat$is_github <- !is.na(dat$.match) & grepl("github", dat$host) dat[c("url", "desc_field", "is_github")] } usethis/R/pipe.R0000644000175000017500000000202014131622147013344 0ustar nileshnilesh#' Use magrittr's pipe in your package #' #' Does setup necessary to use magrittr's pipe operator, `%>%` in your package. #' This function requires the use roxygen. #' * Adds magrittr to "Imports" in `DESCRIPTION`. #' * Imports the pipe operator specifically, which is necessary for internal #' use. #' * Exports the pipe operator, if `export = TRUE`, which is necessary to make #' `%>%` available to the users of your package. #' #' @param export If `TRUE`, the file `R/utils-pipe.R` is added, which provides #' the roxygen template to import and re-export `%>%`. If `FALSE`, the necessary #' roxygen directive is added, if possible, or otherwise instructions are given. #' #' @export #' #' @examples #' \dontrun{ #' use_pipe() #' } use_pipe <- function(export = TRUE) { check_is_package("use_pipe()") check_uses_roxygen("use_pipe()") if (export) { use_dependency("magrittr", "Imports") use_template("pipe.R", "R/utils-pipe.R") && roxygen_remind() return(invisible(TRUE)) } use_import_from("magrittr", "%>%") } usethis/R/course.R0000644000175000017500000005165314153502006013722 0ustar nileshnilesh## see end of file for some cURL notes #' Download and unpack a ZIP file #' #' Functions to download and unpack a ZIP file into a local folder of files, #' with very intentional default behaviour. Useful in pedagogical settings or #' anytime you need a large audience to download a set of files quickly and #' actually be able to find them. The underlying helpers are documented in #' [use_course_details]. #' #' @param url Link to a ZIP file containing the materials. To reduce the chance #' of typos in live settings, these shorter forms are accepted: #' #' * GitHub repo spec: "OWNER/REPO". Equivalent to #' `https://github.com/OWNER/REPO/DEFAULT_BRANCH.zip`. #' * bit.ly or rstd.io shortlinks: "bit.ly/xxx-yyy-zzz" or "rstd.io/foofy". #' The instructor must then arrange for the shortlink to point to a valid #' download URL for the target ZIP file. The helper #' [create_download_url()] helps to create such URLs for GitHub, DropBox, #' and Google Drive. #' @param destdir The new folder is stored here. If `NULL`, defaults to user's #' Desktop or some other conspicuous place. You can also set a default #' location using the option `usethis.destdir`, e.g. #' `options(usethis.destdir = "a/good/dir")`, perhaps saved to your #' `.Rprofile` with [`edit_r_profile()`] #' @param cleanup Whether to delete the original ZIP file after unpacking its #' contents. In an interactive setting, `NA` leads to a menu where user can #' approve the deletion (or decline). #' #' @return Path to the new directory holding the unpacked ZIP file, invisibly. 
#' @name zip-utils #' @examples #' \dontrun{ #' # download the source of usethis from GitHub, behind a bit.ly shortlink #' use_course("bit.ly/usethis-shortlink-example") #' use_course("http://bit.ly/usethis-shortlink-example") #' #' # download the source of rematch2 package from CRAN #' use_course("https://cran.r-project.org/bin/windows/contrib/3.4/rematch2_2.0.1.zip") #' #' # download the source of rematch2 package from GitHub, 4 ways #' use_course("r-lib/rematch2") #' use_course("https://api.github.com/repos/r-lib/rematch2/zipball/HEAD") #' use_course("https://api.github.com/repos/r-lib/rematch2/zipball/main") #' use_course("https://github.com/r-lib/rematch2/archive/main.zip") #' } NULL #' @describeIn zip-utils #' #' Designed with live workshops in mind. Includes intentional friction to #' highlight the download destination. Workflow: #' * User executes, e.g., `use_course("bit.ly/xxx-yyy-zzz")`. #' * User is asked to notice and confirm the location of the new folder. Specify #' `destdir` or configure the `"usethis.destdir"` option to prevent this. #' * User is asked if they'd like to delete the ZIP file. #' * If new folder contains an `.Rproj` file, a new instance of RStudio is #' launched. Otherwise, the folder is opened in the file manager, e.g. Finder #' or File Explorer. #' @export use_course <- function(url, destdir = getOption("usethis.destdir")) { url <- normalize_url(url) destdir_not_specified <- is.null(destdir) destdir <- user_path_prep(destdir %||% conspicuous_place()) check_path_is_directory(destdir) if (destdir_not_specified && is_interactive()) { ui_line(c( "Downloading into {ui_path(destdir)}.", "Prefer a different location? Cancel, try again, and specify {ui_code('destdir')}" )) if (ui_nope("OK to proceed?")) { ui_stop("Aborting.") } } ui_done("Downloading from {ui_value(url)}") zipfile <- tidy_download(url, destdir) ui_done("Download stored in {ui_path(zipfile)}") check_is_zip(attr(zipfile, "content-type")) tidy_unzip(zipfile, cleanup = NA) } #' @describeIn zip-utils #' #' More useful in day-to-day work. Downloads in current working directory, by #' default, and allows `cleanup` behaviour to be specified. #' @export use_zip <- function(url, destdir = getwd(), cleanup = if (rlang::is_interactive()) NA else FALSE) { url <- normalize_url(url) check_path_is_directory(destdir) ui_done("Downloading from {ui_value(url)}") zipfile <- tidy_download(url, destdir) ui_done("Download stored in {ui_path(zipfile)}") check_is_zip(attr(zipfile, "content-type")) tidy_unzip(zipfile, cleanup) } #' Helpers to download and unpack a ZIP file #' #' @description #' Details on the internal and helper functions that power [use_course()] and #' [use_zip()]. Only `create_download_url()` is exported. #' #' @name use_course_details #' @keywords internal #' #' @section tidy_download(): #' #' ``` #' ## function signature #' tidy_download(url, destdir = getwd()) #' #' # as called inside use_course() #' tidy_download( #' url, ## after post-processing with normalize_url() #' # conspicuous_place() = `getOption('usethis.destdir')` or desktop or home #' # directory or working directory #' destdir = destdir %||% conspicuous_place() #' ) #' ``` #' #' Special-purpose function to download a ZIP file and automatically determine #' the file name, which often determines the folder name after unpacking. #' Developed with DropBox and GitHub as primary targets, possibly via #' shortlinks. 
Both platforms offer a way to download an entire folder or repo #' as a ZIP file, with information about the original folder or repo transmitted #' in the `Content-Disposition` header. In the absence of this header, a #' filename is generated from the input URL. In either case, the filename is #' sanitized. Returns the path to downloaded ZIP file, invisibly. #' #' `tidy_download()` is setup to retry after a download failure. In an #' interactive session, it asks for user's consent. All retries use a longer #' connect timeout. #' #' ## DropBox #' #' To make a folder available for ZIP download, create a shared link for it: #' * #' #' A shared link will have this form: #' ``` #' https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0 #' ``` #' Replace the `dl=0` at the end with `dl=1` to create a download link: #' ``` #' https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1 #' ``` #' You can use `create_download_url()` to do this conversion. #' #' This download link (or a shortlink that points to it) is suitable as input #' for `tidy_download()`. After one or more redirections, this link will #' eventually lead to a download URL. For more details, see #' and #' . #' #' ## GitHub #' #' Click on the repo's "Clone or download" button, to reveal a "Download ZIP" #' button. Capture this URL, which will have this form: #' ``` #' https://github.com/r-lib/usethis/archive/main.zip #' ``` #' This download link (or a shortlink that points to it) is suitable as input #' for `tidy_download()`. After one or more redirections, this link will #' eventually lead to a download URL. Here are other links that also lead to #' ZIP download, albeit with a different filenaming scheme (REF could be a #' branch name, a tag, or a SHA): #' ``` #' https://github.com/github.com/r-lib/usethis/zipball/HEAD #' https://api.github.com/repos/r-lib/rematch2/zipball/REF #' https://api.github.com/repos/r-lib/rematch2/zipball/HEAD #' https://api.github.com/repos/r-lib/usethis/zipball/REF #' ``` #' #' You can use `create_download_url()` to create the "Download ZIP" URL from #' a typical GitHub browser URL. #' #' ## Google Drive #' #' To our knowledge, it is not possible to download a Google Drive folder as a #' ZIP archive. It is however possible to share a ZIP file stored on Google #' Drive. To get its URL, click on "Get the shareable link" (within the "Share" #' menu). This URL doesn't allow for direct download, as it's designed to be #' processed in a web browser first. Such a sharing link looks like: #' #' ``` #' https://drive.google.com/open?id=123456789xxyyyzzz #' ``` #' #' To be able to get the URL suitable for direct download, you need to extract #' the "id" element from the URL and include it in this URL format: #' #' ``` #' https://drive.google.com/uc?export=download&id=123456789xxyyyzzz #' ``` #' #' Use `create_download_url()` to perform this transformation automatically. #' #' @param url Download link for the ZIP file, possibly behind a shortlink or #' other redirect. See Details. #' @param destdir Path to existing local directory where the ZIP file will be #' stored. Defaults to current working directory, but note that [use_course()] #' has different default behavior. #' #' @examples #' \dontrun{ #' tidy_download("https://github.com/r-lib/rematch2/archive/main.zip") #' } #' #' @section tidy_unzip(): #' #' Special-purpose function to unpack a ZIP file and (attempt to) create the #' directory structure most people want. When unpacking an archive, it is easy #' to get one more or one less level of nesting than you expected. 
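#'
#' For example (illustrative archive listings):
#' ```
#' # GitHub-style ZIP: one top-level directory, so unpack as-is
#' #   rematch2-main/DESCRIPTION, rematch2-main/R/, ...
#' # DropBox-style ZIP: "loose parts", so unpack into a directory named after foo.zip
#' #   DESCRIPTION, R/, ...  -->  foo/DESCRIPTION, foo/R/, ...
#' ```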
#' #' It's especially important to finesse the directory structure here: we want #' the same local result when unzipping the same content from either GitHub or #' DropBox ZIP files, which pack things differently. Here is the intent: #' * If the ZIP archive `foo.zip` does not contain a single top-level directory, #' i.e. it is packed as "loose parts", unzip into a directory named `foo`. #' Typical of DropBox ZIP files. #' * If the ZIP archive `foo.zip` has a single top-level directory (which, by #' the way, is not necessarily called "foo"), unpack into said directory. #' Typical of GitHub ZIP files. #' #' Returns path to the directory holding the unpacked files, invisibly. #' #' **DropBox:** #' The ZIP files produced by DropBox are special. The file list tends to contain #' a spurious directory `"/"`, which we ignore during unzip. Also, if the #' directory is a Git repo and/or RStudio Project, we unzip-ignore various #' hidden files, such as `.RData`, `.Rhistory`, and those below `.git/` and #' `.Rproj.user`. #' #' @param zipfile Path to local ZIP file. #' #' @examples #' \dontrun{ #' tidy_download("https://github.com/r-lib/rematch2/archive/main.zip") #' tidy_unzip("rematch2-main.zip") #' } NULL # 1. downloads from `url` # 2. calls a retry-capable helper to download the ZIP file # 3. determines filename from content-description header (with fallbacks) # 4. returned path has content-type and content-description as attributes tidy_download <- function(url, destdir = getwd()) { check_path_is_directory(destdir) tmp <- file_temp("tidy-download-") h <- download_url(url, destfile = tmp) ui_line() cd <- content_disposition(h) base_name <- make_filename(cd, fallback = path_file(url)) full_path <- path(destdir, base_name) if (!can_overwrite(full_path)) { ui_stop("Aborting to avoid overwriting {ui_path(full_path)}") } attr(full_path, "content-type") <- content_type(h) attr(full_path, "content-disposition") <- cd file_move(tmp, full_path) invisible(full_path) } download_url <- function(url, destfile, handle = curl::new_handle(), n_tries = 3, retry_connecttimeout = 40L) { handle_options <- list(noprogress = FALSE, progressfunction = progress_fun) curl::handle_setopt(handle, .list = handle_options) we_should_retry <- function(i, n_tries, status) { if (i >= n_tries) { FALSE } else if (inherits(status, "error")) { # TODO: find a way to detect a (connect) timeout more specifically? # https://github.com/jeroen/curl/issues/154 # https://ec.haxx.se/usingcurl/usingcurl-timeouts # "Failing to connect within the given time will cause curl to exit with a # timeout exit code (28)." # (however, note that all timeouts lead to this same exit code) # https://ec.haxx.se/usingcurl/usingcurl-returns # "28. Operation timeout. The specified time-out period was reached # according to the conditions. curl offers several timeouts, and this exit # code tells one of those timeout limits were reached." # https://github.com/curl/curl/blob/272282a05416e42d2cc4a847a31fd457bc6cc827/lib/strerror.c#L143-L144 # "Timeout was reached" <-- actual message we could potentially match TRUE } else { FALSE } } status <- try_download(url, destfile, handle = handle) if (inherits(status, "error") && is_interactive()) { ui_oops(status$message) if (ui_nope(" Download failed :( See above for everything we know about why it failed. Shall we try a couple more times, with a longer timeout? 
")) { n_tries <- 1 } } i <- 1 # invariant: we have made i download attempts while (we_should_retry(i, n_tries, status)) { if (i == 1) { curl::handle_setopt( handle, .list = c(connecttimeout = retry_connecttimeout) ) } i <- i + 1 ui_info("Retrying download ... attempt {i}") status <- try_download(url, destfile, handle = handle) } if (inherits(status, "error")) { stop(status) } invisible(handle) } try_download <- function(url, destfile, quiet = FALSE, mode = "wb", handle) { tryCatch( curl::curl_download( url = url, destfile = destfile, quiet = quiet, mode = mode, handle = handle ), error = function(e) e ) } tidy_unzip <- function(zipfile, cleanup = FALSE) { base_path <- path_dir(zipfile) filenames <- utils::unzip(zipfile, list = TRUE)[["Name"]] ## deal with DropBox's peculiar habit of including "/" as a file --> drop it filenames <- filenames[filenames != "/"] ## DropBox ZIP files often include lots of hidden R, RStudio, and Git files filenames <- filenames[keep_lgl(filenames)] td <- top_directory(filenames) loose_parts <- is.na(td) if (loose_parts) { target <- path_ext_remove(zipfile) utils::unzip(zipfile, files = filenames, exdir = target) } else { target <- path(base_path, td) utils::unzip(zipfile, files = filenames, exdir = base_path) } ui_done( "Unpacking ZIP file into {ui_path(target, base_path)} \\ ({length(filenames)} files extracted)" ) if (isNA(cleanup)) { cleanup <- is_interactive() && ui_yeah("Shall we delete the ZIP file ({ui_path(zipfile, base_path)})?") } if (isTRUE(cleanup)) { ui_done("Deleting {ui_path(zipfile, base_path)}") file_delete(zipfile) } if (is_interactive()) { rproj_path <- dir_ls(target, regexp = "[.]Rproj$") if (length(rproj_path) == 1 && rstudioapi::hasFun("openProject")) { ui_done("Opening project in RStudio") rstudioapi::openProject(target, newSession = TRUE) } else if (!in_rstudio_server()) { ui_done("Opening {ui_path(target, base_path)} in the file manager") utils::browseURL(path_real(target)) } } invisible(target) } #' @rdname use_course_details #' @param url a GitHub, DropBox, or Google Drive URL, as copied from a web #' browser. 
#' @examples #' # GitHub #' create_download_url("https://github.com/r-lib/usethis") #' create_download_url("https://github.com/r-lib/usethis/issues") #' #' # DropBox #' create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0") #' #' # Google Drive #' create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz") #' create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz/view") #' @export create_download_url <- function(url) { stopifnot(is_string(url)) stopifnot(grepl("^http[s]?://", url)) switch( classify_url(url), drive = modify_drive_url(url), dropbox = modify_dropbox_url(url), github = modify_github_url(url), hopeless_url(url) ) } classify_url <- function(url) { if (grepl("drive.google.com", url)) { return("drive") } if (grepl("dropbox.com/sh", url)) { return("dropbox") } if (grepl("github.com", url)) { return("github") } "unknown" } modify_drive_url <- function(url) { # id-isolating approach taken from the gargle / googleverse id_loc <- regexpr("/d/([^/])+|/folders/([^/])+|id=([^/])+", url) if (id_loc == -1) { return(hopeless_url(url)) } id <- gsub("/d/|/folders/|id=", "", regmatches(url, id_loc)) glue_chr("https://drive.google.com/uc?export=download&id={id}") } modify_dropbox_url <- function(url) { gsub("dl=0", "dl=1", url) } modify_github_url <- function(url) { # TO CONSIDER: one could use the API for this, which might be more proper and # would work if auth is needed # https://docs.github.com/en/free-pro-team@latest/rest/reference/repos#download-a-repository-archive-zip # https://api.github.com/repos/OWNER/REPO/zipball/ # but then, in big workshop settings, we might see rate limit problems or # get blocked because of too many token-free requests from same IP parsed <- parse_github_remotes(url) glue_data_chr(parsed, "{protocol}://{host}/{repo_owner}/{repo_name}/zipball/HEAD") } hopeless_url <- function(url) { ui_info( "URL does not match a recognized form for Google Drive or DropBox. \\ No change made." ) url } normalize_url <- function(url) { stopifnot(is.character(url)) has_scheme <- grepl("^http[s]?://", url) if (has_scheme) { return(url) } if (!is_shortlink(url)) { url <- tryCatch( expand_github(url), error = function(e) url ) } paste0("https://", url) } is_shortlink <- function(url) { shortlink_hosts <- c("rstd\\.io", "bit\\.ly") any(map_lgl(shortlink_hosts, grepl, x = url)) } expand_github <- function(url) { # mostly to handle errors in the spec repo_spec <- parse_repo_spec(url) glue_data_chr(repo_spec, "github.com/{owner}/{repo}/zipball/HEAD") } conspicuous_place <- function() { destdir_opt <- getOption("usethis.destdir") if (!is.null(destdir_opt)) { return(path_tidy(destdir_opt)) } Filter(dir_exists, c( path_home("Desktop"), path_home(), path_home_r(), path_tidy(getwd()) ))[[1]] } keep_lgl <- function(file, ignores = c(".Rproj.user", ".rproj.user", ".Rhistory", ".RData", ".git", "__MACOSX", ".DS_Store")) { ignores <- paste0( "((\\/|\\A)", gsub("\\.", "[.]", ignores), "(\\/|\\Z))", collapse = "|" ) !grepl(ignores, file, perl = TRUE) } top_directory <- function(filenames) { in_top <- path_dir(filenames) == "." 
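# entries whose parent is "." sit at the archive root; a single root entry that
# is itself a directory becomes the unpack target, anything else yields NA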
unique_top <- unique(filenames[in_top]) is_directory <- grepl("/$", unique_top) if (length(unique_top) > 1 || !is_directory) { NA_character_ } else { unique_top } } content_type <- function(h) { headers <- curl::parse_headers_list(curl::handle_data(h)$headers) headers[["content-type"]] } content_disposition <- function(h) { headers <- curl::parse_headers_list(curl::handle_data(h)$headers) cd <- headers[["content-disposition"]] if (is.null(cd)) { return() } parse_content_disposition(cd) } check_is_zip <- function(ct) { # "https://www.fueleconomy.gov/feg/epadata/16data.zip" comes with # MIME type "application/x-zip-compressed" # see https://github.com/r-lib/usethis/issues/573 allowed <- c("application/zip", "application/x-zip-compressed") if (!ct %in% allowed) { ui_stop(c( "Download does not have MIME type {ui_value('application/zip')}.", "Instead it's {ui_value(ct)}." )) } invisible(ct) } ## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition ## https://tools.ietf.org/html/rfc6266 ## DropBox eg: "attachment; filename=\"foo.zip\"; filename*=UTF-8''foo.zip\" ## GitHub eg: "attachment; filename=foo-main.zip" # https://stackoverflow.com/questions/30193569/get-content-disposition-parameters # http://test.greenbytes.de/tech/tc2231/ parse_content_disposition <- function(cd) { if (!grepl("^attachment;", cd)) { ui_stop(c( "{ui_code('Content-Disposition')} header doesn't start with {ui_value('attachment')}.", "Actual header: {ui_value(cd)}" )) } cd <- sub("^attachment;\\s*", "", cd, ignore.case = TRUE) cd <- strsplit(cd, "\\s*;\\s*")[[1]] cd <- strsplit(cd, "=") stats::setNames( vapply(cd, `[[`, character(1), 2), vapply(cd, `[[`, character(1), 1) ) } progress_fun <- function(down, up) { total <- down[[1]] now <- down[[2]] pct <- if (length(total) && total > 0) { paste0("(", round(now / total * 100), "%)") } else { "" } if (now > 10000) { cat("\rDownloaded:", sprintf("%.2f", now / 2^20), "MB ", pct) } TRUE } make_filename <- function(cd, fallback = path_file(file_temp())) { ## TO DO(jennybc): the element named 'filename*' is preferred but I'm not ## sure how to parse it yet, so targeting 'filename' for now ## https://tools.ietf.org/html/rfc6266 cd <- cd[["filename"]] if (is.null(cd) || is.na(cd)) { stopifnot(is_string(fallback)) return(path_sanitize(fallback)) } ## I know I could use regex and lookahead but this is easier for me to ## maintain cd <- sub("^\"(.+)\"$", "\\1", cd) path_sanitize(cd) } ## https://stackoverflow.com/questions/21322614/use-curl-to-download-a-dropbox-folder-via-shared-link-not-public-link ## lesson: if using cURL, you'd want these options ## -L, --location (follow redirects) ## -O, --remote-name (name local file like the file part of remote name) ## -J, --remote-header-name (tells -O option to consult Content-Disposition ## instead of the URL) ## https://curl.haxx.se/docs/manpage.html#OPTIONS usethis/R/version.R0000644000175000017500000000660114131645451014110 0ustar nileshnilesh#' Increment package version #' #' @description `use_version()` increments the "Version" field in `DESCRIPTION`, #' adds a new heading to `NEWS.md` (if it exists), and commits those changes #' (if package uses Git). It makes the same update to a line like `PKG_version #' = "x.y.z";` in `src/version.c` (if it exists). #' #' @description `use_dev_version()` increments to a development version, e.g. #' from 1.0.0 to 1.0.0.9000. If the existing version is already a development #' version with four components, it does nothing. Thin wrapper around #' `use_version()`. 
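#'
#' A sketch of how each `which` value bumps a current version of 1.2.3
#' (numbers are illustrative):
#' ```
#' major --> 2.0.0    minor --> 1.3.0    patch --> 1.2.4    dev --> 1.2.3.9000
#' ```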
#' #' @param which A string specifying which level to increment, one of: "major", #' "minor", "patch", "dev". If `NULL`, user can choose interactively. #' #' @seealso The [version #' section](https://r-pkgs.org/description.html#version) of [R #' Packages](https://r-pkgs.org). #' #' @examples #' \dontrun{ #' ## for interactive selection, do this: #' use_version() #' #' ## request a specific type of increment #' use_version("minor") #' use_dev_version() #' } #' #' @name use_version NULL #' @rdname use_version #' @export use_version <- function(which = NULL) { if (is.null(which) && !is_interactive()) { return(invisible(FALSE)) } check_is_package("use_version()") challenge_uncommitted_changes( msg = "There are uncommitted changes and you're about to bump version" ) new_ver <- choose_version("What should the new version be?", which) if (is.null(new_ver)) { return(invisible(FALSE)) } use_description_field("Version", new_ver, overwrite = TRUE) if (names(new_ver) == "dev") { use_news_heading("(development version)") } else { use_news_heading(new_ver) } use_c_version(new_ver) git_ask_commit( glue("Increment version number to {new_ver}"), untracked = TRUE, paths = c("DESCRIPTION", "NEWS.md", path("src", "version.c")) ) invisible(TRUE) } #' @rdname use_version #' @export use_dev_version <- function() { check_is_package("use_dev_version()") ver <- desc::desc_get_version(proj_get()) if (length(unlist(ver)) > 3) { return(invisible()) } use_version(which = "dev") } choose_version <- function(message, which = NULL) { ver <- desc::desc_get_version(proj_get()) versions <- bump_version(ver) if (is.null(which)) { choice <- utils::menu( choices = glue( "{format(names(versions), justify = 'right')} --> {versions}" ), title = glue( "Current version is {ver}.\n", "{message} (0 to exit)" ) ) if (choice == 0) { return(invisible()) } else { which <- names(versions)[choice] } } which <- match.arg(which, c("major", "minor", "patch", "dev")) versions[which] } bump_version <- function(ver) { bumps <- c("major", "minor", "patch", "dev") vapply(bumps, bump_, character(1), ver = ver) } bump_ <- function(x, ver) { d <- desc::desc(text = paste0("Version: ", ver)) suppressMessages(d$bump_version(x)$get("Version")[[1]]) } use_c_version <- function(ver) { version_path <- proj_path("src", "version.c") if (!file_exists(version_path)) { return() } hint <- glue("{project_name()}_version") ui_done(" Setting {ui_field(hint)} to {ui_value(ver)} in {ui_path(version_path)}") lines <- read_utf8(version_path) re <- glue("(^.*{project_name()}_version = \")([0-9.]+)(\";$)") lines <- gsub(re, glue("\\1{ver}\\3"), lines) write_utf8(version_path, lines) } usethis/R/r.R0000644000175000017500000001333514117743363012673 0ustar nileshnilesh#' Create or edit R or test files #' #' This pair of functions makes it easy to create paired R and test files, #' using the convention that the tests for `R/foofy.R` should live #' in `tests/testthat/test-foofy.R`. You can use them to create new files #' from scratch by supplying `name`, or if you use RStudio, you can call #' to create (or navigate to) the paired file based on the currently open #' script. #' #' @param name Either a name without extension, or `NULL` to create the #' paired file based on currently open file in the script editor. If #' the R file is open, `use_test()` will create/open the corresponding #' test file; if the test file is open, `use_r()` will create/open the #' corresponding R file. 
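#'
#' A minimal sketch of the pairing convention (illustrative name):
#' ```
#' use_r("foofy")     # creates/opens R/foofy.R
#' use_test("foofy")  # creates/opens tests/testthat/test-foofy.R
#' ```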
#' @inheritParams edit_file #' @seealso The [testing](https://r-pkgs.org/tests.html) and #' [R code](https://r-pkgs.org/r.html) chapters of #' [R Packages](https://r-pkgs.org). #' @export use_r <- function(name = NULL, open = rlang::is_interactive()) { name <- name %||% get_active_r_file(path = "tests/testthat") name <- gsub("^test-", "", name) name <- slug(name, "R") check_file_name(name) use_directory("R") edit_file(proj_path("R", name), open = open) test_path <- proj_path("tests", "testthat", paste0("test-", name, ".R")) if (!file_exists(test_path)) { ui_todo("Call {ui_code('use_test()')} to create a matching test file") } invisible(TRUE) } #' @rdname use_r #' @export use_test <- function(name = NULL, open = rlang::is_interactive()) { if (!uses_testthat()) { use_testthat_impl() } name <- name %||% get_active_r_file(path = "R") name <- paste0("test-", name) name <- slug(name, "R") check_file_name(name) path <- path("tests", "testthat", name) if (!file_exists(path)) { use_template("test-example-2.1.R", save_as = path, open = FALSE) } edit_file(proj_path(path), open = open) } #' Automatically rename paired `R/` and `test/` files #' #' @description #' * Moves `R/{old}.R` to `R/{new}.R` #' * Moves `tests/testthat/test-{old}.R` to `tests/testthat/test-{new}.R` #' * Moves `tests/testthat/test-{old}-*.*` to `tests/testthat/test-{new}-*.*` #' and updates paths in the test file. #' * Removes `context()` calls from the test file, which are unnecessary #' (and discouraged) as of testthat v2.1.0. #' #' This is a potentially dangerous operation, so you must be using Git in #' order to use this function. #' #' @param old,new Old and new file names (with or without extensions). #' @export rename_files <- function(old, new) { check_uses_git() old <- path_ext_remove(old) new <- path_ext_remove(new) # Move .R file r_old_path <- proj_path("R", old, ext = "R") r_new_path <- proj_path("R", new, ext = "R") if (file_exists(r_old_path)) { ui_done("Moving {ui_path(r_old_path)} to {ui_path(r_new_path)}") file_move(r_old_path, r_new_path) } if (!uses_testthat()) { return(invisible()) } # Move test files and snapshots rename_test <- function(path) { file <- gsub(glue("^test-{old}"), glue("test-{new}"), path_file(path)) file <- gsub(glue("^{old}.md"), glue("{new}.md"), file) path(path_dir(path), file) } old_test <- dir_ls( proj_path("tests", "testthat"), glob = glue("*/test-{old}*") ) new_test <- rename_test(old_test) if (length(old_test) > 0) { ui_done("Moving {ui_path(old_test)} to {ui_path(new_test)}") file_move(old_test, new_test) } snaps_dir <- proj_path("tests", "testthat", "_snaps") if (dir_exists(snaps_dir)) { old_snaps <- dir_ls(snaps_dir, glob = glue("*/{old}.md")) if (length(old_snaps) > 0) { new_snaps <- rename_test(old_snaps) ui_done("Moving {ui_path(old_snaps)} to {ui_path(new_snaps)}") file_move(old_snaps, new_snaps) } } # Update test file test_path <- proj_path("tests", "testthat", glue("test-{new}"), ext = "R") if (!file_exists(test_path)) { return(invisible()) } lines <- read_utf8(test_path) # Remove old context lines context <- grepl("context\\(.*\\)", lines) if (any(context)) { ui_done("Removing call to {ui_code('context()')}") lines <- lines[!context] if (lines[[1]] == "") { lines <- lines[-1] } } old_test <- old_test[new_test != test_path] new_test <- new_test[new_test != test_path] if (length(old_test) > 0) { ui_done("Updating paths in {ui_path(test_path)}") for (i in seq_along(old_test)) { lines <- gsub(path_file(old_test[[i]]), path_file(new_test[[i]]), lines, fixed = TRUE) } } 
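# write the updated test file back out (companion file paths rewritten and any
# context() call already removed above)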
write_utf8(test_path, lines) } # helpers ----------------------------------------------------------------- check_file_name <- function(name) { if (!is_string(name)) { ui_stop("Name must be a single string") } if (!valid_file_name(path_ext_remove(name))) { ui_stop(c( "{ui_value(name)} is not a valid file name. It should:", "* Contain only ASCII letters, numbers, '-', and '_'." )) } name } valid_file_name <- function(x) { grepl("^[a-zA-Z0-9._-]+$", x) } get_active_r_file <- function(path = "R") { if (!rstudio_available()) { ui_stop("Argument {ui_code('name')} must be specified.") } active_file <- rstudioapi::getSourceEditorContext()$path ## rstudioapi can return a path like '~/path/to/file' where '~' means ## R's notion of user's home directory active_file <- proj_path_prep(path_expand_r(active_file)) rel_path <- proj_rel_path(active_file) if (path_dir(rel_path) != path) { ui_stop(c( "Open file must be in the {ui_path(path)} directory of the active package.", " * Actual path: {ui_path(rel_path)}" )) } ext <- path_ext(active_file) if (toupper(ext) != "R") { ui_stop( "Open file must have {ui_value('.R')} or {ui_value('.r')} as extension,\\ not {ui_value(ext)}." ) } path_file(active_file) } usethis/R/github.R0000644000175000017500000002257414153502006013704 0ustar nileshnilesh#' Connect a local repo with GitHub #' #' @description #' `use_github()` takes a local project and: #' * Checks that the initial state is good to go: #' - Project is already a Git repo #' - Current branch is the default branch, e.g. `main` or `master` #' - No uncommitted changes #' - No pre-existing `origin` remote #' * Creates an associated repo on GitHub #' * Adds that GitHub repo to your local repo as the `origin` remote #' * Makes an initial push to GitHub #' * Calls [use_github_links()], if the project is an R package #' * Configures `origin/DEFAULT` to be the upstream branch of the local #' `DEFAULT` branch, e.g. `main` or `master` #' #' See below for the authentication setup that is necessary for all of this to #' work. #' #' @template double-auth #' #' @param organisation If supplied, the repo will be created under this #' organisation, instead of the login associated with the GitHub token #' discovered for this `host`. The user's role and the token's scopes must be #' such that you have permission to create repositories in this #' `organisation`. #' @param private If `TRUE`, creates a private repository. #' @param visibility Only relevant for organisation-owned repos associated with #' certain GitHub Enterprise products. The special "internal" `visibility` #' grants read permission to all organisation members, i.e. it's intermediate #' between "private" and "public", within GHE. When specified, `visibility` #' takes precedence over `private = TRUE/FALSE`. #' @inheritParams git_protocol #' @param host GitHub host to target, passed to the `.api_url` argument of #' [gh::gh()]. If unspecified, gh defaults to "https://api.github.com", #' although gh's default can be customised by setting the GITHUB_API_URL #' environment variable. #' #' For a hypothetical GitHub Enterprise instance, either #' "https://github.acme.com/api/v3" or "https://github.acme.com" is #' acceptable. #' @param auth_token,credentials `r lifecycle::badge("deprecated")`: No longer #' consulted now that usethis uses the gert package for Git operations, #' instead of git2r; gert relies on the credentials package for auth. The API #' requests are now authorized with the token associated with the `host`, as #' retrieved by [gh::gh_token()]. 
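#'
#' A rough sketch of how to check, before calling `use_github()`, that a
#' GitHub token is discoverable (these are the same helpers the function
#' itself leans on):
#'
#' ```
#' gh::gh_whoami()   # should report your GitHub login if a token is found
#' gh_token_help()   # setup directions, if no token is discovered
#' ```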
#' #' @export #' @examples #' \dontrun{ #' pkgpath <- file.path(tempdir(), "testpkg") #' create_package(pkgpath) #' #' ## now, working inside "testpkg", initialize git repository #' use_git() #' #' ## create github repository and configure as git remote #' use_github() #' } use_github <- function(organisation = NULL, private = FALSE, visibility = c("public", "private", "internal"), protocol = git_protocol(), host = NULL, auth_token = deprecated(), credentials = deprecated()) { if (lifecycle::is_present(auth_token)) { deprecate_warn_auth_token("use_github") } if (lifecycle::is_present(credentials)) { deprecate_warn_credentials("use_github") } visibility_specified <- !missing(visibility) visibility <- match.arg(visibility) check_protocol(protocol) check_uses_git() default_branch <- git_default_branch() check_current_branch( is = default_branch, # glue-ing happens inside check_current_branch(), where `gb` gives the # current branch "Must be on the default branch ({ui_value(is)}), not {ui_value(gb)}." ) challenge_uncommitted_changes(msg = " There are uncommitted changes and we're about to create and push to a new \\ GitHub repo") check_no_origin() if (is.null(organisation)) { if (visibility_specified) { ui_stop(" The {ui_code('visibility')} setting is only relevant for organisation-owned repos, within the context of certain \\ GitHub Enterprise products.") } visibility <- if (private) "private" else "public" } if (!is.null(organisation) && !visibility_specified) { visibility <- if (private) "private" else "public" } whoami <- suppressMessages(gh::gh_whoami(.api_url = host)) if (is.null(whoami)) { ui_stop(" Unable to discover a GitHub personal access token A token is required in order to create and push to a new repo Call {ui_code('gh_token_help()')} for help configuring a token") } empirical_host <- parse_github_remotes(glue("{whoami$html_url}/REPO"))$host if (empirical_host != "github.com") { ui_info("Targeting the GitHub host {ui_value(empirical_host)}") } owner <- organisation %||% whoami$login repo_name <- project_name() check_no_github_repo(owner, repo_name, host) repo_desc <- if (is_package()) package_data()$Title %||% "" else "" repo_desc <- gsub("\n", " ", repo_desc) repo_spec <- glue("{owner}/{repo_name}") visibility_string <- if (visibility == "public") "" else glue("{visibility} ") ui_done("Creating {visibility_string}GitHub repository {ui_value(repo_spec)}") if (is.null(organisation)) { create <- gh::gh( "POST /user/repos", name = repo_name, description = repo_desc, private = private, .api_url = host ) } else { create <- gh::gh( "POST /orgs/{org}/repos", org = organisation, name = repo_name, description = repo_desc, visibility = visibility, # this is necessary to set `visibility` in GHE 2.22 (but not in 3.2) # hopefully it's harmless when not needed .accept = "application/vnd.github.nebula-preview+json", .api_url = host ) } origin_url <- switch( protocol, https = create$clone_url, ssh = create$ssh_url ) withr::defer(view_url(create$html_url)) ui_done("Setting remote {ui_value('origin')} to {ui_value(origin_url)}") use_git_remote("origin", origin_url) if (is_package()) { # we tryCatch(), because we can't afford any failure here to result in not # making the first push and configuring default branch # such an incomplete setup is hard to diagnose / repair post hoc tryCatch( use_github_links(), error = function(e) NULL ) } repo <- git_repo() remref <- glue("origin/{default_branch}") ui_done(" Pushing {ui_value(default_branch)} branch to GitHub and setting \\ {ui_value(remref)} as 
upstream branch") gert::git_push( remote = "origin", set_upstream = TRUE, repo = repo, verbose = FALSE ) gbl <- gert::git_branch_list(local = TRUE, repo = repo) if (nrow(gbl) > 1) { ui_done(" Setting {ui_value(default_branch)} as default branch on GitHub") gh::gh( "PATCH /repos/{owner}/{repo}", owner = owner, repo = repo_name, default_branch = default_branch, .api_url = host ) } invisible() } #' Use GitHub links in URL and BugReports #' #' @description #' Populates the `URL` and `BugReports` fields of a GitHub-using R package with #' appropriate links. The GitHub repo to link to is determined from the current #' project's GitHub remotes: #' * If we are not working with a fork, this function expects `origin` to be a #' GitHub remote and the links target that repo. #' * If we are working in a fork, this function expects to find two GitHub #' remotes: `origin` (the fork) and `upstream` (the fork's parent) remote. In #' an interactive session, the user can confirm which repo to use for the #' links. In a noninteractive session, links are formed using `upstream`. #' #' @param host,auth_token `r lifecycle::badge("deprecated")`: No longer consulted #' now that usethis consults the current project's GitHub remotes to get the #' `host` and then relies on gh to discover an appropriate token. #' @param overwrite By default, `use_github_links()` will not overwrite existing #' fields. Set to `TRUE` to overwrite existing links. #' @export #' @examples #' \dontrun{ #' use_github_links() #' } #' use_github_links <- function(auth_token = deprecated(), host = deprecated(), overwrite = FALSE) { if (lifecycle::is_present(auth_token)) { deprecate_warn_auth_token("use_github_links") } if (lifecycle::is_present(host)) { deprecate_warn_host("use_github_links") } check_is_package("use_github_links()") tr <- target_repo(github_get = TRUE) gh <- gh_tr(tr) res <- gh("GET /repos/{owner}/{repo}") use_description_field("URL", res$html_url, overwrite = overwrite) use_description_field( "BugReports", glue("{res$html_url}/issues"), overwrite = overwrite ) git_ask_commit( "Add GitHub links to DESCRIPTION", untracked = TRUE, paths = "DESCRIPTION" ) invisible() } check_no_origin <- function() { remotes <- git_remotes() if ("origin" %in% names(remotes)) { ui_stop(" This repo already has an {ui_value('origin')} remote, \\ with value {ui_value(remotes[['origin']])}. You can remove this setting with: {ui_code('usethis::use_git_remote(\"origin\", url = NULL, overwrite = TRUE)')}") } invisible() } check_no_github_repo <- function(owner, repo, host) { repo_found <- tryCatch( { repo_info <- gh::gh( "/repos/{owner}/{repo}", owner = owner, repo = repo, .api_url = host ) TRUE }, "http_error_404" = function(err) FALSE ) if (!repo_found) { return(invisible()) } spec <- glue("{owner}/{repo}") empirical_host <- parse_github_remotes(repo_info$html_url)$host ui_stop("Repo {ui_value(spec)} already exists on {ui_value(empirical_host)}") } usethis/R/code-of-conduct.R0000644000175000017500000000420714153502006015364 0ustar nileshnilesh#' Add a code of conduct #' #' Adds a `CODE_OF_CONDUCT.md` file to the active project and lists in #' `.Rbuildignore`, in the case of a package. The goal of a code of conduct is #' to foster an environment of inclusiveness, and to explicitly discourage #' inappropriate behaviour. The template comes from #' , version 2: #' . 
#' #' If your package is going to CRAN, the link to the CoC in your README must #' be an absolute link to a rendered website as `CODE_OF_CONDUCT.md` is not #' included in the package sent to CRAN. `use_code_of_conduct()` will #' automatically generate this link if (1) you use pkgdown and (2) have set the #' `url` field in `_pkgdown.yml`; otherwise it will link to a copy of the CoC #' on . #' #' @param contact Contact details for making a code of conduct report. #' Usually an email address. #' @param path Path of the directory to put `CODE_OF_CONDUCT.md` in, relative to #' the active project. Passed along to [use_directory()]. Default is to locate #' at top-level, but `.github/` is also common. #' #' @export use_code_of_conduct <- function(contact, path = NULL) { if (missing(contact)) { ui_stop(" {ui_code('use_code_of_conduct()')} requires contact details in \\ first argument") } if (!is.null(path)) { use_directory(path, ignore = is_package()) } save_as <- path_join(c(path, "CODE_OF_CONDUCT.md")) new <- use_template( "CODE_OF_CONDUCT.md", save_as = save_as, data = list(contact = contact), ignore = is_package() && is.null(path) ) href <- pkgdown_url(pedantic = TRUE) %||% "https://contributor-covenant.org/version/2/0" href <- sub("/$", "", href) href <- paste0(href, "/CODE_OF_CONDUCT.html") ui_todo("Don't forget to describe the code of conduct in your README:") ui_code_block(" ## Code of Conduct Please note that the {project_name()} project is released with a \\ [Contributor Code of Conduct]({href}). By contributing to this project, \\ you agree to abide by its terms." ) invisible(new) } usethis/R/sitrep.R0000644000175000017500000000720314117743363013735 0ustar nileshnilesh#' Report working directory and usethis/RStudio project #' #' @description `proj_sitrep()` reports #' * current working directory #' * the active usethis project #' * the active RStudio Project #' #' @description Call this function if things seem weird and you're not sure #' what's wrong or how to fix it. Usually, all three of these should coincide #' (or be unset) and `proj_sitrep()` provides suggested commands for getting #' back to this happy state. #' #' @return A named list, with S3 class `sitrep` (for printing purposes), #' reporting current working directory, active usethis project, and active #' RStudio Project #' @export #' @family project functions #' @examples #' proj_sitrep() proj_sitrep <- function() { out <- list( working_directory = getwd(), active_usethis_proj = if (proj_active()) proj_get(), active_rstudio_proj = if (rstudioapi::hasFun("getActiveProject")) { rstudioapi::getActiveProject() } ## TODO(?): address home directory to help clarify fs issues on Windows? ## home_usethis = fs::path_home(), ## home_r = normalizePath("~") ) out <- ifelse(map_lgl(out, is.null), out, as.character(path_tidy(out))) structure(out, class = "sitrep") } #' @export print.sitrep <- function(x, ...) 
{ keys <- format(names(x), justify = "right") purrr::walk2(keys, x, kv_line) rstudio_proj_is_active <- !is.null(x[["active_rstudio_proj"]]) usethis_proj_is_active <- !is.null(x[["active_usethis_proj"]]) rstudio_proj_is_not_wd <- rstudio_proj_is_active && x[["working_directory"]] != x[["active_rstudio_proj"]] usethis_proj_is_not_wd <- usethis_proj_is_active && x[["working_directory"]] != x[["active_usethis_proj"]] usethis_proj_is_not_rstudio_proj <- usethis_proj_is_active && rstudio_proj_is_active && x[["active_rstudio_proj"]] != x[["active_usethis_proj"]] if (rstudio_available() && !rstudio_proj_is_active) { ui_todo( " You are working in RStudio, but are not in an RStudio Project. A Project-based workflow offers many advantages. Read more at: {ui_field('https://support.rstudio.com/hc/en-us/articles/200526207-Using-Projects')} {ui_field('https://whattheyforgot.org/project-oriented-workflow.html')} " ) } if (!usethis_proj_is_active) { ui_todo( " There is currently no active usethis project. usethis attempts to activate a project upon first need. Call {ui_code('proj_get()')} to initiate project discovery. Call {ui_code('proj_set(\"path/to/project\")')} or \\ {ui_code('proj_activate(\"path/to/project\")')} to provide an explicit path. " ) } if (usethis_proj_is_not_wd) { ui_todo( " Your working directory is not the same as the active usethis project. Set working directory to the project: {ui_code('setwd(proj_get())')} Set project to working directory: {ui_code('proj_set(getwd())')} " ) } if (rstudio_proj_is_not_wd) { ui_todo( " Your working directory is not the same as the active RStudio Project. Set working directory to the Project: {ui_code('setwd(rstudioapi::getActiveProject())')} " ) } if (usethis_proj_is_not_rstudio_proj) { ui_todo( " Your active RStudio Project is not the same as the active usethis project. Set usethis project to RStudio Project: \\ {ui_code('proj_set(rstudioapi::getActiveProject())')} Restart RStudio in the usethis project: \\ {ui_code('rstudioapi::openProject(proj_get())')} Open the usethis project in a new instance of RStudio: \\ {ui_code('proj_activate(proj_get())')} " ) } invisible(x) } usethis/R/test.R0000644000175000017500000000477514132400710013400 0ustar nileshnilesh#' Sets up overall testing infrastructure #' #' Creates `tests/testthat/`, `tests/testthat.R`, and adds the testthat package #' to the Suggests field. Learn more in #' #' @param edition testthat edition to use. Defaults to the latest edition, i.e. #' the major version number of the currently installed testthat. #' @param parallel Should tests be run in parallel? This feature appeared in #' testthat 3.0.0; see for #' details and caveats. #' @seealso [use_test()] to create individual test files #' @export #' @examples #' \dontrun{ #' use_testthat() #' #' use_test() #' #' use_test("something-management") #' } use_testthat <- function(edition = NULL, parallel = FALSE) { use_testthat_impl(edition, parallel = parallel) ui_todo( "Call {ui_code('use_test()')} to initialize a basic test file and open it \\ for editing." ) } use_testthat_impl <- function(edition = NULL, parallel = FALSE) { check_installed("testthat") if (utils::packageVersion("testthat") < "2.1.0") { ui_stop("testthat 2.1.0 or greater needed. 
Please install before re-trying") } if (is_package()) { edition <- check_edition(edition) use_dependency("testthat", "Suggests", paste0(edition, ".0.0")) use_description_field("Config/testthat/edition", edition, overwrite = TRUE) if (parallel) { use_description_field("Config/testthat/parallel", "true", overwrite = TRUE) } else { desc::desc_del("Config/testthat/parallel", file = proj_get()) } } else { if (!is.null(edition)) { ui_stop("Can't declare testthat edition outside of a package") } } use_directory(path("tests", "testthat")) use_template( "testthat.R", save_as = path("tests", "testthat.R"), data = list(name = project_name()) ) } check_edition <- function(edition = NULL) { version <- utils::packageVersion("testthat")[[1, c(1, 2)]] if (version[[2]] == "99") { version <- version[[1]] + 1L } else { version <- version[[1]] } if (is.null(edition)) { version } else { if (!is.numeric(edition) || length(edition) != 1) { ui_stop("`edition` must be a single number") } if (edition > version) { vers <- utils::packageVersion("testthat") ui_stop("`edition` ({edition}) not available in installed testthat ({vers})") } as.integer(edition) } } uses_testthat <- function() { paths <- proj_path(c(path("inst", "tests"), path("tests", "testthat"))) any(dir_exists(paths)) } usethis/R/use_import_from.R0000644000175000017500000000420514131622147015627 0ustar nileshnilesh#' Import a function from another package #' #' @description #' `use_import_from()` imports a function from another package by adding the #' roxygen2 `@importFrom` tag to the package-level documentation (which can be #' created with [`use_package_doc()`]). Importing a function from another #' package allows you to refer to it without a namespace (e.g., `fun()` instead #' of `package::fun()`). #' #' `use_import_from()` also re-documents the NAMESPACE, and re-load the current #' package. This ensures that `fun` is immediately available in your development #' session. #' #' @param package Package name #' @param fun A vector of function names #' @param load Logical. Re-load with [`pkgload::load_all()`]? #' @return #' Invisibly, `TRUE` if the package document has changed, `FALSE` if not. #' @export #' @examples #' \dontrun{ #' use_import_from("usethis", "ui_todo") #' } use_import_from <- function(package, fun, load = is_interactive()) { if (!is_string(package)) { ui_stop("{ui_code('package')} must be a single string") } check_is_package("use_import_from()") check_uses_roxygen("use_import_from()") check_installed(package) check_has_package_doc("use_import_from()") check_functions_exist(package, fun) use_dependency(package, "Imports") changed <- roxygen_ns_append(glue("@importFrom {package} {fun}")) if (changed) { roxygen_update_ns(load) } invisible(changed) } check_functions_exist <- function(package, fun) { purrr::walk2(package, fun, check_fun_exists) } check_fun_exists <- function(package, fun) { if (exists(fun, envir = asNamespace(package))) { return() } name <- paste0(package, "::", fun, "()") ui_stop("Can't find {ui_code(name)}") } check_has_package_doc <- function(whos_asking) { if (has_package_doc()) { return(invisible(TRUE)) } msg <- c( "{ui_code(whos_asking)} requires package-level documentation.", "Would you like to add it now?" 
) if (is_interactive() && ui_yeah(msg)) { use_package_doc() } else { ui_stop(c( "{ui_code(whos_asking)} requires package docs", "You can add it by running {ui_code('use_package_doc()')}" )) } invisible(TRUE) } usethis/R/create.R0000644000175000017500000003074714153723057013701 0ustar nileshnilesh#' Create a package or project #' #' @description #' These functions create an R project: #' * `create_package()` creates an R package #' * `create_project()` creates a non-package project, i.e. a data analysis #' project #' #' Both functions can be called on an existing project; you will be asked before #' any existing files are changed. #' #' @inheritParams use_description #' @param path A path. If it exists, it is used. If it does not exist, it is #' created, provided that the parent path exists. #' @param roxygen Do you plan to use roxygen2 to document your package? #' @param rstudio If `TRUE`, calls [use_rstudio()] to make the new package or #' project into an [RStudio #' Project](https://support.rstudio.com/hc/en-us/articles/200526207-Using-Projects). #' If `FALSE` and a non-package project, a sentinel `.here` file is placed so #' that the directory can be recognized as a project by the #' [here](https://here.r-lib.org) or #' [rprojroot](https://rprojroot.r-lib.org) packages. #' @param open If `TRUE`, [activates][proj_activate()] the new project: #' #' * If RStudio desktop, the package is opened in a new session. #' * If on RStudio server, the current RStudio project is activated. #' * Otherwise, the working directory and active project is changed. #' #' @return Path to the newly created project or package, invisibly. #' @seealso [create_tidy_package()] is a convenience function that extends #' `create_package()` by immediately applying as many of the tidyverse #' development conventions as possible. 
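#'
#' A minimal sketch of typical usage (both paths are hypothetical):
#'
#' ```
#' create_package("~/rpackages/mypackage")                    # hypothetical path
#'
#' create_project("~/analyses/flights-eda", rstudio = FALSE)  # hypothetical path
#' ```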
#' @export create_package <- function(path, fields = list(), rstudio = rstudioapi::isAvailable(), roxygen = TRUE, check_name = TRUE, open = rlang::is_interactive()) { path <- user_path_prep(path) check_path_is_directory(path_dir(path)) name <- path_file(path_abs(path)) if (check_name) { check_package_name(name) } challenge_nested_project(path_dir(path), name) challenge_home_directory(path) create_directory(path) local_project(path, force = TRUE) use_directory("R") use_description(fields, check_name = FALSE, roxygen = roxygen) use_namespace(roxygen = roxygen) if (rstudio) { use_rstudio() } if (open) { if (proj_activate(proj_get())) { # working directory/active project already set; clear the scheduled # restoration of the original project withr::deferred_clear() } } invisible(proj_get()) } #' @export #' @rdname create_package create_project <- function(path, rstudio = rstudioapi::isAvailable(), open = rlang::is_interactive()) { path <- user_path_prep(path) name <- path_file(path_abs(path)) challenge_nested_project(path_dir(path), name) challenge_home_directory(path) create_directory(path) local_project(path, force = TRUE) use_directory("R") if (rstudio) { use_rstudio() } else { ui_done("Writing a sentinel file {ui_path('.here')}") ui_todo("Build robust paths within your project via {ui_code('here::here()')}") ui_todo("Learn more at ") file_create(proj_path(".here")) } if (open) { if (proj_activate(proj_get())) { # working directory/active project already set; clear the scheduled # restoration of the original project withr::deferred_clear() } } invisible(proj_get()) } #' Create a project from a GitHub repo #' #' @description #' Creates a new local project and Git repository from a repo on GitHub, by #' either cloning or #' [fork-and-cloning](https://docs.github.com/articles/fork-a-repo). In the #' fork-and-clone case, `create_from_github()` also does additional remote and #' branch setup, leaving you in the perfect position to make a pull request with #' [pr_init()], one of several [functions that work pull #' requests][pull-requests]. #' #' `create_from_github()` works best when your GitHub credentials are #' discoverable. See below for more about authentication. #' #' @template double-auth #' #' @seealso #' * [use_github()] to go the opposite direction, i.e. create a GitHub repo #' from your local repo #' * [git_protocol()] for background on `protocol` (HTTPS vs SSH) #' * [use_course()] to download a snapshot of all files in a GitHub repo, #' without the need for any local or remote Git operations #' #' @inheritParams create_package #' @param repo_spec A string identifying the GitHub repo in one of these forms: #' * Plain `OWNER/REPO` spec #' * Browser URL, such as `"https://github.com/OWNER/REPO"` #' * HTTPS Git URL, such as `"https://github.com/OWNER/REPO.git"` #' * SSH Git URL, such as `"git@github.com:OWNER/REPO.git"` #' #' In the case of a browser, HTTPS, or SSH URL, the `host` is extracted from #' the URL. The `REPO` part will be the name of the new local folder, which is #' also a project and Git repo. #' @inheritParams use_course #' @param fork If `FALSE`, we clone `repo_spec`. If `TRUE`, we fork #' `repo_spec`, clone that fork, and do additional set up favorable for #' future pull requests: #' * The source repo, `repo_spec`, is configured as the `upstream` remote, #' using the indicated `protocol`. #' * The local `DEFAULT` branch is set to track `upstream/DEFAULT`, where #' `DEFAULT` is typically `main` or `master`. 
It is also immediately pulled, #' to cover the case of a pre-existing, out-of-date fork. #' #' If `fork = NA` (the default), we check your permissions on `repo_spec`. If #' you can push, we set `fork = FALSE`, If you cannot, we set `fork = TRUE`. #' @param rstudio Initiate an [RStudio #' Project](https://support.rstudio.com/hc/en-us/articles/200526207-Using-Projects)? #' Defaults to `TRUE` if in an RStudio session and project has no #' pre-existing `.Rproj` file. Defaults to `FALSE` otherwise (but note that #' the cloned repo may already be an RStudio Project, i.e. may already have a #' `.Rproj` file). #' @inheritParams use_github #' #' @export #' @examples #' \dontrun{ #' create_from_github("r-lib/usethis") #' #' # repo_spec can be a URL #' create_from_github("https://github.com/r-lib/usethis") #' #' # a URL repo_spec also specifies the host (e.g. GitHub Enterprise instance) #' create_from_github("https://github.acme.com/OWNER/REPO") #' } create_from_github <- function(repo_spec, destdir = NULL, fork = NA, rstudio = NULL, open = rlang::is_interactive(), protocol = git_protocol(), host = NULL, auth_token = deprecated(), credentials = deprecated()) { if (lifecycle::is_present(auth_token)) { deprecate_warn_auth_token("create_from_github") } if (lifecycle::is_present(credentials)) { deprecate_warn_credentials("create_from_github") } check_protocol(protocol) parsed_repo_spec <- parse_repo_url(repo_spec) if (!is.null(parsed_repo_spec$host)) { repo_spec <- parsed_repo_spec$repo_spec host <- parsed_repo_spec$host } whoami <- suppressMessages(gh::gh_whoami(.api_url = host)) no_auth <- is.null(whoami) user <- if (no_auth) NULL else whoami$login hint <- code_hint_with_host("gh_token_help", host) if (no_auth && is.na(fork)) { ui_stop(" Unable to discover a GitHub personal access token Therefore, can't determine your permissions on {ui_value(repo_spec)} Therefore, can't decide if `fork` should be `TRUE` or `FALSE` You have two choices: 1. Make your token available (if in doubt, DO THIS): - Call {ui_code(hint)} for directions 2. 
Call {ui_code('create_from_github()')} again, but with \\ {ui_code('fork = FALSE')} - Only do this if you are absolutely sure you don't want to fork - Note you will NOT be in a position to make a pull request") } if (no_auth && isTRUE(fork)) { ui_stop(" Unable to discover a GitHub personal access token A token is required in order to fork {ui_value(repo_spec)} Call {ui_code(hint)} for help configuring a token") } # one of these is true: # - gh is discovering a token for `host` # - gh is NOT discovering a token, but `fork = FALSE`, so that's OK source_owner <- spec_owner(repo_spec) repo_name <- spec_repo(repo_spec) gh <- gh_tr(list(repo_owner = source_owner, repo_name = repo_name, .api_url = host)) repo_info <- gh("GET /repos/{owner}/{repo}") # 2020-10-14 GitHub has had some bugs lately around default branch # today, the POST payload, if I create a fork, mis-reports the default branch # it reports 'main', even though actual default branch is 'master' # therefore, we're consulting the source repo for this info default_branch <- repo_info$default_branch if (is.na(fork)) { fork <- !isTRUE(repo_info$permissions$push) fork_status <- glue("fork = {fork}") ui_done("Setting {ui_code(fork_status)}") } # fork is either TRUE or FALSE if (fork && identical(user, repo_info$owner$login)) { ui_stop(" Can't fork, because the authenticated user {ui_value(user)} \\ already owns the source repo {ui_value(repo_info$full_name)}") } destdir <- user_path_prep(destdir %||% conspicuous_place()) check_path_is_directory(destdir) challenge_nested_project(destdir, repo_name) repo_path <- path(destdir, repo_name) create_directory(repo_path) check_directory_is_empty(repo_path) if (fork) { ## https://developer.github.com/v3/repos/forks/#create-a-fork ui_done("Forking {ui_value(repo_info$full_name)}") upstream_url <- switch( protocol, https = repo_info$clone_url, ssh = repo_info$ssh_url ) repo_info <- gh("POST /repos/{owner}/{repo}/forks") } origin_url <- switch( protocol, https = repo_info$clone_url, ssh = repo_info$ssh_url ) ui_done("Cloning repo from {ui_value(origin_url)} into {ui_value(repo_path)}") gert::git_clone(origin_url, repo_path, verbose = FALSE) local_project(repo_path, force = TRUE) # schedule restoration of project # 2020-10-14 due to a GitHub bug, we are consulting the source repo for this # previously (and more naturally) we consulted the fork itself # default_branch <- repo_info$default_branch ui_info("Default branch is {ui_value(default_branch)}") if (fork) { ui_done("Adding {ui_value('upstream')} remote: {ui_value(upstream_url)}") use_git_remote("upstream", upstream_url) pr_merge_main() upstream_remref <- glue("upstream/{default_branch}") ui_done(" Setting remote tracking branch for local {ui_value(default_branch)} \\ branch to {ui_value(upstream_remref)}") gert::git_branch_set_upstream(upstream_remref, repo = git_repo()) config_key <- glue("remote.upstream.created-by") gert::git_config_set(config_key, "usethis::create_from_github", repo = git_repo()) } rstudio <- rstudio %||% rstudio_available() rstudio <- rstudio && !is_rstudio_project(proj_get()) if (rstudio) { use_rstudio() } if (open) { if (proj_activate(proj_get())) { # Working directory/active project changed; so don't undo on exit withr::deferred_clear() } } invisible(proj_get()) } # creates a backdoor we can exploit in tests allow_nested_project <- function() FALSE challenge_nested_project <- function(path, name) { if (!possibly_in_proj(path)) { return(invisible()) } # we mock this in a few tests, to allow a nested project if 
(allow_nested_project()) { return() } ui_line( "New project {ui_value(name)} is nested inside an existing project \\ {ui_path(path)}, which is rarely a good idea. If this is unexpected, the here package has a function, \\ {ui_code('here::dr_here()')} that reveals why {ui_path(path)} \\ is regarded as a project." ) if (ui_nope("Do you want to create anyway?")) { ui_stop("Aborting project creation.") } invisible() } challenge_home_directory <- function(path) { homes <- unique(c(path_home(), path_home_r())) if (!path %in% homes) { return(invisible()) } qualification <- if (is_windows()) { glue("a special directory, i.e. some applications regard it as ") } else { "" } ui_line(" {ui_path(path)} is {qualification}your home directory. It is generally a bad idea to create a new project here. You should probably create your new project in a subdirectory.") if (ui_nope("Do you want to create anyway?")) { ui_stop("Good move! Cancelling project creation.") } invisible() } usethis/R/readme.R0000644000175000017500000000515214131645451013660 0ustar nileshnilesh#' Create README files #' #' @description #' Creates skeleton README files with sections for #' * a high-level description of the package and its goals #' * R code to install from GitHub, if GitHub usage detected #' * a basic example #' #' Use `Rmd` if you want a rich intermingling of code and output. Use `md` for a #' basic README. `README.Rmd` will be automatically added to `.Rbuildignore`. #' The resulting README is populated with default YAML frontmatter and R fenced #' code blocks (`md`) or chunks (`Rmd`). #' #' If you use `Rmd`, you'll still need to render it regularly, to keep #' `README.md` up-to-date. `devtools::build_readme()` is handy for this. You #' could also use GitHub Actions to re-render `README.Rmd` every time you push. #' An example workflow can be found in the `examples/` directory here: #' . #' #' @inheritParams use_template #' @seealso The [important files #' section](https://r-pkgs.org/release.html#important-files) of [R #' Packages](https://r-pkgs.org). 
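#'
#' A sketch of the `README.Rmd` workflow described above:
#'
#' ```
#' use_readme_rmd()
#' # ... edit README.Rmd ...
#' devtools::build_readme()  # re-render so README.md stays in sync
#' ```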
#' @export #' @examples #' \dontrun{ #' use_readme_rmd() #' use_readme_md() #' } use_readme_rmd <- function(open = rlang::is_interactive()) { check_is_project() check_installed("rmarkdown") is_pkg <- is_package() repo_spec <- tryCatch(target_repo_spec(ask = FALSE), error = function(e) NULL) nm <- if (is_pkg) "Package" else "Project" data <- list2( !!nm := project_name(), Rmd = TRUE, on_github = !is.null(repo_spec), github_spec = repo_spec ) new <- use_template( if (is_pkg) "package-README" else "project-README", "README.Rmd", data = data, ignore = is_pkg, open = open ) if (!new) { return(invisible(FALSE)) } if (is_pkg && !data$on_github) { ui_todo(" Update {ui_path('README.Rmd')} to include installation instructions.") } if (uses_git()) { use_git_hook( "pre-commit", render_template("readme-rmd-pre-commit.sh") ) } invisible(TRUE) } #' @export #' @rdname use_readme_rmd use_readme_md <- function(open = rlang::is_interactive()) { check_is_project() is_pkg <- is_package() repo_spec <- tryCatch(target_repo_spec(ask = FALSE), error = function(e) NULL) nm <- if (is_pkg) "Package" else "Project" data <- list2( !!nm := project_name(), Rmd = FALSE, on_github = !is.null(repo_spec), github_spec = repo_spec ) new <- use_template( if (is_pkg) "package-README" else "project-README", "README.md", data = data, open = open ) if (is_pkg && !data$on_github) { ui_todo(" Update {ui_path('README.md')} to include installation instructions.") } invisible(new) } usethis/R/utils-roxygen.R0000644000175000017500000000071214117743363015256 0ustar nileshnilesh# functions to help reduce duplication and increase consistency in the docs # repo_spec ---- param_repo_spec <- function(...) { template <- glue(" @param repo_spec \\ Optional GitHub repo specification in this form: `owner/repo`. \\ This can usually be inferred from the GitHub remotes of active \\ project. ") dots <- list2(...) if (length(dots) > 0) { template <- c(template, dots) } glue_collapse(template, sep = " ") } usethis/R/write.R0000644000175000017500000000724414117743363013566 0ustar nileshnilesh#' Write into or over a file #' #' Helpers to write into or over a new or pre-existing file. Designed mostly for #' for internal use. File is written with UTF-8 encoding. #' #' @name write-this #' @param path Path to target file. It is created if it does not exist, but the #' parent directory must exist. #' @param lines Character vector of lines. For `write_union()`, these are lines #' to add to the target file, if not already present. For `write_over()`, #' these are the exact lines desired in the target file. #' @param quiet Logical. Whether to message about what is happening. #' @return Logical indicating whether a write occurred, invisibly. #' @keywords internal #' #' @examples #' \dontshow{ #' .old_wd <- setwd(tempdir()) #' } #' write_union("a_file", letters[1:3]) #' readLines("a_file") #' write_union("a_file", letters[1:5]) #' readLines("a_file") #' #' write_over("another_file", letters[1:3]) #' readLines("another_file") #' write_over("another_file", letters[1:3]) #' \dontrun{ #' ## will error if user isn't present to approve the overwrite #' write_over("another_file", letters[3:1]) #' } #' #' ## clean up #' file.remove("a_file", "another_file") #' \dontshow{ #' setwd(.old_wd) #' } NULL #' @describeIn write-this writes lines to a file, taking the union of what's #' already there, if anything, and some new lines. Note, there is no explicit #' promise about the line order. Designed to modify simple config files like #' `.Rbuildignore` and `.gitignore`. 
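#'
#' A small sketch with a typical config file: repeated calls never duplicate
#' lines that are already present.
#'
#' ```
#' write_union(".gitignore", c(".Rhistory", ".RData"))
#' write_union(".gitignore", ".RData")  # already present, so nothing is written
#' ```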
#' @export write_union <- function(path, lines, quiet = FALSE) { stopifnot(is.character(lines)) path <- user_path_prep(path) if (file_exists(path)) { existing_lines <- read_utf8(path) } else { existing_lines <- character() } new <- setdiff(lines, existing_lines) if (length(new) == 0) { return(invisible(FALSE)) } if (!quiet) { ui_done("Adding {ui_value(new)} to {ui_path(proj_rel_path(path))}") } all <- c(existing_lines, new) write_utf8(path, all) } #' @describeIn write-this writes a file with specific lines, creating it if #' necessary or overwriting existing, if proposed contents are not identical #' and user is available to give permission. #' @param contents Character vector of lines. #' @export write_over <- function(path, lines, quiet = FALSE) { stopifnot(is.character(lines), length(lines) > 0) path <- user_path_prep(path) if (same_contents(path, lines)) { return(invisible(FALSE)) } if (can_overwrite(path)) { if (!quiet) { ui_done("Writing {ui_path(path)}") } write_utf8(path, lines) } else { if (!quiet) { ui_done("Leaving {ui_path(path)} unchanged") } invisible(FALSE) } } read_utf8 <- function(path, n = -1L) { base::readLines(path, n = n, encoding = "UTF-8", warn = FALSE) } write_utf8 <- function(path, lines, append = FALSE, line_ending = NULL) { stopifnot(is.character(path)) stopifnot(is.character(lines)) file_mode <- if (append) "ab" else "wb" con <- file(path, open = file_mode, encoding = "utf-8") withr::defer(close(con)) if (is.null(line_ending)) { if (is_in_proj(path)) { # path is in active project line_ending <- proj_line_ending() } else if (possibly_in_proj(path)) { # path is some other project line_ending <- with_project(proj_find(path), proj_line_ending(), quiet = TRUE) } else { line_ending <- platform_line_ending() } } # convert embedded newlines lines <- gsub("\r?\n", line_ending, lines) base::writeLines(enc2utf8(lines), con, sep = line_ending, useBytes = TRUE) invisible(TRUE) } same_contents <- function(path, contents) { if (!file_exists(path)) { return(FALSE) } identical(read_utf8(path), contents) } usethis/R/data.R0000644000175000017500000001016514131622147013331 0ustar nileshnilesh#' Create package data #' #' `use_data()` makes it easy to save package data in the correct format. I #' recommend you save scripts that generate package data in `data-raw`: use #' `use_data_raw()` to set it up. You also need to document exported datasets. #' #' @param ... Unquoted names of existing objects to save. #' @param internal If `FALSE`, saves each object in its own `.rda` #' file in the `data/` directory. These data files bypass the usual #' export mechanism and are available whenever the package is loaded #' (or via [data()] if `LazyData` is not true). #' #' If `TRUE`, stores all objects in a single `R/sysdata.rda` file. #' Objects in this file follow the usual export rules. Note that this means #' they will be exported if you are using the common `exportPattern()` #' rule which exports all objects except for those that start with `.`. #' @param overwrite By default, `use_data()` will not overwrite existing #' files. If you really want to do so, set this to `TRUE`. #' @param compress Choose the type of compression used by [save()]. #' Should be one of "gzip", "bzip2", or "xz". #' @param version The serialization format version to use. The default, 2, was #' the default format from R 1.4.0 to 3.5.3. Version 3 became the default from #' R 3.6.0 and can only be read by R versions 3.5.0 and higher. 
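#'
#'   As a sketch, a package that only needs to support R >= 3.5.0 could opt
#'   into the newer serialization format:
#'
#'   ```
#'   use_data(x, version = 3)
#'   ```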
#' #' @seealso The [data chapter](https://r-pkgs.org/data.html) of [R #' Packages](https://r-pkgs.org). #' @export #' @examples #' \dontrun{ #' x <- 1:10 #' y <- 1:100 #' #' use_data(x, y) # For external use #' use_data(x, y, internal = TRUE) # For internal use #' } use_data <- function(..., internal = FALSE, overwrite = FALSE, compress = "bzip2", version = 2) { check_is_package("use_data()") objs <- get_objs_from_dots(dots(...)) use_dependency("R", "depends", "2.10") if (internal) { use_directory("R") paths <- path("R", "sysdata.rda") objs <- list(objs) } else { use_directory("data") paths <- path("data", objs, ext = "rda") if (!desc::desc_has_fields("LazyData")) { ui_done("Setting {ui_field('LazyData')} to \\ {ui_value('true')} in {ui_path('DESCRIPTION')}") desc::desc_set("LazyData", "true") } } check_files_absent(proj_path(paths), overwrite = overwrite) ui_done("Saving {ui_value(unlist(objs))} to {ui_value(paths)}") if (!internal) ui_todo("Document your data (see {ui_value('https://r-pkgs.org/data.html')})") envir <- parent.frame() mapply( save, list = objs, file = proj_path(paths), MoreArgs = list(envir = envir, compress = compress, version = version) ) invisible() } get_objs_from_dots <- function(.dots) { if (length(.dots) == 0L) { ui_stop("Nothing to save.") } is_name <- vapply(.dots, is.symbol, logical(1)) if (any(!is_name)) { ui_stop("Can only save existing named objects.") } objs <- vapply(.dots, as.character, character(1)) duplicated_objs <- which(stats::setNames(duplicated(objs), objs)) if (length(duplicated_objs) > 0L) { objs <- unique(objs) ui_warn("Saving duplicates only once: {ui_value(names(duplicated_objs))}") } objs } check_files_absent <- function(paths, overwrite) { if (overwrite) { return() } ok <- !file_exists(paths) if (all(ok)) { return() } ui_stop( " {ui_path(paths[!ok])} already exist., Use {ui_code('overwrite = TRUE')} to overwrite. " ) } #' @param name Name of the dataset to be prepared for inclusion in the package. #' @inheritParams use_template #' @rdname use_data #' @export #' @examples #' \dontrun{ #' use_data_raw("daisy") #' } use_data_raw <- function(name = "DATASET", open = rlang::is_interactive()) { stopifnot(is_string(name)) r_path <- path("data-raw", asciify(name), ext = "R") use_directory("data-raw", ignore = TRUE) use_template( "packagename-data-prep.R", save_as = r_path, data = list(name = name), ignore = FALSE, open = open ) ui_todo("Finish the data preparation script in {ui_value(r_path)}") ui_todo("Use {ui_code('usethis::use_data()')} to add prepared data to package") } usethis/R/rprofile.R0000644000175000017500000000317214131622147014242 0ustar nileshnilesh#' Helpers to make useful changes to `.Rprofile` #' #' @description #' All functions open your `.Rprofile` and give you the code you need to #' paste in. #' #' * `use_devtools()`: makes devtools available in interactive sessions. #' * `use_usethis()`: makes usethis available in interactive sessions. #' * `use_reprex()`: makes reprex available in interactive sessions. #' * `use_conflicted()`: makes conflicted available in interactive sessions. #' * `use_partial_warnings()`: warns on partial matches. 
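#'
#' These helpers don't edit `.Rprofile` themselves; they open it and show the
#' code to paste. For example, `use_devtools()` suggests a snippet along these
#' lines:
#'
#' ```
#' if (interactive()) {
#'   suppressMessages(require(devtools))
#' }
#' ```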
#' #' @name rprofile-helper NULL #' @rdname rprofile-helper #' @export use_conflicted <- function() { use_rprofile_package("conflicted") } #' @rdname rprofile-helper #' @export use_reprex <- function() { use_rprofile_package("reprex") } #' @rdname rprofile-helper #' @export use_usethis <- function() { use_rprofile_package("usethis") } #' @rdname rprofile-helper #' @export use_devtools <- function() { use_rprofile_package("devtools") } use_rprofile_package <- function(package) { check_installed(package) ui_todo( "Include this code in {ui_value('.Rprofile')} to make \\ {ui_field(package)} available in all interactive sessions." ) ui_code_block( " if (interactive()) {{ suppressMessages(require({package})) }} " ) edit_r_profile("user") } #' @rdname rprofile-helper #' @export use_partial_warnings <- function() { ui_todo( "Include this code in {ui_path('.Rprofile')} to warn on partial matches." ) ui_code_block( " options( warnPartialMatchArgs = TRUE, warnPartialMatchDollar = TRUE, warnPartialMatchAttr = TRUE ) " ) edit_r_profile("user") } usethis/R/addin.R0000644000175000017500000000166014117743363013507 0ustar nileshnilesh#' Add minimal RStudio Addin binding #' #' This function helps you add a minimal #' [RStudio Addin](https://rstudio.github.io/rstudioaddins/) binding to #' `inst/rstudio/addins.dcf`. #' #' @param addin Name of the addin function, which should be defined in the #' `R` folder. #' @inheritParams use_template #' #' @export use_addin <- function(addin = "new_addin", open = rlang::is_interactive()) { addin_dcf_path <- proj_path("inst", "rstudio", "addins.dcf") if (!file_exists(addin_dcf_path)) { create_directory(proj_path("inst", "rstudio")) file_create(addin_dcf_path) ui_done("Creating {ui_path(addin_dcf_path)}") } addin_info <- render_template("addins.dcf", data = list(addin = addin)) addin_info[length(addin_info) + 1] <- "" write_utf8(addin_dcf_path, addin_info, append = TRUE) ui_done("Adding binding to {ui_code(addin)} to addins.dcf.") if (open) { edit_file(addin_dcf_path) } invisible(TRUE) } usethis/R/cpp11.R0000644000175000017500000000214714117743363013355 0ustar nileshnilesh#' Use C++ via the cpp11 package #' #' Adds infrastructure needed to use the [cpp11](https://cpp11.r-lib.org) #' package, a header-only R package that helps R package developers handle R #' objects with C++ code. compiled code: #' * Creates `src/` #' * Adds cpp11 to `DESCRIPTION` #' * Creates `src/code.cpp`, an initial placeholder `.cpp` file #' #' @export use_cpp11 <- function() { check_is_package("use_cpp11()") check_uses_roxygen("use_cpp11()") use_src() use_dependency("cpp11", "LinkingTo") use_system_requirement("C++11") use_template( "code-cpp11.cpp", path("src", "code.cpp"), open = is_interactive() ) check_cpp_register_deps() invisible() } get_cpp_register_deps <- function() { res <- desc::desc(package = "cpp11")$get_field("Config/Needs/cpp11/cpp_register") strsplit(res, "[[:space:]]*,[[:space:]]*")[[1]] } check_cpp_register_deps <- function() { cpp_register_deps <- get_cpp_register_deps() installed <- map_lgl(cpp_register_deps, is_installed) if (!all(installed)) { ui_todo("Now install {ui_value(cpp_register_deps[!installed])} to use cpp11.") } } usethis/R/github-labels.R0000644000175000017500000002301014131645451015136 0ustar nileshnilesh#' Manage GitHub issue labels #' #' @description #' `use_github_labels()` can create new labels, update colours and descriptions, #' and optionally delete GitHub's default labels (if `delete_default = TRUE`). 
#' It will never delete labels that have associated issues. #' #' `use_tidy_github_labels()` calls `use_github_labels()` with tidyverse #' conventions powered by `tidy_labels()`, `tidy_labels_rename()`, #' `tidy_label_colours()` and `tidy_label_descriptions()`. #' #' @section Label usage: #' Labels are used as part of the issue-triage process, designed to minimise the #' time spent re-reading issues. The absence of a label indicates that an issue #' is new, and has yet to be triaged. #' * `reprex` indicates that an issue does not have a minimal reproducible #' example, and that a reply has been sent requesting one from the user. #' * `bug` indicates an unexpected problem or unintended behavior. #' * `feature` indicates a feature request or enhancement. #' * `docs` indicates an issue with the documentation. #' * `wip` indicates that someone is working on it or has promised to. #' * `good first issue` indicates a good issue for first-time contributors. #' * `help wanted` indicates that a maintainer wants help on an issue. #' #' @param repo_spec,host,auth_token `r lifecycle::badge("deprecated")`: These #' arguments are now deprecated and will be removed in the future. Any input #' provided via these arguments is not used. The target repo, host, and auth #' token are all now determined from the current project's Git remotes. #' @param labels A character vector giving labels to add. #' @param rename A named vector with names giving old names and values giving #' new names. #' @param colours,descriptions Named character vectors giving hexadecimal #' colours (like `e02a2a`) and longer descriptions. The names should match #' label names, and anything unmatched will be left unchanged. If you create a #' new label, and don't supply colours, it will be given a random colour. #' @param delete_default If `TRUE`, removes GitHub default labels that do not #' appear in the `labels` vector and that do not have associated issues. 
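#'
#' As a sketch, renaming uses the names of `rename` for the old labels and its
#' values for the new ones:
#'
#' ```
#' use_github_labels(
#'   rename = c("enhancement" = "feature", "docs" = "documentation")
#' )
#' ```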
#' #' @export #' @examples #' \dontrun{ #' # typical use in, e.g., a new tidyverse project #' use_github_labels(delete_default = TRUE) #' #' # create labels without changing colours/descriptions #' use_github_labels( #' labels = c("foofy", "foofier", "foofiest"), #' colours = NULL, #' descriptions = NULL #' ) #' #' # change descriptions without changing names/colours #' use_github_labels( #' labels = NULL, #' colours = NULL, #' descriptions = c("foofiest" = "the foofiest issue you ever saw") #' ) #' } use_github_labels <- function(repo_spec = deprecated(), labels = character(), rename = character(), colours = character(), descriptions = character(), delete_default = FALSE, host = deprecated(), auth_token = deprecated()) { if (lifecycle::is_present(repo_spec)) { deprecate_warn_repo_spec("use_github_labels") } if (lifecycle::is_present(host)) { deprecate_warn_host("use_github_labels") } if (lifecycle::is_present(auth_token)) { deprecate_warn_auth_token("use_github_labels") } tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_stop(" You don't seem to have push access for {ui_value(tr$repo_spec)}, which \\ is required to modify labels.") } gh <- gh_tr(tr) cur_labels <- gh("GET /repos/{owner}/{repo}/labels") label_attr <- function(x, l, mapper = map_chr) { mapper(l, x, .default = NA) } # Rename existing labels cur_label_names <- label_attr("name", cur_labels) to_rename <- intersect(cur_label_names, names(rename)) if (length(to_rename) > 0) { delta <- purrr::map2_chr( to_rename, rename[to_rename], ~ paste0(ui_value(.x), " -> ", ui_value(.y)) ) ui_done("Renaming labels: {paste0(delta, collapse = '\n')}") # Can't do this at label level, i.e. "old_label_name --> new_label_name" # Fails if "new_label_name" already exists # https://github.com/r-lib/usethis/issues/551 # Must first PATCH issues, then sort out labels issues <- map( to_rename, ~ gh("GET /repos/{owner}/{repo}/issues", labels = .x) ) issues <- purrr::flatten(issues) number <- map_int(issues, "number") old_labels <- map(issues, "labels") df <- data.frame( number = rep.int(number, lengths(old_labels)) ) df$labels <- purrr::flatten(old_labels) df$labels <- map_chr(df$labels, "name") # enact relabelling m <- match(df$labels, names(rename)) df$labels[!is.na(m)] <- rename[m[!is.na(m)]] df <- df[!duplicated(df), ] new_labels <- split(df$labels, df$number) purrr::iwalk( new_labels, ~ gh( "PATCH /repos/{owner}/{repo}/issues/{issue_number}", issue_number = .y, labels = I(.x) ) ) # issues have correct labels now; safe to edit labels themselves purrr::walk( to_rename, ~ gh("DELETE /repos/{owner}/{repo}/labels/{name}", name = .x) ) labels <- union(labels, setdiff(rename, cur_label_names)) } else { ui_info("No labels need renaming") } cur_labels <- gh("GET /repos/{owner}/{repo}/labels") cur_label_names <- label_attr("name", cur_labels) # Add missing labels if (all(labels %in% cur_label_names)) { ui_info("No new labels needed") } else { to_add <- setdiff(labels, cur_label_names) ui_done("Adding missing labels: {ui_value(to_add)}") for (label in to_add) { gh( "POST /repos/{owner}/{repo}/labels", name = label, color = purrr::pluck(colours, label, .default = random_colour()), description = purrr::pluck(descriptions, label, .default = "") ) } } cur_labels <- gh("GET /repos/{owner}/{repo}/labels") cur_label_names <- label_attr("name", cur_labels) # Update colours cur_label_colours <- set_names( label_attr("color", cur_labels), cur_label_names ) if (identical(cur_label_colours[names(colours)], colours)) { ui_info("Label colours are 
up-to-date") } else { to_update <- intersect(cur_label_names, names(colours)) ui_done("Updating colours: {ui_value(to_update)}") for (label in to_update) { gh( "PATCH /repos/{owner}/{repo}/labels/{name}", name = label, color = colours[[label]] ) } } # Update descriptions cur_label_descriptions <- set_names( label_attr("description", cur_labels), cur_label_names ) if (identical(cur_label_descriptions[names(descriptions)], descriptions)) { ui_info("Label descriptions are up-to-date") } else { to_update <- intersect(cur_label_names, names(descriptions)) ui_done("Updating descriptions: {ui_value(to_update)}") for (label in to_update) { gh( "PATCH /repos/{owner}/{repo}/labels/{name}", name = label, description = descriptions[[label]] ) } } # Delete unused default labels if (delete_default) { default <- map_lgl(cur_labels, "default") to_remove <- setdiff(cur_label_names[default], labels) if (length(to_remove) > 0) { ui_done("Removing default labels: {ui_value(to_remove)}") for (label in to_remove) { issues <- gh("GET /repos/{owner}/{repo}/issues", labels = label) if (length(issues) > 0) { ui_todo("Delete {ui_value(label)} label manually; it has associated issues") } else { gh("DELETE /repos/{owner}/{repo}/labels/{name}", name = label) } } } } } #' @export #' @rdname use_github_labels use_tidy_github_labels <- function() { use_github_labels( labels = tidy_labels(), rename = tidy_labels_rename(), colours = tidy_label_colours(), descriptions = tidy_label_descriptions(), delete_default = TRUE ) } #' @rdname use_github_labels #' @export tidy_labels <- function() { names(tidy_label_colours()) } #' @rdname use_github_labels #' @export tidy_labels_rename <- function() { c( # before = after "enhancement" = "feature", "question" = "reprex", "good first issue" = "good first issue :heart:", "help wanted" = "help wanted :heart:", "docs" = "documentation" ) } #' @rdname use_github_labels #' @export tidy_label_colours <- function() { # http://tristen.ca/hcl-picker/#/hlc/5/0.26/E0B3A2/E1B996 c( "breaking change :skull_and_crossbones:" = "E0B3A2", "bug" = "E0B3A2", "documentation" = "CBBAB8", "feature" = "B4C3AE", "upkeep" = "C2ACC0", "good first issue :heart:" = "CBBAB8", "help wanted :heart:" = "C5C295", "reprex" = "C5C295", "tidy-dev-day :nerd_face:" = "CBBAB8" ) } #' @rdname use_github_labels #' @export tidy_label_descriptions <- function() { c( "bug" = "an unexpected problem or unintended behavior", "feature" = "a feature request or enhancement", "upkeep" = "maintenance, infrastructure, and similar", "reprex" = "needs a minimal reproducible example", "wip" = "work in progress", "documentation" = "", "good first issue :heart:" = "good issue for first-time contributors", "help wanted :heart:" = "we'd love your help!", "breaking change :skull_and_crossbones:" = "API change likely to affect existing code", "tidy-dev-day :nerd_face:" = "Tidyverse Developer Day rstd.io/tidy-dev-day" ) } random_colour <- function() { format(as.hexmode(sample(256 * 256 * 256 - 1, 1)), width = 6) } usethis/R/citation.R0000644000175000017500000000047013676400413014233 0ustar nileshnilesh#' Create a CITATION template #' #' Use this if you want to encourage users of your package to cite an #' article or book. 
#' #' @export use_citation <- function() { use_directory("inst") use_template( "citation-template.R", path("inst", "CITATION"), data = package_data(), open = TRUE ) } usethis/R/usethis-package.R0000644000175000017500000000040214132400710015455 0ustar nileshnilesh#' @keywords internal #' @import fs #' @import rlang "_PACKAGE" ## usethis namespace: start #' @importFrom glue glue glue_collapse glue_data #' @importFrom lifecycle deprecated #' @importFrom purrr map map_chr map_lgl map_int ## usethis namespace: end NULL usethis/R/package.R0000644000175000017500000001223614153711115014012 0ustar nileshnilesh#' Depend on another package #' #' `use_package()` adds a CRAN package dependency to `DESCRIPTION` and offers a #' little advice about how to best use it. `use_dev_package()` adds a #' dependency on an in-development package, adding the dev repo to `Remotes` so #' it will be automatically installed from the correct location. #' #' @param package Name of package to depend on. #' @param type Type of dependency: must be one of "Imports", "Depends", #' "Suggests", "Enhances", or "LinkingTo" (or unique abbreviation). Matching #' is case insensitive. #' @param min_version Optionally, supply a minimum version for the package. #' Set to `TRUE` to use the currently installed version. #' @param remote By default, an `OWNER/REPO` GitHub remote is inserted. #' Optionally, you can supply a character string to specify the remote, e.g. #' `"gitlab::jimhester/covr"`, using any syntax supported by the [remotes #' package]( #' https://remotes.r-lib.org/articles/dependencies.html#other-sources). #' #' @seealso The [dependencies #' section](https://r-pkgs.org/description.html#dependencies) of [R #' Packages](https://r-pkgs.org). #' #' @export #' @examples #' \dontrun{ #' use_package("ggplot2") #' use_package("dplyr", "suggests") #' use_dev_package("glue") #' } use_package <- function(package, type = "Imports", min_version = NULL) { if (type == "Imports") { refuse_package(package, verboten = c("tidyverse", "tidymodels")) } changed <- use_dependency(package, type, min_version = min_version) if (changed) { how_to_use(package, type) } invisible() } #' @export #' @rdname use_package use_dev_package <- function(package, type = "Imports", remote = NULL) { refuse_package(package, verboten = c("tidyverse", "tidymodels")) changed <- use_dependency(package, type = type, min_version = TRUE) use_remote(package, remote) if (changed) { how_to_use(package, type) } invisible() } use_remote <- function(package, package_remote = NULL) { remotes <- desc::desc_get_remotes(proj_get()) if (any(grepl(package, remotes))) { return(invisible()) } if (is.null(package_remote)) { desc <- desc::desc(package = package) package_remote <- package_remote(desc) } ui_done(" Adding {ui_value(package_remote)} to {ui_field('Remotes')} field in \\ DESCRIPTION") remotes <- c(remotes, package_remote) desc::desc_set_remotes(remotes, file = proj_get()) invisible() } # Helpers ----------------------------------------------------------------- package_remote <- function(desc) { remote <- as.list(desc$get(c("RemoteType", "RemoteUsername", "RemoteRepo"))) is_recognized_remote <- all(map_lgl(remote, ~ is_string(.x) && !is.na(.x))) if (is_recognized_remote) { # non-GitHub remotes get a 'RemoteType::' prefix if (!identical(remote$RemoteType, "github")) { remote$RemoteUsername <- paste0(remote$RemoteType, "::", remote$RemoteUsername) } return(paste0(remote$RemoteUsername, "/", remote$RemoteRepo)) } package <- desc$get_field("Package") urls <- desc_urls(package, desc 
= desc) urls <- urls[urls$is_github, ] if (nrow(urls) < 1) { ui_stop("Cannot determine remote for {ui_value(package)}") } parsed <- parse_github_remotes(urls$url[[1]]) remote <- paste0(parsed$repo_owner, "/", parsed$repo_name) if (ui_yeah(" {ui_value(package)} was either installed from CRAN or local source. Based on DESCRIPTION, we propose the remote: {ui_value(remote)} Is this OK?")) { remote } else { ui_stop("Cannot determine remote for {ui_value(package)}") } } refuse_package <- function(package, verboten) { if (package %in% verboten) { code <- glue("use_package(\"{package}\", type = \"depends\")") ui_stop( "{ui_value(package)} is a meta-package and it is rarely a good idea to \\ depend on it. Please determine the specific underlying package(s) that \\ offer the function(s) you need and depend on that instead. \\ For data analysis projects that use a package structure but do not implement \\ a formal R package, adding {ui_value(package)} to Depends is a \\ reasonable compromise. Call {ui_code(code)} to achieve this. " ) } invisible(package) } how_to_use <- function(package, type) { types <- tolower(c("Imports", "Depends", "Suggests", "Enhances", "LinkingTo")) type <- match.arg(tolower(type), types) switch(type, imports = ui_todo("Refer to functions with {ui_code(paste0(package, '::fun()'))}"), depends = ui_todo( "Are you sure you want {ui_field('Depends')}? \\ {ui_field('Imports')} is almost always the better choice." ), suggests = { code <- glue("requireNamespace(\"{package}\", quietly = TRUE)") ui_todo("Use {ui_code(code)} to test if package is installed") code <- glue("{package}::fun()") ui_todo("Then directly refer to functions with {ui_code(code)}") }, enhances = "", linkingto = show_includes(package) ) } show_includes <- function(package) { incl <- path_package("include", package = package) h <- dir_ls(incl, regexp = "[.](h|hpp)$") if (length(h) == 0) { return() } ui_todo("Possible includes are:") ui_code_block("#include <{path_file(h)}>", copy = FALSE) } usethis/R/make.R0000644000175000017500000000052614132400710013324 0ustar nileshnilesh#' Create Makefile #' #' `use_make()` adds a basic Makefile to the project root directory. #' #' @seealso The [documentation for GNU #' Make](https://www.gnu.org/software/make/manual/html_node/). #' @export use_make <- function() { use_template( "Makefile", data = list(name = project_name()) ) use_build_ignore("Makefile") } usethis/R/issue.R0000644000175000017500000001332214131645451013551 0ustar nileshnilesh#' Helpers for GitHub issues #' #' @description #' The `issue_*` family of functions allows you to perform common operations on #' GitHub issues from within R. They're designed to help you efficiently deal #' with large numbers of issues, particularly motivated by the challenges faced #' by the tidyverse team. #' #' * `issue_close_community()` closes an issue, because it's not a bug report or #' feature request, and points the author towards RStudio Community as a #' better place to discuss usage (). #' #' * `issue_reprex_needed()` labels the issue with the "reprex" label and #' gives the author some advice about what is needed. #' #' @section Saved replies: #' #' Unlike GitHub's "saved replies", these functions can: #' * Be shared between people #' * Perform other actions, like labelling, or closing #' * Have additional arguments #' * Include randomness (like friendly gifs) #' #' @param number Issue number #' @param reprex Does the issue also need a reprex? 
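#'   If `TRUE`, the closing comment also asks the author for a reprex; defaults to `FALSE`.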
#' #' @examples #' \dontrun{ #' issue_close_community(12, reprex = TRUE) #' #' issue_reprex_needed(241) #' } #' @name issue-this NULL #' @export #' @rdname issue-this issue_close_community <- function(number, reprex = FALSE) { tr <- target_repo(github_get = TRUE) if (!tr$can_push) { # https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#repository-access-for-each-permission-level # I have not found a way to detect triage permission via API. # It seems you just have to try? ui_line(" You don't seem to have push access for {ui_value(tr$repo_spec)}. Unless you have triage permissions, you won't be allowed to close an \\ issue.") if (ui_nope("Do you want to try anyway?")) { ui_stop("Aborting.") } } info <- issue_info(number, tr) issue <- issue_details(info) ui_done(" Closing issue {ui_value(issue$shorthand)} \\ ({ui_field(issue$author)}): {ui_value(issue$title)}") if (info$state == "closed") { ui_stop("Issue {number} is already closed") } reprex_insert <- glue(" But before you ask there, I'd suggest that you create a \\ [reprex](https://reprex.tidyverse.org/articles/reprex-dos-and-donts.html), \\ because that greatly increases your chances of getting help.") message <- glue( "Hi {issue$author},\n", "\n", "This issue doesn't appear to be a bug report or a specific feature ", "request, so it's more suitable for ", "[RStudio Community](https://community.rstudio.com). ", if (reprex) reprex_insert else "", "\n\n", "Thanks!" ) issue_comment_add(number, message = message, tr = tr) issue_edit(number, state = "closed", tr = tr) } #' @export #' @rdname issue-this issue_reprex_needed <- function(number) { tr <- target_repo(github_get = TRUE) if (!tr$can_push) { # https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#repository-access-for-each-permission-level # I can't find any way to detect triage permission via API. # It seems you just have to try? ui_line(" You don't seem to have push access for {ui_value(tr$repo_spec)}. Unless you have triage permissions, you won't be allowed to label an \\ issue.") if (ui_nope("Do you want to try anyway?")) { ui_stop("Aborting.") } } info <- issue_info(number, tr) labels <- map_chr(info$labels, "name") issue <- issue_details(info) if ("reprex" %in% labels) { ui_stop("Issue {number} already has 'reprex' label") } ui_done(" Labelling and commenting on issue {ui_value(issue$shorthand)} \\ ({ui_field(issue$author)}): {ui_value(issue$title)}") message <- glue(" Can you please provide a minimal reproducible example using the \\ [reprex](https://reprex.tidyverse.org) package? The goal of a reprex is to make it as easy as possible for me to \\ recreate your problem so that I can fix it.
If you've never made a minimal reprex before, there is lots of good advice \\ [here](https://reprex.tidyverse.org/articles/reprex-dos-and-donts.html).") issue_comment_add(number, message = message, tr = tr) issue_edit(number, labels = as.list(union(labels, "reprex")), tr = tr) } # low-level operations ---------------------------------------------------- issue_comment_add <- function(number, message, tr = NULL) { issue_gh( "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", number = number, body = message, tr = tr ) } issue_edit <- function(number, ..., tr = NULL) { issue_gh( "PATCH /repos/{owner}/{repo}/issues/{issue_number}", ..., number = number, tr = tr ) } issue_info <- function(number, tr = NULL) { issue_gh( "GET /repos/{owner}/{repo}/issues/{issue_number}", number = number, tr = tr ) } # Helpers ----------------------------------------------------------------- # Assumptions: # * Issue number is called `issue_number`; make sure to tweak `endpoint` if # necessary. # * The user-facing caller should pass information about the target repo, # because that is required to vet the GitHub remote config anyway. # The fallback to target_repo() is purely for development convenience. issue_gh <- function(endpoint, ..., number, tr = NULL) { tr <- tr %||% target_repo(github_get = NA) gh <- gh_tr(tr) out <- gh(endpoint, ..., issue_number = number) if (substr(endpoint, 1, 4) == "GET ") { out } else { invisible(out) } } issue_details <- function(info) { repo_dat <- parse_github_remotes(info$html_url) list( shorthand = glue( "{repo_dat$repo_owner}/{repo_dat$repo_name}/#{info$number}" ), author = glue("@{info$user$login}"), title = info$title ) } usethis/R/utils-ui.R0000644000175000017500000000055314131645451014176 0ustar nileshnilesh# opening act of an eventual transition away from the ui_*() functions and towards # the cli-mediated UI we're using in other packages usethis_abort <- function(message, ..., class = NULL, .envir = parent.frame()) { #cli::cli_div(theme = usethis_theme()) cli::cli_abort( message, class = c(class, "usethis_error"), .envir = .envir, ... ) } usethis/R/namespace.R0000644000175000017500000000137614117743363014370 0ustar nileshnilesh#' Use a basic `NAMESPACE` #' #' If `roxygen` is `TRUE` generates an empty `NAMESPACE` that exports nothing; #' you'll need to explicitly export functions with `@export`. If `roxygen` #' is `FALSE`, generates a default `NAMESPACE` that exports all functions #' except those that start with `.`. #' #' @param roxygen Do you plan to manage `NAMESPACE` with roxygen2? #' @seealso The [namespace chapter](https://r-pkgs.org/namespace.html) of #' [R Packages](https://r-pkgs.org). #' @export use_namespace <- function(roxygen = TRUE) { check_is_package("use_namespace()") path <- proj_path("NAMESPACE") if (roxygen) { write_over(path, c("# Generated by roxygen2: do not edit by hand", "")) } else { write_over(path, 'exportPattern("^[^\\\\.]")') } } usethis/R/git.R0000644000175000017500000003464114131645451013213 0ustar nileshnilesh#' Initialise a git repository #' #' `use_git()` initialises a Git repository and adds important files to #' `.gitignore`. If user consents, it also makes an initial commit. #' #' @param message Message to use for first commit. 
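#'   Defaults to `"Initial commit"`.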
#' @family git helpers #' @export #' @examples #' \dontrun{ #' use_git() #' } use_git <- function(message = "Initial commit") { needs_init <- !uses_git() if (needs_init) { ui_done("Initialising Git repo") git_init() } use_git_ignore(git_ignore_lines) if (git_uncommitted(untracked = TRUE)) { git_ask_commit(message, untracked = TRUE) } if (needs_init) { restart_rstudio("A restart of RStudio is required to activate the Git pane") } invisible(TRUE) } #' Add a git hook #' #' Sets up a git hook using the specified script. Creates the hook directory if #' needed, and sets correct permissions on the hook. #' #' @param hook Hook name. One of "pre-commit", "prepare-commit-msg", #' "commit-msg", "post-commit", "applypatch-msg", "pre-applypatch", #' "post-applypatch", "pre-rebase", "post-rewrite", "post-checkout", #' "post-merge", "pre-push", "pre-auto-gc". #' @param script Text of script to run #' @family git helpers #' @export use_git_hook <- function(hook, script) { check_uses_git() hook_path <- proj_path(".git", "hooks", hook) create_directory(path_dir(hook_path)) write_over(hook_path, script) file_chmod(hook_path, "0744") invisible() } #' Tell Git to ignore files #' #' @param ignores Character vector of ignores, specified as file globs. #' @param directory Directory relative to active project to set ignores #' @family git helpers #' @export use_git_ignore <- function(ignores, directory = ".") { write_union(proj_path(directory, ".gitignore"), ignores) rstudio_git_tickle() } #' Configure Git #' #' Sets Git options, for either the user or the project ("global" or "local", in #' Git terminology). Wraps [gert::git_config_set()] and #' [gert::git_config_global_set()]. To inspect Git config, see #' [gert::git_config()]. #' #' @param ... Name-value pairs, processed as #' <[`dynamic-dots`][rlang::dyn-dots]>. #' #' @return Invisibly, the previous values of the modified components, as a named #' list. #' @inheritParams edit #' #' @family git helpers #' @export #' @examples #' \dontrun{ #' # set the user's global user.name and user.email #' use_git_config(user.name = "Jane", user.email = "jane@example.org") #' #' # set the user.name and user.email locally, i.e. for current repo/project #' use_git_config( #' scope = "project", #' user.name = "Jane", #' user.email = "jane@example.org" #' ) #' } use_git_config <- function(scope = c("user", "project"), ...) { scope <- match.arg(scope) dots <- list2(...) stopifnot(is_dictionaryish(dots)) orig <- stats::setNames( vector(mode = "list", length = length(dots)), names(dots) ) for (i in seq_along(dots)) { nm <- names(dots)[[i]] vl <- dots[[i]] if (scope == "user") { orig[nm] <- git_cfg_get(nm, "global") %||% list(NULL) gert::git_config_global_set(nm, vl) } else { check_uses_git() orig[nm] <- git_cfg_get(nm, "local") %||% list(NULL) gert::git_config_set(nm, vl, repo = git_repo()) } } invisible(orig) } #' See or set the default Git protocol #' #' @description #' Git operations that address a remote use a so-called "transport protocol". #' usethis supports HTTPS and SSH. The protocol dictates the Git URL format used #' when usethis needs to configure the first GitHub remote for a repo: #' * `protocol = "https"` implies `https://github.com/<OWNER>/<REPO>.git` #' * `protocol = "ssh"` implies `git@@github.com:<OWNER>/<REPO>.git` #' #' Two helper functions are available: #' * `git_protocol()` reveals the protocol "in force". As of usethis v2.0.0, #' this defaults to "https". You can change this for the duration of the #' R session with `use_git_protocol()`.
Change the default for all R #' sessions with code like this in your `.Rprofile` (easily editable via #' [edit_r_profile()]): #' ``` #' options(usethis.protocol = "ssh") #' ``` #' * `use_git_protocol()` sets the Git protocol for the current R session #' #' This protocol only affects the Git URL for newly configured remotes. All #' existing Git remote URLs are always respected, whether HTTPS or SSH. #' #' @param protocol One of "https" or "ssh" #' #' @return The protocol, either "https" or "ssh" #' @export #' #' @examples #' \dontrun{ #' git_protocol() #' #' use_git_protocol("ssh") #' git_protocol() #' #' use_git_protocol("https") #' git_protocol() #' } git_protocol <- function() { protocol <- tolower(getOption("usethis.protocol", "unset")) if (identical(protocol, "unset")) { ui_info("Defaulting to {ui_value('https')} Git protocol") protocol <- "https" } else { check_protocol(protocol) } options("usethis.protocol" = protocol) getOption("usethis.protocol") } #' @rdname git_protocol #' @export use_git_protocol <- function(protocol) { options("usethis.protocol" = protocol) invisible(git_protocol()) } check_protocol <- function(protocol) { if (!is_string(protocol) || !(tolower(protocol) %in% c("https", "ssh"))) { options(usethis.protocol = NULL) ui_stop(" {ui_code('protocol')} must be either {ui_value('https')} or \\ {ui_value('ssh')}") } invisible() } #' Configure and report Git remotes #' #' Two helpers are available: #' * `use_git_remote()` sets the remote associated with `name` to `url`. #' * `git_remotes()` reports the configured remotes, similar to #' `git remote -v`. #' #' @param name A string giving the short name of a remote. #' @param url A string giving the url of a remote. #' @param overwrite Logical. Controls whether an existing remote can be #' modified. #' #' @return Named list of Git remotes. #' @export #' #' @examples #' \dontrun{ #' # see current remotes #' git_remotes() #' #' # add new remote named 'foo', a la `git remote add <name> <url>` #' use_git_remote(name = "foo", url = "https://github.com/<OWNER>/<REPO>.git") #' #' # remove existing 'foo' remote, a la `git remote remove <name>` #' use_git_remote(name = "foo", url = NULL, overwrite = TRUE) #' #' # change URL of remote 'foo', a la `git remote set-url <name> <newurl>` #' use_git_remote( #' name = "foo", #' url = "https://github.com/<OWNER>/<REPO>.git", #' overwrite = TRUE #' ) #' #' # Scenario: Fix remotes when you cloned someone's repo, but you should #' # have fork-and-cloned (in order to make a pull request). #' #' # Store origin = main repo's URL, e.g., "git@github.com:<OWNER>/<REPO>.git" #' upstream_url <- git_remotes()[["origin"]] #' #' # IN THE BROWSER: fork the main GitHub repo and get your fork's remote URL #' my_url <- "git@github.com:<ME>/<REPO>.git" #' #' # Rotate the remotes #' use_git_remote(name = "origin", url = my_url) #' use_git_remote(name = "upstream", url = upstream_url) #' git_remotes() #' #' # Scenario: Add upstream remote to a repo that you fork-and-cloned, so you #' # can pull upstream changes. #' # Note: If you fork-and-clone via `usethis::create_from_github()`, this is #' # done automatically!
#' #' # Get URL of main GitHub repo, probably in the browser #' upstream_url <- "git@github.com:/.git" #' use_git_remote(name = "upstream", url = upstream_url) #' } use_git_remote <- function(name = "origin", url, overwrite = FALSE) { stopifnot(is_string(name)) stopifnot(is.null(url) || is_string(url)) stopifnot(is_true(overwrite) || is_false(overwrite)) remotes <- git_remotes() repo <- git_repo() if (name %in% names(remotes) && !overwrite) { ui_stop(" Remote {ui_value(name)} already exists. Use \\ {ui_code('overwrite = TRUE')} to edit it anyway.") } if (name %in% names(remotes)) { if (is.null(url)) { gert::git_remote_remove(remote = name, repo = repo) } else { gert::git_remote_set_url(url = url, remote = name, repo = repo) } } else if (!is.null(url)) { gert::git_remote_add(url = url, name = name, repo = repo) } invisible(git_remotes()) } #' @rdname use_git_remote #' @export git_remotes <- function() { x <- gert::git_remote_list(repo = git_repo()) if (nrow(x) == 0) { return(NULL) } stats::setNames(as.list(x$url), x$name) } # unexported function to improve my personal quality of life git_clean <- function() { if (!is_interactive() || !uses_git()) { return(invisible()) } st <- gert::git_status(staged = FALSE, repo = git_repo()) paths <- st[st$status == "new", ][["file"]] n <- length(paths) if (n == 0) { ui_info("Found no untracked files") return(invisible()) } paths <- sort(paths) ui_paths <- map_chr(paths, ui_path) if (n > 10) { ui_paths <- c(ui_paths[1:10], "...") } if (n == 1) { file_hint <- "There is 1 untracked file:" } else { file_hint <- "There are {n} untracked files:" } ui_line(c( file_hint, paste0("* ", ui_paths) )) if (ui_yeah(" Do you want to remove {if (n == 1) 'it' else 'them'}?", yes = "yes", no = "no", shuffle = FALSE)) { file_delete(paths) ui_done("{n} file(s) deleted") } rstudio_git_tickle() invisible() } #' Git/GitHub sitrep #' #' Get a situation report on your current Git/GitHub status. Useful for #' diagnosing problems. [git_vaccinate()] adds some basic R- and RStudio-related #' entries to the user-level git ignore file. 
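#'
#' The report covers your global Git config, the GitHub host and PAT being used,
#' and, for the active project, its default branch, current branch, and GitHub
#' remote configuration.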
#' @export #' @examples #' \dontrun{ #' git_sitrep() #' } git_sitrep <- function() { ui_silence(try(proj_get(), silent = TRUE)) # git (global / user) -------------------------------------------------------- hd_line("Git config (global)") kv_line("Name", git_cfg_get("user.name", "global")) kv_line("Email", git_cfg_get("user.email", "global")) kv_line("Global (user-level) gitignore file", git_ignore_path("user")) vaccinated <- git_vaccinated() kv_line("Vaccinated", vaccinated) if (!vaccinated) { ui_info("See {ui_code('?git_vaccinate')} to learn more") } kv_line("Default Git protocol", git_protocol()) init_default_branch <- git_cfg_get("init.defaultBranch", where = "global") kv_line("Default initial branch name", init_default_branch) # github (global / user) ----------------------------------------------------- hd_line("GitHub") default_gh_host <- get_hosturl(default_api_url()) kv_line("Default GitHub host", default_gh_host) pat_sitrep(default_gh_host) # git and github for active project ------------------------------------------ hd_line("Git repo for current project") if (!proj_active()) { ui_info("No active usethis project") return(invisible()) } kv_line("Active usethis project", proj_get()) if (!uses_git()) { ui_info("Active project is not a Git repo") return(invisible()) } # local git config ----------------------------------------------------------- if (proj_active() && uses_git()) { local_user <- list( user.name = git_cfg_get("user.name", "local"), user.email = git_cfg_get("user.email", "local") ) if (!is.null(local_user$user.name) || !is.null(local_user$user.name)) { ui_info("This repo has a locally configured user") kv_line("Name", local_user$user.name) kv_line("Email", local_user$user.email) } } # default branch ------------------------------------------------------------- default_branch_sitrep() # current branch ------------------------------------------------------------- branch <- tryCatch(git_branch(), error = function(e) NULL) tracking_branch <- if (is.null(branch)) NA_character_ else git_branch_tracking() # TODO: can't really express with kv_line() helper branch <- if (is.null(branch)) "" else branch tracking_branch <- if (is.na(tracking_branch)) "" else tracking_branch # vertical alignment would make this nicer, but probably not worth it ui_bullet(glue(" Current local branch -> remote tracking branch: {ui_value(branch)} -> {ui_value(tracking_branch)}")) # GitHub remote config ------------------------------------------------------- cfg <- github_remote_config() repo_host <- cfg$host_url if (!is.na(repo_host) && repo_host != default_gh_host) { kv_line("Non-default GitHub host", repo_host) pat_sitrep(repo_host, scold_for_renviron = FALSE) } hd_line("GitHub remote configuration") purrr::walk(format(cfg), ui_bullet) invisible() } # TODO: when I really overhaul the UI, determine if I can just re-use the # git_default_branch() error messages in the sitrep # the main point is converting an error to an "oops" type of message default_branch_sitrep <- function() { tryCatch( kv_line("Default branch", git_default_branch()), error_default_branch = function(e) { if (has_name(e, "db_local")) { # FYI existence of db_local implies existence of db_source ui_oops(" Default branch mismatch between local repo and remote! 
{ui_value(e$db_source$name)} remote default branch: \\ {ui_value(e$db_source$default_branch)} Local default branch: {ui_value(e$db_local)} Call {ui_code('git_default_branch_rediscover()')} to resolve this.") } else if (has_name(e, "db_source")) { ui_oops(" Default branch mismatch between local repo and remote! {ui_value(e$db_source$name)} remote default branch: \\ {ui_value(e$db_source$default_branch)} Local repo has no branch by that name nor any other obvious candidates. Call {ui_code('git_default_branch_rediscover()')} to resolve this.") } else { ui_oops("Default branch cannot be determined.") } } ) } # Vaccination ------------------------------------------------------------- #' Vaccinate your global gitignore file #' #' Adds `.DS_Store`, `.Rproj.user`, `.Rdata`, `.Rhistory`, and `.httr-oauth` to #' your global (a.k.a. user-level) `.gitignore`. This is good practice as it #' decreases the chance that you will accidentally leak credentials to GitHub. #' `git_vaccinate()` also tries to detect and fix the situation where you have a #' global gitignore file, but it's missing from your global Git config. #' #' @export git_vaccinate <- function() { ensure_core_excludesFile() path <- git_ignore_path(scope = "user") if (!file_exists(path)) { ui_done("Creating the global (user-level) gitignore: {ui_path(path)}") } write_union(path, git_ignore_lines) } git_vaccinated <- function() { path <- git_ignore_path("user") if (is.null(path) || !file_exists(path)) { return(FALSE) } lines <- read_utf8(path) all(git_ignore_lines %in% lines) } git_ignore_lines <- c( ".Rproj.user", ".Rhistory", ".Rdata", ".httr-oauth", ".DS_Store" ) usethis/R/pr.R0000644000175000017500000010475414131645451013054 0ustar nileshnilesh#' Helpers for GitHub pull requests #' #' @description #' The `pr_*` family of functions is designed to make working with GitHub pull #' requests (PRs) as painless as possible for both contributors and package #' maintainers. #' #' To use the `pr_*` functions, your project must be a Git repo and have one of #' these GitHub remote configurations: #' * "ours": You can push to the GitHub remote configured as `origin` and it's #' not a fork. #' * "fork": You can push to the GitHub remote configured as `origin`, it's a #' fork, and its parent is configured as `upstream`. `origin` points to your #' **personal** copy and `upstream` points to the **source repo**. #' #' "Ours" and "fork" are two of several GitHub remote configurations examined in #' [Common remote setups](https://happygitwithr.com/common-remote-setups.html) #' in Happy Git and GitHub for the useR. #' #' The [Pull Request #' Helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html) #' article walks through the process of making a pull request with the `pr_*` #' functions. #' #' The `pr_*` functions also use your Git/GitHub credentials to carry out #' various remote operations; see below for more about auth. The `pr_*` #' functions also proactively check for agreement re: the default branch in your #' local repo and the source repo. See [git_default_branch()] for more. #' #' @template double-auth #' #' @section For contributors: #' To contribute to a package, first use `create_from_github("OWNER/REPO")`. #' This forks the source repository and checks out a local copy. #' #' Next use `pr_init()` to create a branch for your PR. It is best practice to #' never make commits to the default branch branch of a fork (usually named #' `main` or `master`), because you do not own it. 
A pull request should always #' come from a feature branch. It will be much easier to pull upstream changes #' from the fork parent if you only allow yourself to work in feature branches. #' It is also much easier for a maintainer to explore and extend your PR if you #' create a feature branch. #' #' Work locally, in your branch, making changes to files, and committing your #' work. Once you're ready to create the PR, run `pr_push()` to push your local #' branch to GitHub, and open a webpage that lets you initiate the PR (or draft #' PR). #' #' To learn more about the process of making a pull request, read the [Pull #' Request #' Helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html) #' vignette. #' #' If you are lucky, your PR will be perfect, and the maintainer will accept it. #' You can then run `pr_finish()` to delete your PR branch. In most cases, #' however, the maintainer will ask you to make some changes. Make the changes, #' then run `pr_push()` to update your PR. #' #' It's also possible that the maintainer will contribute some code to your PR: #' to get those changes back onto your computer, run `pr_pull()`. It can also #' happen that other changes have occurred in the package since you first #' created your PR. You might need to merge the default branch (usually named #' `main` or `master`) into your PR branch. Do that by running #' `pr_merge_main()`: this makes sure that your PR is compatible with the #' primary repo's main line of development. Both `pr_pull()` and #' `pr_merge_main()` can result in merge conflicts, so be prepared to resolve #' before continuing. #' #' @section For maintainers: #' To download a PR locally so that you can experiment with it, run #' `pr_fetch()` and select the PR or, if you already know its number, call #' `pr_fetch()`. If you make changes, run `pr_push()` to push them #' back to GitHub. After you have merged the PR, run `pr_finish()` to delete the #' local branch and remove the remote associated with the contributor's fork. #' #' @section Overview of all the functions: #' * `pr_init()`: Does a preparatory pull of the default branch from the source #' repo, to get a good start point. Creates and checks out a new branch. Nothing #' is pushed to or created on GitHub (that does not happen until the first time #' you call `pr_push()`). #' * `pr_resume()`: Resume work on a PR by switching to an existing local branch #' and pulling any changes from its upstream tracking branch, if it has one. If #' called with no arguments, up to 9 local branches are offered for interactive #' selection, with a preference for branches connected to PRs and for branches #' with recent activity. #' * `pr_fetch()`: Checks out a PR on the source repo for local exploration. If #' called with no arguments, up to 9 open PRs are offered for interactive #' selection. This can cause a new remote to be configured and a new local #' branch to be created. The local branch is configured to track its remote #' counterpart. The transport protocol (HTTPS vs SSH) for any new remote is #' inherited from the remote representing the source repo. `pr_fetch()` puts a #' maintainer in a position where they can push changes into an internal or #' external PR via `pr_push()`. #' * `pr_push()`: The first time it's called, a PR branch is pushed to GitHub #' and you're taken to a webpage where a new PR (or draft PR) can be created. #' This also sets up the local branch to track its remote counterpart. 
#' Subsequent calls to `pr_push()` make sure the local branch has all the remote #' changes and, if so, pushes local changes, thereby updating the PR. #' * `pr_pull()`: Pulls changes from the local branch's remote tracking branch. #' If a maintainer has extended your PR, this is how you bring those changes #' back into your local work. #' * `pr_merge_main()`: Pulls changes from the default branch of the source repo #' into the current local branch. This can be used when the local branch is the #' default branch or when it's a PR branch. #' * `pr_pause()`: Makes sure you're up-to-date with any remote changes in the #' PR. Then switches back to the default branch and pulls from the source repo. #' * `pr_view()`: Visits the PR associated with the current branch in the #' browser (default) or the specific PR identified by `number`. #' (FYI [browse_github_pulls()] is a handy way to visit the list of all PRs for #' the current project.) #' * `pr_forget()`: Does local clean up when the current branch is an actual or #' notional PR that you want to abandon. Maybe you initiated it yourself, via #' `pr_init()`, or you used `pr_fetch()` to explore a PR from GitHub. Only does #' *local* operations: does not update or delete any remote branches, nor does #' it close any PRs. Alerts the user to any uncommitted or unpushed work that is #' at risk of being lost. If user chooses to proceed, switches back to the #' default branch, pulls changes from source repo, and deletes local PR branch. #' Any associated Git remote is deleted, if the "forgotten" PR was the only #' branch using it. #' * `pr_finish()`: Does post-PR clean up, but does NOT actually merge or close #' a PR (maintainer should do this in the browser). If `number` is not given, #' infers the PR from the upstream tracking branch of the current branch. If #' `number` is given, it does not matter whether the PR exists locally. If PR #' exists locally, alerts the user to uncommitted or unpushed changes, then #' switches back to the default branch, pulls changes from source repo, and #' deletes local PR branch. If the PR came from an external fork, any associated #' Git remote is deleted, provided it's not in use by any other local branches. #' If the PR has been merged and user has permission, deletes the remote branch #' (this is the only remote operation that `pr_finish()` potentially does). #' #' @name pull-requests NULL #' @export #' @rdname pull-requests #' @param branch Name of a new or existing local branch. If creating a new #' branch, note this should usually consist of lower case letters, numbers, #' and `-`. pr_init <- function(branch) { check_string(branch) repo <- git_repo() if (gert::git_branch_exists(branch, local = TRUE, repo = repo)) { code <- glue("pr_resume(\"{branch}\")") ui_info(" Branch {ui_value(branch)} already exists, calling {ui_code(code)}") return(pr_resume(branch)) } # don't absolutely require PAT success, because we could be offline # or in another salvageable situation, e.g. need to configure PAT cfg <- github_remote_config(github_get = NA) check_for_bad_config(cfg) tr <- target_repo(cfg, ask = FALSE) maybe_good_configs <- c("maybe_ours_or_theirs", "maybe_fork") if (cfg$type %in% maybe_good_configs) { ui_line(' Unable to confirm the GitHub remote configuration is "pull request ready". You probably need to configure a personal access token for \\ {ui_value(tr$host)}. See {ui_code("gh_token_help()")} for help. 
(Or maybe we\'re just offline?)') if (ui_github_remote_config_wat(cfg)) { ui_oops("Cancelling.") return(invisible()) } } default_branch <- git_default_branch() challenge_non_default_branch( "Are you sure you want to create a PR branch based on a non-default branch?", default_branch = default_branch ) online <- is_online(tr$host) if (online) { # this is not pr_pull_source_override() because: # a) we may NOT be on default branch (although we probably are) # b) we didn't just switch to the branch we're on, therefore we have to # consider that the pull may be affected by uncommitted changes or a # merge current_branch <- git_branch() if (current_branch == default_branch) { # override for mis-configured forks, that have default branch tracking # the fork (origin) instead of the source (upstream) remref <- glue("{tr$remote}/{default_branch}") } else { remref <- git_branch_tracking(current_branch) } if (!is.na(remref)) { comparison <- git_branch_compare(current_branch, remref) if (comparison$remote_only > 0) { challenge_uncommitted_changes(untracked = TRUE) } ui_done("Pulling changes from {ui_value(remref)}.") git_pull(remref = remref, verbose = FALSE) } } else { ui_info(" Unable to pull changes for current branch, since we are offline.") } ui_done("Creating and switching to local branch {ui_value(branch)}.") gert::git_branch_create(branch, repo = repo) config_key <- glue("branch.{branch}.created-by") gert::git_config_set(config_key, value = "usethis::pr_init", repo = repo) ui_todo("Use {ui_code('pr_push()')} to create a PR.") invisible() } #' @export #' @rdname pull-requests pr_resume <- function(branch = NULL) { repo <- git_repo() if (is.null(branch)) { ui_info(" No branch specified ... looking up local branches and associated PRs.") default_branch <- git_default_branch() branch <- choose_branch(exclude = default_branch) if (is.null(branch)) { ui_oops("Repo doesn't seem to have any non-default branches.") return(invisible()) } if (length(branch) == 0) { ui_oops("No branch selected, exiting.") return(invisible()) } } check_string(branch) if (!gert::git_branch_exists(branch, local = TRUE, repo = repo)) { code <- glue("pr_init(\"{branch}\")") ui_stop(" No branch named {ui_value(branch)} exists. Call {ui_code(code)} to create a new PR branch.") } challenge_uncommitted_changes(untracked = TRUE) ui_done("Switching to branch {ui_value(branch)}.") gert::git_branch_checkout(branch, repo = repo) git_pull() ui_todo("Use {ui_code('pr_push()')} to create or update PR.") invisible() } #' @export #' @rdname pull-requests #' @param number Number of PR. #' @param target Which repo to target? This is only a question in the case of a #' fork. In a fork, there is some slim chance that you want to consider pull #' requests against your fork (the primary repo, i.e. `origin`) instead of #' those against the source repo (i.e. `upstream`, which is the default). #' #' @examples #' \dontrun{ #' pr_fetch(123) #' } pr_fetch <- function(number = NULL, target = c("source", "primary")) { repo <- git_repo() tr <- target_repo(github_get = NA, role = target, ask = FALSE) challenge_uncommitted_changes() if (is.null(number)) { ui_info("No PR specified ... 
looking up open PRs.") pr <- choose_pr(tr = tr) if (is.null(pr)) { ui_oops("No open PRs found for {ui_value(tr$repo_spec)}.") return(invisible()) } if (min(lengths(pr)) == 0) { ui_oops("No PR selected, exiting.") return(invisible()) } } else { pr <- pr_get(number = number, tr = tr) } if (is.na(pr$pr_repo_owner)) { ui_stop(" The repo or branch where PR #{pr$pr_number} originates seems to have been \\ deleted.") } pr_user <- glue("@{pr$pr_user}") ui_done(" Checking out PR {ui_value(pr$pr_string)} ({ui_field(pr_user)}): \\ {ui_value(pr$pr_title)}.") if (pr$pr_from_fork && isFALSE(pr$maintainer_can_modify)) { ui_info(" Note that user does NOT allow maintainer to modify this PR at this \\ time, although this can be changed.") } remote <- github_remote_list(pr$pr_remote) if (nrow(remote) == 0) { url <- switch(tr$protocol, https = pr$pr_https_url, ssh = pr$pr_ssh_url) ui_done("Adding remote {ui_value(pr$pr_remote)} as {ui_value(url)}.") gert::git_remote_add(url = url, name = pr$pr_remote, repo = repo) config_key <- glue("remote.{pr$pr_remote}.created-by") gert::git_config_set(config_key, "usethis::pr_fetch", repo = repo) } pr_remref <- glue_data(pr, "{pr_remote}/{pr_ref}") gert::git_fetch( remote = pr$pr_remote, refspec = pr$pr_ref, repo = repo, verbose = FALSE ) if (is.na(pr$pr_local_branch)) { pr$pr_local_branch <- if (pr$pr_from_fork) sub(":", "-", pr$pr_label) else pr$pr_ref } # Create local branch, if necessary, and switch to it ---- if (!gert::git_branch_exists(pr$pr_local_branch, local = TRUE, repo = repo)) { ui_done(" Creating and switching to local branch {ui_value(pr$pr_local_branch)}.") ui_done("Setting {ui_value(pr_remref)} as remote tracking branch.") gert::git_branch_create(pr$pr_local_branch, ref = pr_remref, repo = repo) config_key <- glue("branch.{pr$pr_local_branch}.created-by") gert::git_config_set(config_key, "usethis::pr_fetch", repo = repo) config_url <- glue("branch.{pr$pr_local_branch}.pr-url") gert::git_config_set(config_url, pr$pr_html_url, repo = repo) return(invisible()) } # Local branch pre-existed; make sure tracking branch is set, switch, & pull ui_done("Switching to branch {ui_value(pr$pr_local_branch)}.") gert::git_branch_checkout(pr$pr_local_branch, repo = repo) config_url <- glue("branch.{pr$pr_local_branch}.pr-url") gert::git_config_set(config_url, pr$pr_html_url, repo = repo) pr_branch_ours_tracking <- git_branch_tracking(pr$pr_local_branch) if (is.na(pr_branch_ours_tracking) || pr_branch_ours_tracking != pr_remref) { ui_done("Setting {ui_value(pr_remref)} as remote tracking branch.") gert::git_branch_set_upstream(pr_remref, repo = repo) } git_pull(verbose = FALSE) } #' @export #' @rdname pull-requests pr_push <- function() { repo <- git_repo() cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg) default_branch <- git_default_branch() check_pr_branch(default_branch) challenge_uncommitted_changes() branch <- git_branch() remref <- git_branch_tracking(branch) if (is.na(remref)) { # this is the first push if (cfg$type == "fork" && cfg$upstream$can_push && is_interactive()) { choices <- c( origin = glue( "{cfg$origin$repo_spec} = {ui_value('origin')} (external PR)"), upstream = glue( "{cfg$upstream$repo_spec} = {ui_value('upstream')} (internal PR)") ) title <- glue("Which repo do you want to push to?") choice <- utils::menu(choices, graphics = FALSE, title = title) remote <- names(choices)[[choice]] } else { remote <- "origin" } ui_done(" Pushing local {ui_value(branch)} branch to {ui_value(remote)} remote.") gert::git_push(remote = remote, verbose 
= FALSE, repo = repo) } else { check_branch_pulled(use = "pr_pull()") ui_done("Pushing local {ui_value(branch)} branch to {ui_value(remref)}.") gert::git_push( remote = remref_remote(remref), refspec = glue("refs/heads/{branch}:refs/heads/{remref_branch(remref)}"), verbose = FALSE, repo = repo ) } # Prompt to create PR if does not exist yet tr <- target_repo(cfg, ask = FALSE) pr <- pr_find(branch, tr = tr) if (is.null(pr)) { pr_create() } else { ui_todo(" View PR at {ui_value(pr$pr_html_url)} or call {ui_code('pr_view()')}.") } invisible() } #' @export #' @rdname pull-requests pr_pull <- function() { cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg) default_branch <- git_default_branch() check_pr_branch(default_branch) challenge_uncommitted_changes() git_pull() # note associated PR in git config, if applicable tr <- target_repo(cfg, ask = FALSE) pr_find(tr = tr) invisible(TRUE) } #' @export #' @rdname pull-requests pr_merge_main <- function() { tr <- target_repo(github_get = TRUE, ask = FALSE) challenge_uncommitted_changes() remref <- glue("{tr$remote}/{tr$default_branch}") ui_done("Pulling changes from {ui_value(remref)}.") git_pull(remref, verbose = FALSE) } #' @export #' @rdname pull-requests pr_view <- function(number = NULL, target = c("source", "primary")) { tr <- target_repo(github_get = NA, role = target, ask = FALSE) url <- NULL if (is.null(number)) { branch <- git_branch() default_branch <- git_default_branch() if (branch != default_branch) { url <- pr_url(branch = branch, tr = tr) if (is.null(url)) { ui_info(" Current branch ({ui_value(branch)}) does not appear to be \\ connected to a PR.") } else { number <- sub("^.+pull/", "", url) ui_info(" Current branch ({ui_value(branch)}) is connected to PR #{number}.") } } } else { pr <- pr_get(number = number, tr = tr) url <- pr$pr_html_url } if (is.null(url)) { ui_info("No PR specified ... looking up open PRs.") pr <- choose_pr(tr = tr) if (is.null(pr)) { ui_oops("No open PRs found for {ui_value(tr$repo_spec)}.") return(invisible()) } if (min(lengths(pr)) == 0) { ui_oops("No PR selected, exiting.") return(invisible()) } url <- pr$pr_html_url } view_url(url) } #' @export #' @rdname pull-requests pr_pause <- function() { # intentionally naive selection of target repo tr <- target_repo(github_get = FALSE, ask = FALSE) ui_done("Switching back to the default branch.") default_branch <- git_default_branch() if (git_branch() == default_branch) { ui_info(" Already on this repo's default branch ({ui_value(default_branch)}), \\ nothing to do.") return(invisible()) } challenge_uncommitted_changes() # TODO: what happens here if offline? 
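  # make sure any remote changes to the PR have been pulled before switching away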
check_branch_pulled(use = "pr_pull()") ui_done("Switching back to default branch ({ui_value(default_branch)}).") gert::git_branch_checkout(default_branch, repo = git_repo()) pr_pull_source_override(tr = tr, default_branch = default_branch) } #' @export #' @rdname pull-requests pr_finish <- function(number = NULL, target = c("source", "primary")) { pr_clean(number = number, target = target, mode = "finish") } #' @export #' @rdname pull-requests pr_forget <- function() pr_clean(mode = "forget") # unexported helpers ---- # Removes local evidence of PRs that you're done with or wish you'd never # started or fetched # Only possible remote action is to delete the remote branch for a merged PR pr_clean <- function(number = NULL, target = c("source", "primary"), mode = c("finish", "forget")) { mode <- match.arg(mode) repo <- git_repo() tr <- target_repo(github_get = NA, role = target, ask = FALSE) default_branch <- git_default_branch() if (is.null(number)) { check_pr_branch(default_branch) pr <- pr_find(git_branch(), tr = tr, state = "all") # if the remote branch has already been deleted (probably post-merge), we # can't always reverse engineer what the corresponding local branch was, but # we already know it -- it's how we found the PR in the first place! if (!is.null(pr)) { pr$pr_local_branch <- pr$pr_local_branch %|% git_branch() } } else { pr <- pr_get(number = number, tr = tr) } pr_local_branch <- if (is.null(pr)) git_branch() else pr$pr_local_branch if (!is.na(pr_local_branch)) { if (pr_local_branch == git_branch()) { challenge_uncommitted_changes() } tracking_branch <- git_branch_tracking(pr_local_branch) if (is.na(tracking_branch)) { if (ui_nope(" Local branch {ui_value(pr_local_branch)} has no associated remote \\ branch. If we delete {ui_value(pr_local_branch)}, any work that exists only \\ on this branch may be hard for you to recover. Proceed anyway?")) { ui_oops("Cancelling.") return(invisible()) } } else { cmp <- git_branch_compare( branch = pr_local_branch, remref = tracking_branch ) if (cmp$local_only > 0 && ui_nope(" Local branch {ui_value(pr_local_branch)} has 1 or more commits \\ that have not been pushed to {ui_value(tracking_branch)}. If we delete {ui_value(pr_local_branch)}, this work may be hard \\ for you to recover. Proceed anyway?")) { ui_oops("Cancelling.") return(invisible()) } } } if (git_branch() != default_branch) { ui_done("Switching back to default branch ({ui_value(default_branch)}).") gert::git_branch_checkout(default_branch, force = TRUE, repo = repo) pr_pull_source_override(tr = tr, default_branch = default_branch) } if (!is.na(pr_local_branch)) { ui_done("Deleting local {ui_value(pr_local_branch)} branch.") gert::git_branch_delete(pr_local_branch, repo = repo) } if (is.null(pr)) { return(invisible()) } pr_branch_delete(pr) # delete remote, if we (usethis) added it AND no remaining tracking branches created_by <- git_cfg_get(glue("remote.{pr$pr_remote}.created-by")) if (is.null(created_by) || !grepl("^usethis::", created_by)) { return(invisible()) } branches <- gert::git_branch_list(local = TRUE, repo = repo) branches <- branches[!is.na(branches$upstream), ] if (sum(grepl(glue("^refs/remotes/{pr$pr_remote}"), branches$upstream)) == 0) { ui_done("Removing remote {ui_value(pr$pr_remote)}") gert::git_remote_remove(remote = pr$pr_remote, repo = repo) } invisible() } # Make sure to pull from upstream/DEFAULT (as opposed to origin/DEFAULT) if # we're in DEFAULT branch of a fork. 
I wish everyone set up DEFAULT to track the # DEFAULT branch in the source repo, but this protects us against sub-optimal # setup. pr_pull_source_override <- function(tr = NULL, default_branch = NULL) { # naive selection of target repo; calling function should analyse the config tr <- tr %||% target_repo(github_get = FALSE, ask = FALSE) # TODO: why does this not use a check_*() function, i.e. shared helper? # I guess to issue a specific error message? current_branch <- git_branch() default_branch <- default_branch %||% git_default_branch() if (current_branch != default_branch) { ui_stop(" Internal error: pr_pull_source_override() should only be used when on \\ default branch") } # guard against mis-configured forks, that have default branch tracking # the fork (origin) instead of the source (upstream) # TODO: should I just change the upstream tracking branch, i.e. fix it? remref <- glue("{tr$remote}/{default_branch}") if (is_online(tr$host)) { ui_done("Pulling changes from {ui_value(remref)}") git_pull(remref = remref, verbose = FALSE) } else { ui_info(" Can't reach {ui_value(tr$host)}, therefore unable to pull changes from \\ {ui_value(remref)}") } } pr_create <- function() { branch <- git_branch() tracking_branch <- git_branch_tracking(branch) remote <- remref_remote(tracking_branch) remote_dat <- github_remotes(remote, github_get = FALSE) ui_todo("Create PR at link given below") view_url(glue_data(remote_dat, "{host_url}/{repo_spec}/compare/{branch}")) } # retrieves 1 PR, if: # * we can establish a tracking relationship between `branch` and a PR branch # * we can get the user to choose 1 pr_find <- function(branch = git_branch(), tr = NULL, state = c("open", "closed", "all")) { # Have we done this before? Check if we've cached pr-url in git config. 
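  # The cached URL lives under the per-branch config key "branch.<branch>.pr-url",
  # which pr_fetch() (and pr_find() itself, below) writes once a branch has been
  # matched to a PR.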
config_url <- glue("branch.{branch}.pr-url") url <- git_cfg_get(config_url, where = "local") if (!is.null(url)) { return(pr_get(number = sub("^.+pull/", "", url), tr = tr)) } tracking_branch <- git_branch_tracking(branch) if (is.na(tracking_branch)) { return(NULL) } state <- match.arg(state) remote <- remref_remote(tracking_branch) remote_dat <- github_remotes(remote) pr_head <- glue("{remote_dat$repo_owner}:{remref_branch(tracking_branch)}") pr_dat <- pr_list(tr = tr, state = state, head = pr_head) if (nrow(pr_dat) == 0) { return(NULL) } if (nrow(pr_dat) > 1) { spec <- sub(":", "/", pr_head) ui_info("Multiple PRs are associated with {ui_value(spec)}.") pr_dat <- choose_pr(pr_dat = pr_dat) if (min(lengths(pr_dat)) == 0) { ui_stop(" One of these PRs must be specified explicitly or interactively: \\ {ui_value(paste0('#', pr_dat$pr_number))}") } } gert::git_config_set(config_url, pr_dat$pr_html_url, repo = git_repo()) as.list(pr_dat) } pr_url <- function(branch = git_branch(), tr = NULL, state = c("open", "closed", "all")) { state <- match.arg(state) pr <- pr_find(branch, tr = tr, state = state) if (is.null(pr)) { NULL } else { pr$pr_html_url } } pr_data_tidy <- function(pr) { out <- list( pr_number = pluck_int(pr, "number"), pr_title = pluck_chr(pr, "title"), pr_state = pluck_chr(pr, "state"), pr_user = pluck_chr(pr, "user", "login"), pr_created_at = pluck_chr(pr, "created_at"), pr_updated_at = pluck_chr(pr, "updated_at"), pr_merged_at = pluck_chr(pr, "merged_at"), pr_label = pluck_chr(pr, "head", "label"), # the 'repo' element of 'head' is NULL when fork has been deleted pr_repo_owner = pluck_chr(pr, "head", "repo", "owner", "login"), pr_ref = pluck_chr(pr, "head", "ref"), pr_repo_spec = pluck_chr(pr, "head", "repo", "full_name"), pr_from_fork = pluck_lgl(pr, "head", "repo", "fork"), # 'maintainer_can_modify' is only present when we GET one specific PR pr_maintainer_can_modify = pluck_lgl(pr, "maintainer_can_modify"), pr_https_url = pluck_chr(pr, "head", "repo", "clone_url"), pr_ssh_url = pluck_chr(pr, "head", "repo", "ssh_url"), pr_html_url = pluck_chr(pr, "html_url"), pr_string = glue(" {pluck_chr(pr, 'base', 'repo', 'full_name')}/#{pluck_int(pr, 'number')}") ) grl <- github_remote_list(these = NULL) m <- match(out$pr_repo_spec, grl$repo_spec) out$pr_remote <- if (is.na(m)) out$pr_repo_owner else grl$remote[m] pr_remref <- glue("{out$pr_remote}/{out$pr_ref}") gbl <- gert::git_branch_list(local = TRUE, repo = git_repo()) gbl <- gbl[!is.na(gbl$upstream), c("name", "upstream")] gbl$upstream <- sub("^refs/remotes/", "", gbl$upstream) m <- match(pr_remref, gbl$upstream) out$pr_local_branch <- if (is.na(m)) NA_character_ else gbl$name[m] # If the fork has been deleted, these are all NA # - Because pr$head$repo is NULL: # pr_repo_owner, pr_repo_spec, pr_from_fork, pr_https_url, pr_ssh_url # - Because derived from those above: # pr_remote, pr_remref pr_local_branch # I suppose one could already have a local branch, if you fetched the PR # before the fork got deleted. # But an initial pr_fetch() won't work if the fork has been deleted. # I'm willing to accept that the pr_*() functions don't necessarily address # the "deleted fork" scenario. It's relatively rare. 
# example: https://github.com/r-lib/httr/pull/634 out } pr_list <- function(tr = NULL, github_get = NA, state = c("open", "closed", "all"), head = NULL) { tr <- tr %||% target_repo(github_get = github_get, ask = FALSE) state <- match.arg(state) gh <- gh_tr(tr) safely_gh <- purrr::safely(gh, otherwise = NULL) out <- safely_gh( "GET /repos/{owner}/{repo}/pulls", state = state, head = head, .limit = Inf ) if (is.null(out$error)) { prs <- out$result } else { ui_oops("Unable to retrieve PRs for {ui_value(tr$repo_spec)}.") prs <- NULL } no_prs <- length(prs) == 0 if (no_prs) { prs <- list(list()) } out <- map(prs, pr_data_tidy) out <- map(out, ~ as.data.frame(.x, stringsAsFactors = FALSE)) out <- do.call(rbind, out) if (no_prs) { out[0, ] } else { pr_is_open <- out$pr_state == "open" rbind(out[pr_is_open, ], out[!pr_is_open, ]) } } # retrieves specific PR by number pr_get <- function(number, tr = NULL, github_get = NA) { tr <- tr %||% target_repo(github_get = github_get, ask = FALSE) gh <- gh_tr(tr) raw <- gh("GET /repos/{owner}/{repo}/pulls/{number}", number = number) pr_data_tidy(raw) } branches_with_no_upstream_or_github_upstream <- function(tr = NULL) { repo <- git_repo() gb_dat <- gert::git_branch_list(local = TRUE, repo = repo) gb_dat <- gb_dat[, c("name", "upstream", "updated")] gb_dat$remref <- sub("^refs/remotes/", "", gb_dat$upstream) gb_dat$upstream <- NULL gb_dat$remote <- remref_remote(gb_dat$remref) gb_dat$ref <- remref_branch(gb_dat$remref) gb_dat$cfg_pr_url <- map_chr( glue("branch.{gb_dat$name}.pr-url"), ~ git_cfg_get(.x, where = "local") %||% NA_character_ ) ghr <- github_remote_list(these = NULL)[["remote"]] gb_dat <- gb_dat[is.na(gb_dat$remref) | (gb_dat$remote %in% ghr), ] pr_dat <- pr_list(tr = tr) dat <- merge( x = gb_dat, y = pr_dat, by.x = "name", by.y = "pr_local_branch", all.x = TRUE ) dat <- dat[order(dat$pr_number, dat$pr_updated_at, dat$updated, decreasing = TRUE), ] missing_cfg <- is.na(dat$cfg_pr_url) & !is.na(dat$pr_html_url) purrr::walk2( glue("branch.{dat$name[missing_cfg]}.pr-url"), dat$pr_html_url[missing_cfg], ~ gert::git_config_set(.x, .y, repo = repo) ) dat } choose_branch <- function(exclude = character()) { if (!is_interactive()) { return(character()) } dat <- branches_with_no_upstream_or_github_upstream() dat <- dat[!dat$name %in% exclude, ] if (nrow(dat) == 0) { return() } prompt <- "Which branch do you want to checkout? (0 to exit)" if (nrow(dat) > 9) { branches_not_shown <- utils::tail(dat$name, -9) n <- length(branches_not_shown) dat <- dat[1:9, ] pre <- glue("{n} branch{if (n > 1) 'es' else ''} not listed: ") listing <- glue_collapse( branches_not_shown, sep = ", ", width = getOption("width") - nchar(pre) ) prompt <- glue(" {prompt} {pre}{listing}") } dat$pretty_user <- map(dat$pr_user, ~ glue("@{.x}")) dat$pretty_name <- format(dat$name, justify = "right") dat_pretty <- purrr::pmap_chr( dat[c("pretty_name", "pr_number", "pretty_user", "pr_title")], function(pretty_name, pr_number, pretty_user, pr_title) { if (is.na(pr_number)) { glue("{pretty_name}") } else { glue("{pretty_name} --> #{pr_number} ({ui_value(pretty_user)}): {pr_title}") } } ) choice <- utils::menu(title = prompt, choices = cli::ansi_strtrim(dat_pretty)) dat$name[choice] } choose_pr <- function(tr = NULL, pr_dat = NULL) { if (!is_interactive()) { return(list(pr_number = list())) } if (is.null(pr_dat)) { tr <- tr %||% target_repo() pr_dat <- pr_list(tr) } if (nrow(pr_dat) == 0) { return() } # wording needs to make sense for several PR-choosing tasks, e.g. 
fetch, view, # finish, forget prompt <- "Which PR are you interested in? (0 to exit)" if (nrow(pr_dat) > 9) { n <- nrow(pr_dat) - 9 pr_dat <- pr_dat[1:9, ] prompt <- glue(" {prompt} Not shown: {n} more {if (n > 1) 'PRs' else 'PR'}; \\ call {ui_code('browse_github_pulls()')} to browse all PRs.") } some_closed <- any(pr_dat$pr_state == "closed") pr_pretty <- purrr::pmap_chr( pr_dat[c("pr_number", "pr_user", "pr_state", "pr_title")], function(pr_number, pr_user, pr_state, pr_title) { hash_number <- glue("#{pr_number}") at_user <- glue("@{pr_user}") if (some_closed) { glue("{hash_number} ({ui_field(at_user)}, {pr_state}): {ui_value(pr_title)}") } else { glue("{hash_number} ({ui_field(at_user)}): {ui_value(pr_title)}") } } ) choice <- utils::menu( title = prompt, choices = cli::ansi_strtrim(pr_pretty) ) as.list(pr_dat[choice, ]) } # deletes the remote branch associated with a PR # returns invisible TRUE/FALSE re: whether a deletion actually occurred # reasons this returns FALSE # * don't have push permission on remote where PR branch lives # * PR has not been merged # * remote branch has already been deleted pr_branch_delete <- function(pr) { remote <- pr$pr_remote remote_dat <- github_remotes(remote) if (!isTRUE(remote_dat$can_push)) { return(invisible(FALSE)) } gh <- gh_tr(remote_dat) pr_ref <- tryCatch( gh( "GET /repos/{owner}/{repo}/git/ref/{ref}", ref = glue("heads/{pr$pr_ref}") ), http_error_404 = function(cnd) NULL ) pr_remref <- glue_data(pr, "{pr_remote}/{pr_ref}") if (is.null(pr_ref)) { ui_info(" PR {ui_value(pr$pr_string)} originated from branch \\ {ui_value(pr_remref)}, which no longer exists") return(invisible(FALSE)) } if (is.na(pr$pr_merged_at)) { ui_info(" PR {ui_value(pr$pr_string)} is unmerged, \\ we will not delete the remote branch {ui_value(pr_remref)}") return(invisible(FALSE)) } ui_done(" PR {ui_value(pr$pr_string)} has been merged, \\ deleting remote branch {ui_value(pr_remref)}") # TODO: tryCatch here? gh( "DELETE /repos/{owner}/{repo}/git/refs/{ref}", ref = glue("heads/{pr$pr_ref}") ) invisible(TRUE) } check_pr_branch <- function(default_branch = git_default_branch()) { # the glue-ing happens inside check_current_branch(), where `gb` gives the # current git branch check_current_branch( is_not = default_branch, message = " The {ui_code('pr_*()')} functions facilitate pull requests. The current branch ({ui_value(gb)}) is this repo's default \\ branch, but pull requests should NOT come from the default branch. Do you need to call {ui_code('pr_init()')} (new PR)? Or {ui_code('pr_resume()')} or {ui_code('pr_fetch()')} (existing PR)?" ) } usethis/R/ui.R0000644000175000017500000002114714117743363013047 0ustar nileshnilesh#' User interface #' #' @description #' These functions are used to construct the user interface of usethis. Use #' them in your own package so that your `use_` functions work the same way #' as usethis. #' #' The `ui_` functions can be broken down into four main categories: #' #' * block styles: `ui_line()`, `ui_done()`, `ui_todo()`, `ui_oops()`, #' `ui_info()`. #' * conditions: `ui_stop()`, `ui_warn()`. #' * questions: [ui_yeah()], [ui_nope()]. #' * inline styles: `ui_field()`, `ui_value()`, `ui_path()`, `ui_code()`, #' `ui_unset()`. #' #' The question functions [ui_yeah()] and [ui_nope()] have their own [help #' page][ui-questions]. #' #' @section Silencing output: #' All UI output (apart from `ui_yeah()`/`ui_nope()` prompts) can be silenced #' by setting `options(usethis.quiet = TRUE)`. Use `ui_silence()` to silence #' selected actions. 
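#' For example, wrapping a call like `ui_silence(use_make())` suppresses the
#' messages that call would otherwise print.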
#' #' @param x A character vector. #' #' For block styles, conditions, and questions, each element of the #' vector becomes a line, and the result is processed by [glue::glue()]. #' For inline styles, each element of the vector becomes an entry in a #' comma separated list. #' @param .envir Used to ensure that [glue::glue()] gets the correct #' environment. For expert use only. #' #' @return The block styles, conditions, and questions are called for their #' side-effect. The inline styles return a string. #' @keywords internal #' @family user interface functions #' @name ui #' @examples #' new_val <- "oxnard" #' ui_done("{ui_field('name')} set to {ui_value(new_val)}") #' ui_todo("Redocument with {ui_code('devtools::document()')}") #' #' ui_code_block(c( #' "Line 1", #' "Line 2", #' "Line 3" #' )) NULL # Block styles ------------------------------------------------------------ #' @rdname ui #' @export ui_line <- function(x = character(), .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_inform(x) } #' @rdname ui #' @export ui_todo <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_bullet(x, crayon::red(cli::symbol$bullet)) } #' @rdname ui #' @export ui_done <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_bullet(x, crayon::green(cli::symbol$tick)) } #' @rdname ui #' @export ui_oops <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_bullet(x, crayon::red(cli::symbol$cross)) } #' @rdname ui #' @export ui_info <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) ui_bullet(x, crayon::yellow(cli::symbol$info)) } #' @param copy If `TRUE`, the session is interactive, and the clipr package #' is installed, will copy the code block to the clipboard. #' @rdname ui #' @export ui_code_block <- function(x, copy = rlang::is_interactive(), .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) block <- indent(x, " ") block <- crayon::silver(block) ui_inform(block) if (copy && clipr::clipr_available()) { x <- crayon::strip_style(x) clipr::write_clip(x) ui_inform(" [Copied to clipboard]") } } # Conditions -------------------------------------------------------------- #' @rdname ui #' @export ui_stop <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) cnd <- structure( class = c("usethis_error", "error", "condition"), list(message = x) ) stop(cnd) } #' @rdname ui #' @export ui_warn <- function(x, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) warning(x, call. = FALSE, immediate. = TRUE) } # Silence ----------------------------------------------------------------- #' @rdname ui #' @param code Code to execute with usual UI output silenced. #' @export ui_silence <- function(code) { withr::with_options(list(usethis.quiet = TRUE), code) } # Questions --------------------------------------------------------------- #' User interface - Questions #' #' These functions are used to interact with the user by posing a simple yes or #' no question. For details on the other `ui_*()` functions, see the [ui] help #' page. #' #' @inheritParams ui #' @param yes A character vector of "yes" strings, which are randomly sampled to #' populate the menu. #' @param no A character vector of "no" strings, which are randomly sampled to #' populate the menu. #' @param n_yes An integer. 
The number of "yes" strings to include. #' @param n_no An integer. The number of "no" strings to include. #' @param shuffle A logical. Should the order of the menu options be randomly #' shuffled? #' #' @return A logical. `ui_yeah()` returns `TRUE` when the user selects a "yes" #' option and `FALSE` otherwise, i.e. when user selects a "no" option or #' refuses to make a selection (cancels). `ui_nope()` is the logical opposite #' of `ui_yeah()`. #' @name ui-questions #' @keywords internal #' @family user interface functions #' @examples #' \dontrun{ #' ui_yeah("Do you like R?") #' ui_nope("Have you tried turning it off and on again?", n_yes = 1, n_no = 1) #' ui_yeah("Are you sure its plugged in?", yes = "Yes", no = "No", shuffle = FALSE) #' } NULL #' @rdname ui-questions #' @export ui_yeah <- function(x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame()) { x <- glue_collapse(x, "\n") x <- glue(x, .envir = .envir) if (!is_interactive()) { ui_stop(c( "User input required, but session is not interactive.", "Query: {x}" )) } n_yes <- min(n_yes, length(yes)) n_no <- min(n_no, length(no)) qs <- c(sample(yes, n_yes), sample(no, n_no)) if (shuffle) { qs <- sample(qs) } # TODO: should this be ui_inform()? rlang::inform(x) out <- utils::menu(qs) out != 0L && qs[[out]] %in% yes } #' @rdname ui-questions #' @export ui_nope <- function(x, yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"), no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"), n_yes = 1, n_no = 2, shuffle = TRUE, .envir = parent.frame()) { # TODO(jennybc): is this correct in the case of no selection / cancelling? !ui_yeah( x = x, yes = yes, no = no, n_yes = n_yes, n_no = n_no, shuffle = shuffle, .envir = .envir ) } # Inline styles ----------------------------------------------------------- #' @rdname ui #' @export ui_field <- function(x) { x <- crayon::green(x) x <- glue_collapse(x, sep = ", ") x } #' @rdname ui #' @export ui_value <- function(x) { if (is.character(x)) { x <- encodeString(x, quote = "'") } x <- crayon::blue(x) x <- glue_collapse(x, sep = ", ") x } #' @rdname ui #' @export #' @param base If specified, paths will be displayed relative to this path. ui_path <- function(x, base = NULL) { is_directory <- is_dir(x) | grepl("/$", x) if (is.null(base)) { x <- proj_rel_path(x) } else if (!identical(base, NA)) { x <- path_rel(x, base) } # rationalize trailing slashes x <- path_tidy(x) x <- ifelse(is_directory, paste0(x, "/"), x) ui_value(x) } #' @rdname ui #' @export ui_code <- function(x) { x <- encodeString(x, quote = "`") x <- crayon::silver(x) x <- glue_collapse(x, sep = ", ") x } #' @rdname ui #' @export ui_unset <- function(x = "unset") { stopifnot(length(x) == 1) x <- glue("<{x}>") x <- crayon::silver(x) x } # rlang::inform() wrappers ----------------------------------------------------- indent <- function(x, first = " ", indent = first) { x <- gsub("\n", paste0("\n", indent), x) paste0(first, x) } ui_bullet <- function(x, bullet = cli::symbol$bullet) { bullet <- paste0(bullet, " ") x <- indent(x, bullet, " ") ui_inform(x) } # All UI output must eventually go through ui_inform() so that it # can be quieted with 'usethis.quiet' when needed. 
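# For illustration (a sketch, shown only as comments): either of these calls
# silences the bullet, because ui_inform() consults the option at call time.
#   withr::with_options(list(usethis.quiet = TRUE), ui_done("not shown"))
#   ui_silence(ui_done("also not shown"))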
ui_inform <- function(..., quiet = getOption("usethis.quiet", default = FALSE)) { if (!quiet) { inform(paste0(...)) } invisible() } # Sitrep helpers --------------------------------------------------------------- hd_line <- function(name) { ui_inform(crayon::bold(name)) } kv_line <- function(key, value, .envir = parent.frame()) { value <- if (is.null(value)) ui_unset() else ui_value(value) key <- glue(key, .envir = .envir) ui_inform(glue("{cli::symbol$bullet} {key}: {value}")) } usethis/R/github_token.R0000644000175000017500000002241214153723017015102 0ustar nileshnilesh#' Get help with GitHub personal access tokens #' #' @description #' A [personal access #' token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line) #' (PAT) is needed for certain tasks usethis does via the GitHub API, such as #' creating a repository, a fork, or a pull request. If you use HTTPS remotes, #' your PAT is also used when interacting with GitHub as a conventional Git #' remote. These functions help you get and manage your PAT: #' * `gh_token_help()` guides you through token troubleshooting and setup. #' * `create_github_token()` opens a browser window to the GitHub form to #' generate a PAT, with suggested scopes pre-selected. It also offers advice #' on storing your PAT. #' * `gitcreds::gitcreds_set()` helps you register your PAT with the Git #' credential manager used by your operating system. Later, other packages, #' such as usethis, gert, and gh can automatically retrieve that PAT and use #' it to work with GitHub on your behalf. #' #' Usually, the first time the PAT is retrieved in an R session, it is cached in #' an environment variable, for easier reuse for the duration of that R session. #' After initial acquisition and storage, all of this should happen #' automatically in the background. GitHub is encouraging the use of PATs that #' expire after, e.g., 30 days, so prepare yourself to re-generate and re-store #' your PAT periodically. #' #' Git/GitHub credential management is covered in a dedicated article: [Managing #' Git(Hub) #' Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html) #' #' @details #' `create_github_token()` has previously gone by some other names: #' `browse_github_token()` and `browse_github_pat()`. #' #' @param scopes Character vector of token scopes, pre-selected in the web form. #' Final choices are made in the GitHub form. Read more about GitHub API #' scopes at #' . #' @param description Short description or nickname for the token. You might #' (eventually) have multiple tokens on your GitHub account and a label can #' help you keep track of what each token is for. #' @inheritParams use_github #' #' @seealso [gh::gh_whoami()] for information on an existing token and #' `gitcreds::gitcreds_set()` and `gitcreds::gitcreds_get()` for a secure way #' to store and retrieve your PAT. 
#' #' @return Nothing #' @name github-token NULL #' @export #' @rdname github-token #' @examples #' \dontrun{ #' create_github_token() #' } create_github_token <- function(scopes = c("repo", "user", "gist", "workflow"), description = "DESCRIBE THE TOKEN'S USE CASE", host = NULL) { scopes <- glue_collapse(scopes, ",") host <- get_hosturl(host %||% default_api_url()) url <- glue( "{host}/settings/tokens/new?scopes={scopes}&description={description}" ) withr::defer(view_url(url)) hint <- code_hint_with_host("gitcreds::gitcreds_set", host) ui_todo(" Call {ui_code(hint)} to register this token in the \\ local Git credential store It is also a great idea to store this token in any password-management \\ software that you use") invisible() } #' @inheritParams use_github #' @export #' @rdname github-token #' @examples #' \dontrun{ #' gh_token_help() #' } gh_token_help <- function(host = NULL) { host_url <- get_hosturl(host %||% default_api_url()) kv_line("GitHub host", host_url) pat_sitrep(host_url) } code_hint_with_host <- function(function_name, host = NULL, arg_name = NULL) { arg_hint <- function(host, arg_name) { if (is.null(host) || is_github_dot_com(host)) { return("") } if (is_null(arg_name)) { glue('"{host}"') } else { glue('{arg_name} = "{host}"') } } glue_chr("{function_name}({arg_hint(host, arg_name)})") } # workhorse behind gh_token_help() and called, possibly twice, in git_sitrep() # hence the need for `scold_for_renviron = TRUE/FALSE` pat_sitrep <- function(host = "https://github.com", scold_for_renviron = TRUE) { if (scold_for_renviron) { scold_for_renviron() } maybe_pat <- purrr::safely(gh::gh_token)(api_url = host) if (is.null(maybe_pat$result)) { ui_oops("The PAT discovered for {ui_path(host)} has the wrong structure.") ui_inform(maybe_pat$error) return(invisible(FALSE)) } pat <- maybe_pat$result have_pat <- pat != "" if (!have_pat) { kv_line("Personal access token for {ui_value(host)}", NULL) hint <- code_hint_with_host("create_github_token", host, "host") ui_todo("To create a personal access token, call {ui_code(hint)}") hint <- code_hint_with_host("gitcreds::gitcreds_set", host) ui_todo("To store a token for current and future use, call {ui_code(hint)}") ui_info(" Read more in the {ui_value('Managing Git(Hub) Credentials')} article: https://usethis.r-lib.org/articles/articles/git-credentials.html") return(invisible(FALSE)) } kv_line("Personal access token for {ui_value(host)}", "") online <- is_online(host) if (!online) { ui_oops(" Host is not reachable. No further vetting of the personal access token is possible. Try again when {ui_value(host)} can be reached.") return(invisible()) } maybe_who <- purrr::safely(gh::gh_whoami)(.token = pat, .api_url = host) if (is.null(maybe_who$result)) { message <- "Can't get user information for this token." if (inherits(maybe_who$error, "http_error_401")) { message <- " Can't get user information for this token. The token may no longer be valid or perhaps it lacks the \\ {ui_value('user')} scope." } ui_oops(message) ui_inform(maybe_who$error) return(invisible(FALSE)) } who <- maybe_who$result kv_line("GitHub user", who$login) scopes <- who$scopes kv_line("Token scopes", who$scopes) scopes <- strsplit(scopes, ", ")[[1]] scold_for_scopes(scopes) maybe_emails <- purrr::safely(gh::gh)("/user/emails", .token = pat, .api_url = host) if (is.null(maybe_emails$result)) { ui_oops(" Can't retrieve registered email addresses from GitHub. 
Consider re-creating your PAT with the {ui_value('user')} \\ or at least {ui_value('user:email')} scope.") } else { emails <- maybe_emails$result addresses <- map_chr( emails, ~ if (.x$primary) glue_data(.x, "{email} (primary)") else .x[["email"]] ) kv_line("Email(s)", addresses) ui_silence( de_facto_email <- git_cfg_get("user.email", "de_facto") ) if (!any(grepl(de_facto_email, addresses))) { ui_oops(" Local Git user's email ({ui_value(de_facto_email)}) doesn't appear to \\ be registered with GitHub.") } } invisible(TRUE) } scold_for_renviron <- function() { renviron_path <- scoped_path_r("user", ".Renviron", envvar = "R_ENVIRON_USER") if (!file_exists(renviron_path)) { return(invisible()) } renviron_lines <- read_utf8(renviron_path) fishy_lines <- grep("^GITHUB_(PAT|TOKEN).*=.+", renviron_lines, value = TRUE) if (length(fishy_lines) == 0) { return(invisible()) } fishy_keys <- re_match(fishy_lines, "^(?.+)=.+")$key # TODO: when I switch to cli, this is a good place for `!` # in general, lots below is suboptimal, but good enough for now ui_info(c( "{ui_path(renviron_path)} defines environment variable(s):", paste0("- ", fishy_keys), "This can prevent your PAT from being retrieved from the Git credential store." )) ui_info(" If you are troubleshooting PAT problems, the root cause may be an old, \\ invalid PAT defined in {ui_path(renviron_path)}.") ui_todo("Call {ui_code('edit_r_environ()')} to edit that file.") ui_info(" For most use cases, it is better to NOT define the PAT in \\ {ui_code('.Renviron')}. Instead, call {ui_code('gitcreds::gitcreds_set()')} to put the PAT into \\ the Git credential store.") invisible() } scold_for_scopes <- function(scopes) { if (length(scopes) == 0) { ui_oops(" Token has no scopes! {ui_code('create_github_token()')} defaults to the recommended scopes.") return(invisible()) } # https://docs.github.com/en/free-pro-team@latest/developers/apps/scopes-for-oauth-apps # why these checks? # previous defaults for create_github_token(): repo, gist, user:email # more recently: repo, user, gist, workflow # (gist scope is a very weak recommendation) has_repo <- "repo" %in% scopes has_workflow <- "workflow" %in% scopes has_user_email <- "user" %in% scopes || "user:email" %in% scopes if (has_repo && has_workflow && has_user_email) { return(invisible()) } # current design of the ui_*() functions makes this pretty hard :( suggestions <- c( if (!has_repo) { "- {ui_value('repo')}: needed to fully access user's repos" }, if (!has_workflow) { "- {ui_value('workflow')}: needed to manage GitHub Actions workflow files" }, if (!has_user_email) { "- {ui_value('user:email')}: needed to read user's email addresses" } ) message <- c( "Token lacks recommended scopes:", suggestions, "Consider re-creating your PAT with the missing scopes.", "{ui_code('create_github_token()')} defaults to the recommended scopes." ) ui_oops(glue_collapse(message, sep = "\n")) } usethis/R/news.R0000644000175000017500000000207214117743363013402 0ustar nileshnilesh#' Create a simple `NEWS.md` #' #' This creates a basic `NEWS.md` in the root directory. #' #' @inheritParams use_template #' @seealso The [important files #' section](https://r-pkgs.org/release.html#important-files) of [R #' Packages](https://r-pkgs.org). 
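#' @examples
#' \dontrun{
#' # a minimal sketch: assumes an active package project
#' use_news_md()
#' }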
#' @export use_news_md <- function(open = rlang::is_interactive()) { use_template( "NEWS.md", data = package_data(), open = open ) git_ask_commit("Add NEWS.md", untracked = TRUE, paths = "NEWS.md") } use_news_heading <- function(version) { news_path <- proj_path("NEWS.md") if (!file_exists(news_path)) { return(invisible()) } news <- read_utf8(news_path) title <- glue("# {project_name()} {version}") if (title == news[[1]]) { return(invisible()) } development_title <- glue("# {project_name()} (development version)") if (development_title == news[[1]]) { news[[1]] <- title ui_done("Replacing development heading in NEWS.md") return(write_utf8(news_path, news)) } ui_done("Adding new heading to NEWS.md") write_utf8(news_path, c(title, "", news)) } usethis/R/block.R0000644000175000017500000000534714117743363013530 0ustar nileshnileshblock_append <- function(desc, value, path, block_start = "# <<<", block_end = "# >>>", block_prefix = NULL, block_suffix = NULL, sort = FALSE) { if (!is.null(path) && file_exists(path)) { lines <- read_utf8(path) if (all(value %in% lines)) { return(FALSE) } block_lines <- block_find(lines, block_start, block_end) } else { block_lines <- NULL } if (is.null(block_lines)) { ui_todo(" Copy and paste the following lines into {ui_path(path)}:") ui_code_block(c(block_prefix, block_start, value, block_end, block_suffix)) return(FALSE) } ui_done("Adding {desc} to {ui_path(path)}") start <- block_lines[[1]] end <- block_lines[[2]] block <- lines[seq2(start, end)] new_lines <- union(block, value) if (sort) { new_lines <- sort(new_lines) } lines <- c( lines[seq2(1, start - 1L)], new_lines, lines[seq2(end + 1L, length(lines))] ) write_utf8(path, lines) TRUE } block_replace <- function(desc, value, path, block_start = "# <<<", block_end = "# >>>") { if (!is.null(path) && file_exists(path)) { lines <- read_utf8(path) block_lines <- block_find(lines, block_start, block_end) } else { block_lines <- NULL } if (is.null(block_lines)) { ui_todo("Copy and paste the following lines into {ui_value(path)}:") ui_code_block(c(block_start, value, block_end)) return(invisible(FALSE)) } start <- block_lines[[1]] end <- block_lines[[2]] block <- lines[seq2(start, end)] if (identical(value, block)) { return(invisible(FALSE)) } ui_done("Replacing {desc} in {ui_path(path)}") lines <- c( lines[seq2(1, start - 1L)], value, lines[seq2(end + 1L, length(lines))] ) write_utf8(path, lines) } block_show <- function(path, block_start = "# <<<", block_end = "# >>>") { lines <- read_utf8(path) block <- block_find(lines, block_start, block_end) lines[seq2(block[[1]], block[[2]])] } block_find <- function(lines, block_start = "# <<<", block_end = "# >>>") { # No file if (is.null(lines)) { return(NULL) } start <- which(lines == block_start) end <- which(lines == block_end) # No block if (length(start) == 0 && length(end) == 0) { return(NULL) } if (!(length(start) == 1 && length(end) == 1 && start < end)) { ui_stop( "Invalid block specification. Must start with {ui_code(block_start)} and end with {ui_code(block_end)}" ) } c(start + 1L, end - 1L) } block_create <- function(lines = character(), block_start = "# <<<", block_end = "# >>>") { c(block_start, unique(lines), block_end) } usethis/R/spelling.R0000644000175000017500000000231614132400710014223 0ustar nileshnilesh#' Use spell check #' #' Adds a unit test to automatically run a spell check on documentation and, #' optionally, vignettes during `R CMD check`, using the #' [spelling][spelling::spell_check_package] package. 
Also adds a `WORDLIST` #' file to the package, which is a dictionary of whitelisted words. See #' [spelling::wordlist] for details. #' #' @param vignettes Logical, `TRUE` to spell check all `rmd` and `rnw` files in #' the `vignettes/` folder. #' @param lang Preferred spelling language. Usually either `"en-US"` or #' `"en-GB"`. #' @param error Logical, indicating whether the unit test should fail if #' spelling errors are found. Defaults to `FALSE`, which does not error, but #' prints potential spelling errors #' @export use_spell_check <- function(vignettes = TRUE, lang = "en-US", error = FALSE) { check_is_package("use_spell_check()") check_installed("spelling") use_dependency("spelling", "Suggests") use_description_field("Language", lang, overwrite = TRUE) spelling::spell_check_setup( pkg = proj_get(), vignettes = vignettes, lang = lang, error = error ) ui_todo("Run {ui_code('devtools::check()')} to trigger spell check") } usethis/R/revdep.R0000644000175000017500000000162714117743363013720 0ustar nileshnilesh#' Reverse dependency checks #' #' Performs set up for checking the reverse dependencies of an R package, as #' implemented by the revdepcheck package: #' * Adds `revdep` directory and adds it to `.Rbuildignore` #' * Populates `revdep/.gitignore` to prevent tracking of various revdep #' artefacts #' * Creates `revdep/email.yml` for use with `revdepcheck::revdep_email()` #' * Prompts user to run the checks with `revdepcheck::revdep_check()` #' #' @export use_revdep <- function() { check_is_package("use_revdep()") use_directory("revdep", ignore = TRUE) use_git_ignore( directory = "revdep", c( "checks", "library", "checks.noindex", "library.noindex", "cloud.noindex", "data.sqlite", "*.html" ) ) new <- use_template( "revdep-email.yml", "revdep/email.yml" ) ui_todo("Run checks with {ui_code('revdepcheck::revdep_check(num_workers = 4)')}") invisible(new) } usethis/R/ci.R0000644000175000017500000001446614132400710013012 0ustar nileshnilesh#' Continuous integration setup and badges #' #' @description #' #' `r lifecycle::badge("deprecated")` #' #' Some of these functions are now soft-deprecated since the tidyverse team has #' started using [GitHub Actions (GHA)](https://github.com/features/actions) for #' continuous integration (CI). See [use_github_actions()] for help configuring #' GHA. GHA functionality in usethis is actively maintained and exercised, which #' is no longer true for Travis-CI or AppVeyor. #' #' Sets up third-party continuous integration (CI) services for an R package #' that is developed on GitHub or, perhaps, GitLab. These functions #' * Add service-specific configuration files and add them to `.Rbuildignore`. #' * Activate a service or give the user a detailed prompt. #' * Provide the markdown to insert a badge into README. #' #' @name ci #' @aliases NULL NULL #' @section `use_travis()`: #' Adds a basic `.travis.yml` to the top-level directory of a package. This is a #' configuration file for the [Travis CI](https://www.travis-ci.com/) continuous #' integration service. #' @param browse Open a browser window to enable automatic builds for the #' package. #' @param ext Which travis website to use. Defaults to `"com"` for #' https://www.travis-ci.com/. Change to `"org"` for https://travis-ci.org. 
#' @export #' @rdname ci use_travis <- function(browse = rlang::is_interactive(), ext = c("com", "org")) { lifecycle::deprecate_soft( when = "2.0.0", what = "usethis::use_travis()", with = "use_github_actions()" ) repo_spec <- target_repo_spec() ext <- arg_match(ext) new <- use_template( "travis.yml", ".travis.yml", ignore = TRUE ) if (!new) { return(invisible(FALSE)) } use_travis_badge(ext = ext, repo_spec = repo_spec) travis_activate(repo_spec, browse = browse, ext = ext) invisible(TRUE) } #' @section `use_travis_badge()`: #' Only adds the Travis CI badge. Use for a project where Travis is already #' configured. #' @eval param_repo_spec() #' @export #' @rdname ci use_travis_badge <- function(ext = c("com", "org"), repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() ext <- arg_match(ext) url <- glue("https://travis-ci.{ext}/{repo_spec}") img <- glue("{url}.svg?branch={git_default_branch()}") use_badge("Travis build status", url, img) } travis_activate <- function(repo_spec, browse = is_interactive(), ext = c("com", "org")) { ext <- arg_match(ext) url <- glue("https://travis-ci.{ext}/profile/{repo_spec}") ui_todo("Turn on travis for the repo at {url}") if (browse) { utils::browseURL(url) } } uses_travis <- function() { file_exists(proj_path(".travis.yml")) } #' @section `use_appveyor()`: #' Adds a basic `appveyor.yml` to the top-level directory of a package. This is #' a configuration file for the [AppVeyor](https://www.appveyor.com) continuous #' integration service for Windows. #' @export #' @rdname ci use_appveyor <- function(browse = rlang::is_interactive()) { lifecycle::deprecate_soft( when = "2.0.0", what = "usethis::use_appveyor()", with = "use_github_actions()" ) repo_spec <- target_repo_spec() new <- use_template("appveyor.yml", ignore = TRUE) if (!new) { return(invisible(FALSE)) } use_appveyor_badge(repo_spec) appveyor_activate(browse) invisible(TRUE) } appveyor_activate <- function(browse = is_interactive()) { url <- "https://ci.appveyor.com/projects/new" ui_todo("Turn on AppVeyor for this repo at {url}") if (browse) { utils::browseURL(url) } } #' @section `use_appveyor_badge()`: #' Only adds the [AppVeyor](https://www.appveyor.com) badge. Use for a project #' where AppVeyor is already configured. #' @export #' @rdname ci use_appveyor_badge <- function(repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() img <- glue( "https://ci.appveyor.com/api/projects/status/github/", "{repo_spec}?branch={git_default_branch()}&svg=true" ) url <- glue("https://ci.appveyor.com/project/{repo_spec}") use_badge("AppVeyor build status", url, img) } #' @section `use_gitlab_ci()`: #' Adds a basic `.gitlab-ci.yml` to the top-level directory of a package. This #' is a configuration file for the [GitLab #' CI/CD](https://docs.gitlab.com/ee/ci/) continuous integration service. #' @export #' @rdname ci use_gitlab_ci <- function() { check_uses_git() new <- use_template( "gitlab-ci.yml", ".gitlab-ci.yml", ignore = TRUE ) if (!new) { return(invisible(FALSE)) } invisible(TRUE) } #' @section `use_circleci()`: #' Adds a basic `.circleci/config.yml` to the top-level directory of a package. #' This is a configuration file for the [CircleCI](https://circleci.com/) #' continuous integration service. #' @param image The Docker image to use for build. Must be available on #' [DockerHub](https://hub.docker.com). The #' [rocker/verse](https://hub.docker.com/r/rocker/verse) image includes #' TeXLive, pandoc, and the tidyverse packages. 
For a minimal image, try #' [rocker/r-ver](https://hub.docker.com/r/rocker/r-ver). To specify a version #' of R, change the tag from `latest` to the version you want, e.g. #' `rocker/r-ver:3.5.3`. #' @export #' @rdname ci use_circleci <- function(browse = rlang::is_interactive(), image = "rocker/verse:latest") { repo_spec <- target_repo_spec() use_directory(".circleci", ignore = TRUE) new <- use_template( "circleci-config.yml", ".circleci/config.yml", data = list(package = project_name(), image = image), ignore = TRUE ) if (!new) { return(invisible(FALSE)) } use_circleci_badge(repo_spec) circleci_activate(spec_owner(repo_spec), browse) invisible(TRUE) } #' @section `use_circleci_badge()`: #' Only adds the [Circle CI](https://circleci.com/) badge. Use for a project #' where Circle CI is already configured. #' @rdname ci #' @export use_circleci_badge <- function(repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() url <- glue("https://circleci.com/gh/{repo_spec}") img <- glue("{url}.svg?style=svg") use_badge("CircleCI build status", url, img) } circleci_activate <- function(owner, browse = is_interactive()) { url <- glue("https://circleci.com/add-projects/gh/{owner}") ui_todo("Turn on CircleCI for your repo at {url}") if (browse) { utils::browseURL(url) } } usethis/R/directory.R0000644000175000017500000000265013737204645014436 0ustar nileshnilesh#' Use a directory #' #' `use_directory()` creates a directory (if it does not already exist) in the #' project's top-level directory. This function powers many of the other `use_` #' functions such as [use_data()] and [use_vignette()]. #' #' @param path Path of the directory to create, relative to the project. #' @inheritParams use_template #' #' @export #' @examples #' \dontrun{ #' use_directory("inst") #' } use_directory <- function(path, ignore = FALSE) { create_directory(proj_path(path)) if (ignore) { use_build_ignore(path) } invisible(TRUE) } create_directory <- function(path) { if (dir_exists(path)) { return(invisible(FALSE)) } else if (file_exists(path)) { ui_stop("{ui_path(path)} exists but is not a directory.") } dir_create(path, recurse = TRUE) ui_done("Creating {ui_path(path)}") invisible(TRUE) } check_path_is_directory <- function(path) { if (!file_exists(path)) { ui_stop("Directory {ui_path(path)} does not exist.") } if (is_link(path)) { path <- link_path(path) } if (!is_dir(path)) { ui_stop("{ui_path(path)} is not a directory.") } } count_directory_files <- function(x) { length(dir_ls(x)) } directory_has_files <- function(x) { count_directory_files(x) >= 1 } check_directory_is_empty <- function(x) { if (directory_has_files(x)) { ui_stop("{ui_path(x)} exists and is not an empty directory.") } invisible(x) } usethis/R/helpers.R0000644000175000017500000000620514132400710014051 0ustar nileshnileshuse_dependency <- function(package, type, min_version = NULL) { stopifnot(is_string(package)) stopifnot(is_string(type)) if (package != "R") { check_installed(package) } if (isTRUE(min_version)) { min_version <- utils::packageVersion(package) } version <- if (is.null(min_version)) "*" else paste0(">= ", min_version) types <- c("Depends", "Imports", "Suggests", "Enhances", "LinkingTo") names(types) <- tolower(types) type <- types[[match.arg(tolower(type), names(types))]] deps <- desc::desc_get_deps(proj_get()) existing_dep <- deps$package == package existing_type <- deps$type[existing_dep] existing_ver <- deps$version[existing_dep] is_linking_to <- (existing_type != "LinkingTo" & type == "LinkingTo") | (existing_type == "LinkingTo" & 
type != "LinkingTo") # No existing dependency, so can simply add if (!any(existing_dep) || any(is_linking_to)) { ui_done("Adding {ui_value(package)} to {ui_field(type)} field in DESCRIPTION") desc::desc_set_dep(package, type, version = version, file = proj_get()) return(invisible(TRUE)) } existing_type <- setdiff(existing_type, "LinkingTo") delta <- sign(match(existing_type, types) - match(type, types)) if (delta < 0) { # don't downgrade ui_warn( "Package {ui_value(package)} is already listed in \\ {ui_value(existing_type)} in DESCRIPTION, no change made." ) return(invisible(FALSE)) } else if (delta == 0 && !is.null(min_version)) { # change version upgrade <- existing_ver == "*" || numeric_version(min_version) > version_spec(existing_ver) if (upgrade) { ui_done( "Increasing {ui_value(package)} version to {ui_value(version)} in DESCRIPTION" ) desc::desc_set_dep(package, type, version = version, file = proj_get()) } } else if (delta > 0) { # upgrade if (existing_type != "LinkingTo") { ui_done( " Moving {ui_value(package)} from {ui_field(existing_type)} to {ui_field(type)} \\ field in DESCRIPTION " ) desc::desc_del_dep(package, existing_type, file = proj_get()) desc::desc_set_dep(package, type, version = version, file = proj_get()) } } invisible(TRUE) } use_system_requirement <- function(requirement) { stopifnot(is_string(requirement)) existing_requirements <- desc::desc_get_field("SystemRequirements", default = character(), file = proj_get()) existing_requirements <- utils::head(strsplit(existing_requirements, ", ?"), n = 1) if (requirement %in% existing_requirements) { return(invisible()) } new_requirements <- paste0(c(existing_requirements, requirement), collapse = ", ") ui_done( "Adding {ui_value(requirement)} to {ui_field('SystemRequirements')} field in DESCRIPTION" ) desc::desc_set("SystemRequirements", new_requirements) invisible() } version_spec <- function(x) { x <- gsub("(<=|<|>=|>|==)\\s*", "", x) numeric_version(x) } view_url <- function(..., open = is_interactive()) { url <- paste(..., sep = "/") if (open) { ui_done("Opening URL {ui_value(url)}") utils::browseURL(url) } else { ui_todo("Open URL {ui_value(url)}") } invisible(url) } usethis/R/utils.R0000644000175000017500000000515314132400710013550 0ustar nileshnileshcan_overwrite <- function(path) { if (!file_exists(path)) { return(TRUE) } if (getOption("usethis.overwrite", FALSE)) { # don't activate a project # don't assume `path` is in the active project if (is_in_proj(path) && uses_git()) { # path is in active project return(TRUE) } if (possibly_in_proj(path) && # path is some other project with_project(proj_find(path), uses_git(), quiet = TRUE)) { return(TRUE) } } if (is_interactive()) { ui_yeah("Overwrite pre-existing file {ui_path(path)}?") } else { FALSE } } check_is_named_list <- function(x, nm = deparse(substitute(x))) { if (!is_list(x)) { bad_class <- paste(class(x), collapse = "/") ui_stop("{ui_code(nm)} must be a list, not {ui_value(bad_class)}.") } if (!is_dictionaryish(x)) { ui_stop( "Names of {ui_code(nm)} must be non-missing, non-empty, and non-duplicated." ) } x } dots <- function(...) 
{ eval(substitute(alist(...))) } asciify <- function(x) { stopifnot(is.character(x)) gsub("[^a-zA-Z0-9_-]+", "-", x) } slug <- function(x, ext) { x_base <- path_ext_remove(x) x_ext <- path_ext(x) ext <- if (identical(tolower(x_ext), tolower(ext))) x_ext else ext as.character(path_ext_set(x_base, ext)) } compact <- function(x) { is_empty <- vapply(x, function(x) length(x) == 0, logical(1)) x[!is_empty] } # Needed for mocking is_installed <- function(pkg) { rlang::is_installed(pkg) } check_installed <- function(pkg) { rlang::check_installed(pkg) } isFALSE <- function(x) { identical(x, FALSE) } isNA <- function(x) { length(x) == 1 && is.na(x) } path_first_existing <- function(paths) { # manual loop with explicit use of `[[` to retain "fs" class for (i in seq_along(paths)) { path <- paths[[i]] if (file_exists(path)) { return(path) } } NULL } is_online <- function(host) { bare_host <- sub("^https?://(.*)$", "\\1", host) !is.null(curl::nslookup(bare_host, error = FALSE)) } year <- function() format(Sys.Date(), "%Y") pluck_lgl <- function(.x, ...) { as_logical(purrr::pluck(.x, ..., .default = NA)) } pluck_chr <- function(.x, ...) { as_character(purrr::pluck(.x, ..., .default = NA)) } pluck_int <- function(.x, ...) { as_integer(purrr::pluck(.x, ..., .default = NA)) } is_windows <- function() { .Platform$OS.type == "windows" } check_string <- function(x, nm = deparse(substitute(x))) { if (!is_string(x)) { ui_stop("{ui_code(nm)} must be a string.") } x } maybe_string <- function(x, nm = deparse(substitute(x))) { if (is.null(x)) { x } else { check_string(x, nm = nm) } } usethis/R/tutorial.R0000644000175000017500000000277314117743363014301 0ustar nileshnilesh#' Create a learnr tutorial #' #' Creates a new tutorial below `inst/tutorials/`. Tutorials are interactive R #' Markdown documents built with the [`learnr` #' package](https://rstudio.github.io/learnr/index.html). `use_tutorial()` does #' this setup: #' * Adds learnr to Suggests in `DESCRIPTION`. #' * Gitignores `inst/tutorials/*.html` so you don't accidentally track #' rendered tutorials. #' * Creates a new `.Rmd` tutorial from a template and, optionally, opens it #' for editing. #' * Adds new `.Rmd` to `.Rbuildignore`. #' #' @param name Base for file name to use for new `.Rmd` tutorial. Should consist #' only of numbers, letters, `_` and `-`. We recommend using lower case. #' @param title The human-facing title of the tutorial. #' @inheritParams use_template #' @seealso The [learnr package #' documentation](https://rstudio.github.io/learnr/index.html). 
#' @export #' @examples #' \dontrun{ #' use_tutorial("learn-to-do-stuff", "Learn to do stuff") #' } use_tutorial <- function(name, title, open = rlang::is_interactive()) { stopifnot(is_string(name)) stopifnot(is_string(title)) dir_path <- path("inst", "tutorials", name) dir_create(dir_path) use_directory(dir_path) use_git_ignore("*.html", directory = dir_path) use_dependency("learnr", "Suggests") path <- path(dir_path, asciify(name), ext = "Rmd") new <- use_template( "tutorial-template.Rmd", save_as = path, data = list(tutorial_title = title), ignore = FALSE, open = open ) invisible(new) } usethis/R/utils-github.R0000644000175000017500000006600414153502006015036 0ustar nileshnilesh# OWNER/REPO --> OWNER, REPO parse_repo_spec <- function(repo_spec) { repo_split <- strsplit(repo_spec, "/")[[1]] if (length(repo_split) != 2) { ui_stop("{ui_code('repo_spec')} must be of form {ui_value('owner/repo')}.") } list(owner = repo_split[[1]], repo = repo_split[[2]]) } spec_owner <- function(repo_spec) parse_repo_spec(repo_spec)$owner spec_repo <- function(repo_spec) parse_repo_spec(repo_spec)$repo # OWNER, REPO --> OWNER/REPO make_spec <- function(owner = NA, repo = NA) { no_spec <- is.na(owner) | is.na(repo) as.character(ifelse(no_spec, NA, glue("{owner}/{repo}"))) } # named vector or list of GitHub URLs --> data frame of URL parts # more general than the name suggests # definitely designed for GitHub URLs but not overtly GitHub-specific # https://stackoverflow.com/questions/2514859/regular-expression-for-git-repository # https://git-scm.com/docs/git-clone#_git_urls # https://stackoverflow.com/questions/27745/getting-parts-of-a-url-regex github_remote_regex <- paste0( "^", "(?\\w+://)?", "(?.+@)?", "(?[^/:]+)", "[/:]", "(?[^/]+)", "/", "(?[^/#]+)", "(?.*)", "$" ) parse_github_remotes <- function(x) { # https://github.com/r-lib/usethis # --> https, github.com, rlib, usethis # https://github.com/r-lib/usethis.git # --> https, github.com, rlib, usethis # https://github.com/r-lib/usethis#readme # --> https, github.com, rlib, usethis # https://github.com/r-lib/usethis/issues/1169 # --> https, github.com, rlib, usethis # https://github.acme.com/r-lib/devtools.git # --> https, github.acme.com, rlib, usethis # git@github.com:r-lib/usethis.git # --> ssh, github.com, rlib, usethis # ssh://git@github.com/rstudio/packrat.git # --> ssh, github.com, rlib, usethis dat <- re_match(x, github_remote_regex) dat$protocol <- sub("://$", "", dat$protocol) dat$user <- sub("@$", "", dat$user) dat$repo_name <- sub("[.]git$", "", dat$repo_name) dat$url <- dat$.text # as.character() necessary for edge case of length-0 input dat$protocol <- as.character(ifelse(dat$protocol == "https", "https", "ssh")) dat$name <- if (rlang::is_named(x)) { names(x) } else { rep_len(NA_character_, length.out = nrow(dat)) } dat[c("name", "url", "host", "repo_owner", "repo_name", "protocol")] } parse_repo_url <- function(x) { stopifnot(is_string(x)) dat <- re_match(x, github_remote_regex) if (is.na(dat$.match)) { list(repo_spec = x, host = NULL) } else { dat <- parse_github_remotes(x) # TODO: generalize here for GHE hosts that don't include 'github' if (!grepl("github", dat$host)) { ui_stop("URL doesn't seem to be associated with GitHub: {ui_value(x)}") } list( repo_spec = make_spec(owner = dat$repo_owner, repo = dat$repo_name), host = glue("https://{dat$host}") ) } } github_url_from_git_remotes <- function() { tr <- tryCatch(target_repo(github_get = NA), error = function(e) NULL) if (is.null(tr)) { return() } parsed <- parse_github_remotes(tr$url) 
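  # parse_github_remotes() returns a one-row data frame with `host`,
  # `repo_owner`, and `repo_name` (among others); reassemble those parts
  # into a browser-facing URL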
glue_data_chr(parsed, "https://{host}/{repo_owner}/{repo_name}") } #' Gather LOCAL data on GitHub-associated remotes #' #' Creates a data frame where each row represents a GitHub-associated remote. #' The data frame is initialized via `gert::git_remote_list()`, possibly #' filtered for specific remote names. The remote URLs are parsed into parts, #' like `host` and `repo_owner`. This is filtered again for rows where the #' `host` appears to be a GitHub deployment (currently a crude search for #' "github"). Some of these parts are recombined or embellished to get new #' columns (`host_url`, `api_url`, `repo_spec`). All operations are entirely #' mechanical and local. #' #' @param these Intersect the list of remotes with `these` remote names. To keep #' all remotes, use `these = NULL` or `these = character()`. #' @param x Data frame with character columns `name` and `url`. Exposed as an #' argument for internal reasons. It's so we can call the functions that #' marshal info about GitHub remotes with 0-row input to obtain a properly #' typed template without needing a Git repo or calling GitHub. We just want #' to get a data frame with zero rows, but with the column names and types #' implicit in our logic. #' @keywords internal #' @noRd github_remote_list <- function(these = c("origin", "upstream"), x = NULL) { x <- x %||% gert::git_remote_list(repo = git_repo()) stopifnot(is.null(these) || is.character(these)) stopifnot(is.data.frame(x), is.character(x$name), is.character(x$url)) if (length(these) > 0) { x <- x[x$name %in% these, ] } parsed <- parse_github_remotes(set_names(x$url, x$name)) # TODO: generalize here for GHE hosts that don't include 'github' is_github <- grepl("github", parsed$host) parsed <- parsed[is_github, ] parsed$remote <- parsed$name parsed$host_url <- glue_chr("https://{parsed$host}") parsed$api_url <- map_chr(parsed$host_url, get_apiurl) parsed$repo_spec <- make_spec(parsed$repo_owner, parsed$repo_name) parsed[c( "remote", "url", "host_url", "api_url", "host", "protocol", "repo_owner", "repo_name", "repo_spec" )] } #' Gather LOCAL and (maybe) REMOTE data on GitHub-associated remotes #' #' Creates a data frame where each row represents a GitHub-associated remote, #' starting with the output of `github_remote_list()` (local data). This #' function's job is to (maybe) add information we can only get from the GitHub #' API. If `github_get = FALSE`, we don't even attempt to call the API. #' Otherwise, we try and will succeed if gh discovers a suitable token. The #' resulting data, even if the API data is absent, is massaged into a data #' frame. #' #' @inheritParams github_remote_list #' @param github_get Whether to attempt to get repo info from the GitHub API. We #' try for `NA` (the default) and `TRUE`. If we aren't successful, we proceed #' anyway for `NA` but error for `TRUE`. When `FALSE`, no attempt is made to #' call the API. #' @keywords internal #' @noRd github_remotes <- function(these = c("origin", "upstream"), github_get = NA, x = NULL) { grl <- github_remote_list(these = these, x = x) get_gh_repo <- function(repo_owner, repo_name, api_url = "https://api.github.com") { if (isFALSE(github_get)) { f <- function(...) list() } else { f <- purrr::possibly(gh::gh, otherwise = list()) } f( "GET /repos/{owner}/{repo}", owner = repo_owner, repo = repo_name, .api_url = api_url ) } repo_info <- purrr::pmap( grl[c("repo_owner", "repo_name", "api_url")], get_gh_repo ) # NOTE: these can be two separate matters: # 1. Did we call the GitHub API? 
Means we know `is_fork` and the parent repo. # 2. If so, did we call it with auth? Means we know if we can push. grl$github_got <- map_lgl(repo_info, ~ length(.x) > 0) if (isTRUE(github_get) && any(!grl$github_got)) { oops <- which(!grl$github_got) oops_remotes <- grl$remote[oops] oops_hosts <- unique(grl$host[oops]) ui_stop(" Unable to get GitHub info for these remotes: {ui_value(oops_remotes)} Are we offline? Is GitHub down? Has the repo been deleted? Otherwise, you probably need to configure a personal access token (PAT) \\ for {ui_value(oops_hosts)} See {ui_code('?gh_token_help')} for advice") } grl$default_branch <- map_chr(repo_info, "default_branch", .default = NA) grl$is_fork <- map_lgl(repo_info, "fork", .default = NA) # `permissions` is an example of data that is not present if the request # did not include a PAT grl$can_push <- map_lgl(repo_info, c("permissions", "push"), .default = NA) grl$can_admin <- map_lgl(repo_info, c("permissions", "admin"), .default = NA) grl$perm_known <- !is.na(grl$can_push) grl$parent_repo_owner <- map_chr(repo_info, c("parent", "owner", "login"), .default = NA) grl$parent_repo_name <- map_chr(repo_info, c("parent", "name"), .default = NA) grl$parent_repo_spec <- make_spec(grl$parent_repo_owner, grl$parent_repo_name) parent_info <- purrr::pmap( set_names( grl[c("parent_repo_owner", "parent_repo_name", "api_url")], ~ sub("parent_", "", .x) ), get_gh_repo ) grl$can_push_to_parent <- map_lgl(parent_info, c("permissions", "push"), .default = NA) grl } #' Classify the GitHub remote configuration #' #' @description #' Classify the active project's GitHub remote situation, so diagnostic and #' other downstream functions can decide whether to proceed / abort / complain & #' offer to fix. #' We only consider the remotes where: #' * Name is `origin` or `upstream` and the remote URL "looks like github" #' (github.com or a GHE deployment) #' #' We have to call the GitHub API to fully characterize the GitHub remote #' situation. That's the only way to learn if the user can push to a remote, #' whether a remote is a fork, and which repo is the parent of a fork. #' `github_get` controls whether we make these API calls. #' #' Some functions can get by with the information that's available locally, i.e. #' we can use simple logic to decide whether to target `origin` or `upstream` or #' present the user with a choice. We can set `github_get = FALSE` in this case. #' Other functions, like the `pr_*()` functions, are more demanding and we'll #' always determine the config with `github_get = TRUE`. #' #' Most usethis functions should call the higher-level functions `target_repo()` #' or `target_repo_spec()`. #' #' Only functions that really need full access to the GitHub remote config #' should call this directly. Ways to work with a config: #' * `cfg <- github_remote_config(github_get = )` #' * `check_for_bad_config(cfg)` errors for obviously bad configs (by default) #' or you can specify the configs considered to be bad #' * Emit a custom message then call `stop_bad_github_remote_config()` directly #' * If the config is suboptimal-but-supported, use #' `ui_github_remote_config_wat()` to educate the user and give them a chance #' to back out. #' #' Fields in an instance of `github_remote_config`: #' * `type`: explained below #' * `pr_ready`: Logical. Do the `pr_*()` functions support it? #' * `desc`: A description used in messages and menus. #' * `origin`: Information about the `origin` GitHub remote. #' * `upstream`: Information about the `upstream` GitHub remote. 
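#'
#' A rough usage sketch (assumes a Git repo with GitHub-associated remotes):
#' call `cfg <- github_remote_config(github_get = NA)`, then inspect
#' `cfg$type`, `cfg$pr_ready`, and `cfg$origin$repo_spec`.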
#' #' Possible GitHub remote configurations, the common cases: #' * no_github: No `origin`, no `upstream`. #' * ours: `origin` exists, is not a fork, and we can push to it. Owner of #' `origin` could be current user, another user, or an org. No `upstream`. #' - Less common variant: `upstream` exists, `origin` does not, and we can #' push to `upstream`. The fork-ness of `upstream` is not consulted. #' * fork: `origin` exists and we can push to it. `origin` is a fork of the repo #' configured as `upstream`. We may or may not be able to push to `upstream`. #' * theirs: Exactly one of `origin` and `upstream` exist and we can't push to #' it. The fork-ness of this remote repo is not consulted. #' #' Possible GitHub remote configurations, the peculiar ones: #' * fork_upstream_is_not_origin_parent: `origin` exists, it's a fork, but its #' parent repo is not configured as `upstream`. Either there's no `upstream` #' or `upstream` exists but it's not the parent of `origin`. #' * fork_cannot_push_origin: `origin` is a fork and its parent is configured #' as `upstream`. But we can't push to `origin`. #' * upstream_but_origin_is_not_fork: `origin` and `upstream` both exist, but #' `origin` is not a fork of anything and, specifically, it's not a fork of #' `upstream`. #' #' Remote configuration "guesses" we apply when `github_get = FALSE` or when #' we make unauthorized requests (no PAT found) and therefore have no info on #' permissions #' * maybe_ours_or_theirs: Exactly one of `origin` and `upstream` exists. #' * maybe_fork: Both `origin` and `upstream` exist. #' #' @inheritParams github_remotes #' @keywords internal #' @noRd new_github_remote_config <- function() { ptype <- github_remotes( x = data.frame(name = character(), url = character(), stringsAsFactors = FALSE) ) # 0-row df --> a well-named list of properly typed NAs ptype <- map(ptype, ~ c(NA, .x)) structure( list( type = NA_character_, host_url = NA_character_, pr_ready = FALSE, desc = "Unexpected remote configuration.", origin = c(name = "origin", is_configured = FALSE, ptype), upstream = c(name = "upstream", is_configured = FALSE, ptype) ), class = "github_remote_config" ) } github_remote_config <- function(github_get = NA) { cfg <- new_github_remote_config() grl <- github_remotes(github_get = github_get) if (nrow(grl) == 0) { return(cfg_no_github(cfg)) } cfg$origin$is_configured <- "origin" %in% grl$remote cfg$upstream$is_configured <- "upstream" %in% grl$remote single_remote <- xor(cfg$origin$is_configured, cfg$upstream$is_configured) if (!single_remote) { if (length(unique(grl$host)) != 1) { ui_stop(" Internal error: Multiple GitHub hosts {ui_value(grl$host)}") } if (length(unique(grl$github_got)) != 1) { ui_stop(" Internal error: Got GitHub API info for some remotes, but not all Do all the remotes still exist? 
Do you still have access?") } if (length(unique(grl$perm_known)) != 1) { ui_stop(" Internal error: Know GitHub permissions for some remotes, but not all") } } cfg$host_url <- unique(grl$host_url) github_got <- any(grl$github_got) perm_known <- any(grl$perm_known) if (cfg$origin$is_configured) { cfg$origin <- utils::modifyList(cfg$origin, grl[grl$remote == "origin",]) } if (cfg$upstream$is_configured) { cfg$upstream <- utils::modifyList(cfg$upstream, grl[grl$remote == "upstream",]) } if (github_got && !single_remote) { cfg$origin$parent_is_upstream <- identical(cfg$origin$parent_repo_spec, cfg$upstream$repo_spec) } if (!github_got || !perm_known) { if (single_remote) { return(cfg_maybe_ours_or_theirs(cfg)) } else { return(cfg_maybe_fork(cfg)) } } # `github_got` must be TRUE # `perm_known` must be TRUE # origin only if (single_remote && cfg$origin$is_configured) { if (cfg$origin$is_fork) { if (cfg$origin$can_push) { return(cfg_fork_upstream_is_not_origin_parent(cfg)) } else { return(cfg_theirs(cfg)) } } else { if (cfg$origin$can_push) { return(cfg_ours(cfg)) } else { return(cfg_theirs(cfg)) } } } # upstream only if (single_remote && cfg$upstream$is_configured) { if (cfg$upstream$can_push) { return(cfg_ours(cfg)) } else { return(cfg_theirs(cfg)) } } # origin and upstream if (cfg$origin$is_fork) { if (cfg$origin$parent_is_upstream) { if (cfg$origin$can_push) { return(cfg_fork(cfg)) } else { return(cfg_fork_cannot_push_origin(cfg)) } } else { return(cfg_fork_upstream_is_not_origin_parent(cfg)) } } else { return(cfg_upstream_but_origin_is_not_fork(cfg)) } } #' Select a target (GitHub) repo #' #' @description #' Returns information about ONE GitHub repository. Used when we need to #' designate which repo we will, e.g., open an issue on or activate a CI service #' for. This information might be used in a GitHub API request or to form URLs. #' #' Examples: #' * Badge URLs #' * URLs where you can activate a CI service #' * URLs for DESCRIPTION fields such as URL and BugReports #' `target_repo()` passes `github_get` along to `github_remote_config()`. If #' `github_get = TRUE`, `target_repo()` will error for configs other than #' `"ours"` or `"fork"`. `target_repo()` always errors for bad configs. If #' `github_get = NA` or `FALSE`, the "maybe" configs are tolerated. #' #' `target_repo_spec()` is a less capable function for when you just need an #' `OWNER/REPO` spec. Currently, it does not set or offer control over #' `github_get`, although I've considered explicitly setting `github_get = #' FALSE` or adding this argument, defaulting to `FALSE`. #' #' @inheritParams github_remotes #' @param cfg An optional GitHub remote configuration. Used to get the target #' repo when the function had some need for the full config. #' @param role We use "source" to mean the principal repo where a project's #' development happens. We use "primary" to mean the principal repo this #' particular user interacts with or has the greatest power over. They can be #' the same or different. Examples: #' * For a personal project you own, "source" and "primary" are the same. #' Presumably the `origin` remote. #' * For a collaboratively developed project, an outside contributor must create #' a fork in order to make a PR. For such a person, their fork is "primary" #' (presumably `origin`) and the original repo that they forked is "source" #' (presumably `upstream`). #' This is *almost* consistent with terminology used by the GitHub API. A fork #' has a "source repo" and a "parent repo", which are usually the same. 
They #' only differ when working with a fork of a repo that is itself a fork. In this #' rare case, the parent is the immediate fork parent and the source is the #' ur-parent, i.e. the root of this particular tree. The source repo is not a #' fork. #' @param ask In some configurations, if `ask = TRUE` and we're in an #' interactive session, user gets a choice between `origin` and `upstream`. #' @keywords internal #' @noRd target_repo <- function(cfg = NULL, github_get = NA, role = c("source", "primary"), ask = is_interactive()) { cfg <- cfg %||% github_remote_config(github_get = github_get) stopifnot(inherits(cfg, "github_remote_config")) role <- match.arg(role) check_for_bad_config(cfg) if (isTRUE(github_get)) { check_for_config(cfg) } # upstream only if (cfg$upstream$is_configured && !cfg$origin$is_configured) { return(cfg$upstream) } # origin only if (cfg$origin$is_configured && !cfg$upstream$is_configured) { return(cfg$origin) } if (!ask || !is_interactive()) { return(switch( role, source = cfg$upstream, primary = cfg$origin )) } choices <- c( origin = glue("{cfg$origin$repo_spec} = {ui_value('origin')}"), upstream = glue("{cfg$upstream$repo_spec} = {ui_value('upstream')}") ) title <- glue("Which repo should we target?") choice <- utils::menu(choices, graphics = FALSE, title = title) cfg[[names(choices)[choice]]] } target_repo_spec <- function(role = c("source", "primary"), ask = is_interactive()) { tr <- target_repo(role = match.arg(role), ask = ask) tr$repo_spec } # formatting github remote configurations for humans --------------------------- format_remote <- function(remote) { effective_spec <- function(remote) { if (remote$is_configured) { ui_value(remote$repo_spec) } else { ui_unset("not configured") } } push_clause <- function(remote) { if (!remote$is_configured || is.na(remote$can_push)) { return() } if (remote$can_push) " (can push)" else " (can not push)" } out <- c( glue("{remote$name} = {effective_spec(remote)}"), push_clause(remote), if (isTRUE(remote$is_fork)) { glue(" = fork of {ui_value(remote$parent_repo_spec)}") } ) glue_collapse(out) } format_fields <- function(cfg) { list( type = glue("Type = {ui_value(cfg$type)}"), host_url = glue("Host = {ui_value(cfg$host_url)}"), pr_ready = glue("Config supports a pull request = {ui_value(cfg$pr_ready)}"), origin = format_remote(cfg$origin), upstream = format_remote(cfg$upstream), desc = if (is.na(cfg$desc)) { glue("Desc = {ui_unset('no description')}") } else { glue("Desc = {cfg$desc}") } ) } #' @export format.github_remote_config <- function(x, ...) { glue::as_glue(format_fields(x)) } #' @export print.github_remote_config <- function(x, ...) { cat(format(x, ...), sep = "\n") invisible(x) } # refines output of format_fields() to create input better suited to # ui_github_remote_config_wat() and stop_bad_github_remote_config() github_remote_config_wat <- function(cfg, context = c("menu", "abort")) { context <- match.arg(context) adjective <- switch(context, menu = "Unexpected", abort = "Unsupported") out <- format_fields(cfg) out$pr_ready <- NULL out$type <- glue("{adjective} GitHub remote configuration: {ui_value(cfg$type)}") out$desc <- if (is.na(cfg$desc)) NULL else cfg$desc out } # returns TRUE if user selects "no" --> exit the calling function # return FALSE if user select "yes" --> keep going, they've been warned ui_github_remote_config_wat <- function(cfg) { ui_nope( github_remote_config_wat(cfg, context = "menu"), yes = "Yes, I want to proceed. 
I know what I'm doing.", no = "No, I want to stop and straighten out my GitHub remotes first.", shuffle = FALSE ) } stop_bad_github_remote_config <- function(cfg) { abort( message = unname(unlist(github_remote_config_wat(cfg, context = "abort"))), class = c("usethis_error_bad_github_remote_config", "usethis_error"), cfg = cfg ) } stop_maybe_github_remote_config <- function(cfg) { msg <- github_remote_config_wat(cfg) msg$type <- glue(" Pull request functions can't work with GitHub remote configuration: \\ {ui_value(cfg$type)} The most likely problem is that we aren't discovering your GitHub \\ personal access token Call {ui_code('gh_token_help()')} for help") abort( message = unname(msg), class = c("usethis_error_invalid_pr_config", "usethis_error"), cfg = cfg ) } check_for_bad_config <- function(cfg, bad_configs = c( "no_github", "fork_upstream_is_not_origin_parent", "fork_cannot_push_origin", "upstream_but_origin_is_not_fork" )) { if (cfg$type %in% bad_configs) { stop_bad_github_remote_config(cfg) } invisible() } check_for_maybe_config <- function(cfg, maybe_configs = c( "maybe_ours_or_theirs", "maybe_fork" )) { if (cfg$type %in% maybe_configs) { stop_maybe_github_remote_config(cfg) } invisible() } check_for_config <- function(cfg = NULL, ok_configs = c("ours", "fork")) { cfg <- cfg %||% github_remote_config(github_get = TRUE) stopifnot(inherits(cfg, "github_remote_config")) if (cfg$type %in% ok_configs) { return(invisible(cfg)) } check_for_bad_config(cfg) check_for_maybe_config(cfg) ui_stop(" Internal error: Unexpected GitHub remote configuration: {ui_value(cfg$type)}") } # github remote configurations ------------------------------------------------- # use for configs read_more <- function() { glue(" Read more about the GitHub remote configurations that usethis supports at: {ui_value('https://happygitwithr.com/common-remote-setups.html')}") } read_more_maybe <- function() { glue(" Read more about what this GitHub remote configurations means at: {ui_value('https://happygitwithr.com/common-remote-setups.html')}") } cfg_no_github <- function(cfg) { utils::modifyList( cfg, list( type = "no_github", pr_ready = FALSE, desc = glue(" Neither {ui_value('origin')} nor {ui_value('upstream')} is a GitHub \\ repo. {read_more()}") ) ) } cfg_ours <- function(cfg) { utils::modifyList( cfg, list( type = "ours", pr_ready = TRUE, desc = glue(" {ui_value('origin')} is both the source and primary repo. {read_more()}") ) ) } cfg_theirs <- function(cfg) { configured <- if (cfg$origin$is_configured) "origin" else "upstream" utils::modifyList( cfg, list( type = "theirs", pr_ready = FALSE, desc = glue(" The only configured GitHub remote is {ui_value(configured)}, which you cannot push to. If your goal is to make a pull request, you must fork-and-clone. {ui_code('usethis::create_from_github()')} can do this. {read_more()}") ) ) } cfg_maybe_ours_or_theirs <- function(cfg) { if (cfg$origin$is_configured) { configured <- "origin" not_configured <- "upstream" } else { configured <- "upstream" not_configured <- "origin" } utils::modifyList( cfg, list( type = "maybe_ours_or_theirs", pr_ready = NA, desc = glue(" {ui_value(configured)} is a GitHub repo and {ui_value(not_configured)} \\ is either not configured or is not a GitHub repo. We may be offline or you may need to configure a GitHub personal access token. {ui_code('gh_token_help()')} can help with that. 
{read_more_maybe()}") ) ) } cfg_fork <- function(cfg) { utils::modifyList( cfg, list( type = "fork", pr_ready = TRUE, desc = glue(" {ui_value('origin')} is a fork of {ui_value(cfg$upstream$repo_spec)}, \\ which is configured as the {ui_value('upstream')} remote. {read_more()}") ) ) } cfg_maybe_fork <- function(cfg) { utils::modifyList( cfg, list( type = "maybe_fork", pr_ready = NA, desc = glue(" Both {ui_value('origin')} and {ui_value('upstream')} appear to be \\ GitHub repos. However, we can't confirm their relationship to each \\ other (e.g., fork and fork parent) or your permissions (e.g. push \\ access). We may be offline or you may need to configure a GitHub personal access token. {ui_code('gh_token_help()')} can help with that. {read_more_maybe()}") ) ) } cfg_fork_cannot_push_origin <- function(cfg) { utils::modifyList( cfg, list( type = "fork_cannot_push_origin", pr_ready = FALSE, desc = glue(" The {ui_value('origin')} remote is a fork, but you can't push to it. {read_more()}") ) ) } cfg_fork_upstream_is_not_origin_parent <- function(cfg) { utils::modifyList( cfg, list( type = "fork_upstream_is_not_origin_parent", pr_ready = FALSE, desc = glue(" The {ui_value('origin')} GitHub remote is a fork, but its parent is \\ not configured as the {ui_value('upstream')} remote. {read_more()}") ) ) } cfg_upstream_but_origin_is_not_fork <- function(cfg) { utils::modifyList( cfg, list( type = "upstream_but_origin_is_not_fork", pr_ready = FALSE, desc = glue(" Both {ui_value('origin')} and {ui_value('upstream')} are GitHub \\ remotes, but {ui_value('origin')} is not a fork and, in particular, \\ is not a fork of {ui_value('upstream')}. {read_more()}") ) ) } usethis/R/tidyverse.R0000644000175000017500000003071014153502006014427 0ustar nileshnilesh#' Helpers for tidyverse development #' #' These helpers follow tidyverse conventions which are generally a little #' stricter than the defaults, reflecting the need for greater rigor in #' commonly used packages. #' #' @details #' #' * `create_tidy_package()`: creates a new package, immediately applies as many #' of the tidyverse conventions as possible, issues a few reminders, and #' activates the new package. #' #' * `use_tidy_dependencies()`: sets up standard dependencies used by all #' tidyverse packages (except packages that are designed to be dependency free). #' #' * `use_tidy_description()`: puts fields in standard order and alphabetises #' dependencies. #' #' * `use_tidy_eval()`: imports a standard set of helpers to facilitate #' programming with the tidy eval toolkit. #' #' * `use_tidy_style()`: styles source code according to the [tidyverse style #' guide](https://style.tidyverse.org). This function will overwrite files! See #' below for usage advice. #' #' * `use_tidy_contributing()`: adds standard tidyverse contributing guidelines. #' #' * `use_tidy_issue_template()`: adds a standard tidyverse issue template. #' #' * `use_tidy_release_test_env()`: updates the test environment section in #' `cran-comments.md`. #' #' * `use_tidy_support()`: adds a standard description of support resources for #' the tidyverse. #' #' * `use_tidy_coc()`: equivalent to `use_code_of_conduct()`, but puts the #' document in a `.github/` subdirectory. #' #' * `use_tidy_github()`: convenience wrapper that calls #' `use_tidy_contributing()`, `use_tidy_issue_template()`, `use_tidy_support()`, #' `use_tidy_coc()`. #' #' * [use_tidy_github_labels()] calls `use_github_labels()` to implement #' tidyverse conventions around GitHub issue label names and colours. 
#' #' * `use_tidy_upkeep_issue()` creates an issue containing a checklist of #' actions to bring your package up to current tidyverse standards. #' #' @section `use_tidy_style()`: #' Uses the [styler package](https://styler.r-lib.org) package to style all code #' in a package, project, or directory, according to the [tidyverse style #' guide](https://style.tidyverse.org). #' #' **Warning:** This function will overwrite files! It is strongly suggested to #' only style files that are under version control or to first create a backup #' copy. #' #' Invisibly returns a data frame with one row per file, that indicates whether #' styling caused a change. #' #' @param strict Boolean indicating whether or not a strict version of styling #' should be applied. See [styler::tidyverse_style()] for details. #' #' @name tidyverse NULL #' @export #' @rdname tidyverse #' @inheritParams create_package #' @inheritParams licenses create_tidy_package <- function(path, copyright_holder = NULL) { path <- create_package(path, rstudio = TRUE, open = FALSE) local_project(path) use_testthat() use_mit_license(copyright_holder) use_tidy_description() use_readme_rmd(open = FALSE) use_lifecycle_badge("experimental") use_cran_badge() use_cran_comments(open = FALSE) ui_todo("In the new package, remember to do:") ui_todo("{ui_code('use_git()')}") ui_todo("{ui_code('use_github()')}") ui_todo("{ui_code('use_tidy_github()')}") ui_todo("{ui_code('use_tidy_github_actions()')}") ui_todo("{ui_code('use_tidy_github_labels()')}") ui_todo("{ui_code('use_pkgdown_github_pages()')}") proj_activate(path) } #' @export #' @rdname tidyverse use_tidy_description <- function() { desc <- desc::description$new(file = proj_get()) tidy_desc(desc) desc$write() invisible(TRUE) } #' @export #' @rdname tidyverse use_tidy_dependencies <- function() { check_has_package_doc("use_tidy_dependencies()") use_dependency("rlang", "Imports") use_dependency("lifecycle", "Imports") use_dependency("cli", "Imports") use_dependency("glue", "Imports") use_dependency("withr", "Imports") # standard imports imports <- any( roxygen_ns_append("@import rlang"), roxygen_ns_append("@importFrom glue glue"), roxygen_ns_append("@importFrom lifecycle deprecated") ) if (imports) { roxygen_update_ns() } # add badges; we don't need the details ui_silence(use_lifecycle()) # If needed, copy in lightweight purrr compatibility layer if (!desc::desc(proj_get())$has_dep("purrr")) { use_directory("R") use_github_file( "r-lib/rlang", path = "R/compat-purrr.R", save_as = "R/compat-purrr.R" ) } invisible() } #' @export #' @rdname tidyverse use_tidy_eval <- function() { check_is_package("use_tidy_eval()") use_dependency("roxygen2", "Suggests") use_dependency("rlang", "Imports", min_version = "0.4.11") new <- use_template("tidy-eval.R", "R/utils-tidy-eval.R") ui_todo("Run {ui_code('devtools::document()')}") return(invisible(new)) } #' @export #' @rdname tidyverse use_tidy_contributing <- function() { use_dot_github() data <- list( Package = project_name(), github_spec = target_repo_spec(ask = FALSE) ) use_template( "tidy-contributing.md", path(".github", "CONTRIBUTING.md"), data = data ) } #' @export #' @rdname tidyverse use_tidy_support <- function() { use_dot_github() data <- list( Package = project_name(), github_spec = target_repo_spec(ask = FALSE) ) use_template( "tidy-support.md", path(".github", "SUPPORT.md"), data = data ) } #' @export #' @rdname tidyverse use_tidy_issue_template <- function() { use_dot_github() use_directory(path(".github", "ISSUE_TEMPLATE")) use_template( 
"tidy-issue.md", path(".github", "ISSUE_TEMPLATE", "issue_template.md") ) } #' @export #' @rdname tidyverse use_tidy_coc <- function() { use_dot_github() use_code_of_conduct("codeofconduct@rstudio.com", path = ".github") } #' @export #' @rdname tidyverse use_tidy_github <- function() { use_dot_github() use_tidy_contributing() use_tidy_issue_template() use_tidy_support() use_tidy_coc() } use_dot_github <- function(ignore = TRUE) { use_directory(".github", ignore = ignore) use_git_ignore("*.html", directory = ".github") } #' @export #' @rdname tidyverse use_tidy_style <- function(strict = TRUE) { check_installed("styler") challenge_uncommitted_changes(msg = " There are uncommitted changes and it is highly recommended to get into a \\ clean Git state before restyling your project's code") if (is_package()) { styled <- styler::style_pkg( proj_get(), style = styler::tidyverse_style, strict = strict ) } else { styled <- styler::style_dir( proj_get(), style = styler::tidyverse_style, strict = strict ) } ui_line() ui_done("Styled project according to the tidyverse style guide") invisible(styled) } #' Identify contributors via GitHub activity #' #' Derives a list of GitHub usernames, based on who has opened issues or pull #' requests. Used to populate the acknowledgment section of package release blog #' posts at . If no arguments are given, we #' retrieve all contributors to the active project since its last (GitHub) #' release. Unexported helper functions, `releases()` and `ref_df()` can be #' useful interactively to get a quick look at release tag names and a data #' frame about refs (defaulting to releases), respectively. #' #' @param repo_spec Optional GitHub repo specification in any form accepted for #' the `repo_spec` argument of [create_from_github()] (plain spec or a browser #' or Git URL). A URL specification is the only way to target a GitHub host #' other than `"github.com"`, which is the default. #' @param from,to GitHub ref (i.e., a SHA, tag, or release) or a timestamp in #' ISO 8601 format, specifying the start or end of the interval of interest, #' in the sense of `[from, to]`. Examples: "08a560d", "v1.3.0", #' "2018-02-24T00:13:45Z", "2018-05-01". When `from = NULL, to = NULL`, we set #' `from` to the timestamp of the most recent (GitHub) release. Otherwise, #' `NULL` means "no bound". #' #' @return A character vector of GitHub usernames, invisibly. 
#' @export #' #' @examples #' \dontrun{ #' # active project, interval = since the last release #' use_tidy_thanks() #' #' # active project, interval = since a specific datetime #' use_tidy_thanks(from = "2020-07-24T00:13:45Z") #' #' # r-lib/usethis, interval = since a certain date #' use_tidy_thanks("r-lib/usethis", from = "2020-08-01") #' #' # r-lib/usethis, up to a specific release #' use_tidy_thanks("r-lib/usethis", from = NULL, to = "v1.1.0") #' #' # r-lib/usethis, since a specific commit, up to a specific date #' use_tidy_thanks("r-lib/usethis", from = "08a560d", to = "2018-05-14") #' #' # r-lib/usethis, but with copy/paste of a browser URL #' use_tidy_thanks("https://github.com/r-lib/usethis") #' } use_tidy_thanks <- function(repo_spec = NULL, from = NULL, to = NULL) { repo_spec <- repo_spec %||% target_repo_spec() parsed_repo_spec <- parse_repo_url(repo_spec) repo_spec <- parsed_repo_spec$repo_spec # this is the most practical way to propagate `host` to downstream helpers if (!is.null(parsed_repo_spec$host)) { withr::local_envvar(c(GITHUB_API_URL = parsed_repo_spec$host)) } if (is.null(to)) { from <- from %||% releases(repo_spec)[[1]] } from_timestamp <- as_timestamp(repo_spec, x = from) %||% "2008-01-01" to_timestamp <- as_timestamp(repo_spec, x = to) ui_done(" Looking for contributors from {as.Date(from_timestamp)} to \\ {to_timestamp %||% 'now'}") res <- gh::gh( "/repos/{owner}/{repo}/issues", owner = spec_owner(repo_spec), repo = spec_repo(repo_spec), since = from_timestamp, state = "all", filter = "all", .limit = Inf ) if (length(res) < 1) { ui_oops("No matching issues/PRs found") return(invisible()) } creation_time <- function(x) { as.POSIXct(map_chr(x, "created_at")) } res <- res[creation_time(res) >= as.POSIXct(from_timestamp)] if (!is.null(to_timestamp)) { res <- res[creation_time(res) <= as.POSIXct(to_timestamp)] } if (length(res) == 0) { ui_line("No matching issues/PRs found.") return(invisible()) } contributors <- sort(unique(map_chr(res, c("user", "login")))) contrib_link <- glue("[@{contributors}](https://github.com/{contributors})") ui_done("Found {length(contributors)} contributors:") ui_code_block(glue_collapse(contrib_link, sep = ", ", last = ", and ") + glue(".")) invisible(contributors) } ## if x appears to be a timestamp, pass it through ## otherwise, assume it's a ref and look up its timestamp as_timestamp <- function(repo_spec, x = NULL) { if (is.null(x)) { return(NULL) } as_POSIXct <- try(as.POSIXct(x), silent = TRUE) if (inherits(as_POSIXct, "POSIXct")) { return(x) } ui_done("Resolving timestamp for ref {ui_value(x)}") ref_df(repo_spec, refs = x)$timestamp } ## returns a data frame on GitHub refs, defaulting to all releases ref_df <- function(repo_spec, refs = NULL) { stopifnot(is_string(repo_spec)) refs <- refs %||% releases(repo_spec) if (is.null(refs)) { return(NULL) } get_thing <- function(thing) { gh::gh( "/repos/{owner}/{repo}/commits/{thing}", owner = spec_owner(repo_spec), repo = spec_repo(repo_spec), thing = thing ) } res <- lapply(refs, get_thing) data.frame( ref = refs, sha = substr(map_chr(res, "sha"), 1, 7), timestamp = map_chr(res, c("commit", "committer", "date")), stringsAsFactors = FALSE ) } ## returns character vector of release tag names releases <- function(repo_spec) { stopifnot(is_string(repo_spec)) res <- gh::gh( "/repos/{owner}/{repo}/releases", owner = spec_owner(repo_spec), repo = spec_repo(repo_spec) ) if (length(res) < 1) { return(NULL) } map_chr(res, "tag_name") } ## approaches based on available.packages() and/or 
installed.packages() present ## several edge cases, requirements, and gotchas ## for this application, hard-wiring seems to be "good enough" base_and_recommended <- function() { # base_pkgs <- as.vector(installed.packages(priority = "base")[, "Package"]) # av <- available.packages() # keep <- av[ , "Priority", drop = TRUE] %in% "recommended" # rec_pkgs <- unname(av[keep, "Package", drop = TRUE]) # dput(sort(unique(c(base_pkgs, rec_pkgs)))) c( "base", "boot", "class", "cluster", "codetools", "compiler", "datasets", "foreign", "graphics", "grDevices", "grid", "KernSmooth", "lattice", "MASS", "Matrix", "methods", "mgcv", "nlme", "nnet", "parallel", "rpart", "spatial", "splines", "stats", "stats4", "survival", "tcltk", "tools", "utils" ) } usethis/R/git-default-branch.R0000644000175000017500000005256614153502006016066 0ustar nileshnilesh#' Get or set the default Git branch #' #' @description #' The `git_default_branch*()` functions put some structure around the somewhat #' fuzzy (but definitely real) concept of the default branch. In particular, #' they support new conventions around the Git default branch name, globally or #' in a specific project / Git repository. #' #' @section Background on the default branch: #' #' Technically, Git has no official concept of the default branch. But in #' reality, almost all Git repos have an *effective default branch*. If there's #' only one branch, this is it! It is the branch that most bug fixes and #' features get merged in to. It is the branch you see when you first visit a #' repo on a site such as GitHub. On a Git remote, it is the branch that `HEAD` #' points to. #' #' Historically, `master` has been the most common name for the default branch, #' but `main` is an increasingly popular choice. #' #' @section `git_default_branch_configure()`: #' This configures `init.defaultBranch` at the global (a.k.a user) level. This #' setting determines the name of the branch that gets created when you make the #' first commit in a new Git repo. `init.defaultBranch` only affects the local #' Git repos you create in the future. #' #' @section `git_default_branch()`: #' This figures out the default branch of the current Git repo, integrating #' information from the local repo and, if applicable, the `upstream` or #' `origin` remote. If there is a local vs. remote mismatch, #' `git_default_branch()` throws an error with advice to call #' `git_default_branch_rediscover()` to repair the situation. #' #' For a remote repo, the default branch is the branch that `HEAD` points to. #' #' For the local repo, if there is only one branch, that must be the default! #' Otherwise we try to identify the relevant local branch by looking for #' specific branch names, in this order: #' * whatever the default branch of `upstream` or `origin` is, if applicable #' * `main` #' * `master` #' * the value of the Git option `init.defaultBranch`, with the usual deal where #' a local value, if present, takes precedence over a global (a.k.a. #' user-level) value #' #' @section `git_default_branch_rediscover()`: #' This consults an external authority -- specifically, the remote **source #' repo** on GitHub -- to learn the default branch of the current project / #' repo. If that doesn't match the apparent local default branch (for example, #' the project switched from `master` to `main`), we do the corresponding branch #' renaming in your local repo and, if relevant, in your fork. #' #' See for more about #' GitHub remote configurations and, e.g., what we mean by the source repo. 
This #' function works for the configurations `"ours"`, `"fork"`, and `"theirs"`. #' @section `git_default_branch_rename()`: #' Note: this only works for a repo that you effectively own. In terms of #' GitHub, you must own the **source repo** personally or, if #' organization-owned, you must have `admin` permission on the **source repo**. #' #' This renames the default branch in the **source repo** on GitHub and then #' calls `git_default_branch_rediscover()`, to make any necessary changes in the #' local repo and, if relevant, in your personal fork. #' #' See for more about #' GitHub remote configurations and, e.g., what we mean by the source repo. This #' function works for the configurations `"ours"`, `"fork"`, and `"no_github"`. #' #' Regarding `"no_github"`: Of course, this function does what you expect for a #' local repo with no GitHub remotes, but that is not the primary use case. #' @return Name of the default branch. #' @name git-default-branch NULL #' @export #' @rdname git-default-branch #' @examples #' \dontrun{ #' git_default_branch() #' } git_default_branch <- function() { repo <- git_repo() # TODO: often when we call git_default_branch(), we already have a GitHub # configuration or target repo, as produced by github_remote_config() or # target_repo(). In that case, we don't need to start from scratch as we do # here. But I'm not sure it's worth adding complexity to allow passing this # data in. # TODO: this critique feels somewhat mis-placed, i.e. it brings up a general # concern about a repo's config (or the user's permissions and creds) # related to whether github_remotes() should be as silent as it is about # 404s critique_remote <- function(remote) { if (remote$is_configured && is.na(remote$default_branch)) { ui_oops(" The {ui_value(remote$name)} remote is configured, but we can't \\ determine its default branch. Possible reasons: - The remote repo no longer exists, suggesting the local remote should be deleted. - We are offline or that specific Git server is down. - You don't have the necessary permission or something is wrong with your credentials.") } } upstream <- git_default_branch_remote("upstream") if (is.na(upstream$default_branch)) { critique_remote(upstream) origin <- git_default_branch_remote("origin") if (is.na(origin$default_branch)) { critique_remote(origin) db_source <- list() } else { db_source <- origin } } else { db_source <- upstream } db_local_with_source <- tryCatch( guess_local_default_branch(db_source$default_branch), error = function(e) NA_character_ ) # these error sub-classes and error data are for the benefit of git_sitrep() if (is.na(db_local_with_source) ) { if (length(db_source)) { usethis_abort(c( "Default branch mismatch between local repo and remote.", "The default branch of the {.val {db_source$name}} remote is {.val {db_source$default_branch}}.", "But the local repo has no branch named {.val {db_source$default_branch}}.", "Call {.code git_default_branch_rediscover()} to resolve this." 
), class = "error_default_branch", db_source = db_source ) } else { usethis_abort( "Can't determine the local repo's default branch.", class = "error_default_branch" ) } } # we learned a default branch from the local repo if (is.null(db_source$default_branch) || is.na(db_source$default_branch) || identical(db_local_with_source, db_source$default_branch)) { return(db_local_with_source) } # we learned a default branch from the source repo and it doesn't match # the local default branch usethis_abort(c( "Default branch mismatch between local repo and remote.", "The default branch of the {.val {db_source$name}} remote is {.val {db_source$default_branch}}.", "But the default branch of the local repo appears to be {.val {db_local_with_source}}.", "Call {.code git_default_branch_rediscover()} to resolve this." ), class = "error_default_branch", db_source = db_source, db_local = db_local_with_source ) } # returns a whole data structure, because the caller needs the surrounding # context to produce a helpful error message git_default_branch_remote <- function(remote = "origin") { repo <- git_repo() out <- list( name = remote, is_configured = NA, url = NA_character_, repo_spec = NA_character_, default_branch = NA_character_ ) url <- git_remotes()[[remote]] if (length(url) == 0) { out$is_configured <- FALSE return(out) } out$is_configured <- TRUE out$url <- url # TODO: generalize here for GHE hosts that don't include 'github' parsed <- parse_github_remotes(url) # if the protocol is ssh, I suppose we can't assume a PAT, i.e. it's better # to use the Git approach vs. the GitHub API approach if (grepl("github", parsed$host) && parsed$protocol == "https") { remote_dat <- github_remotes(remote, github_get = NA) out$repo_spec <- remote_dat$repo_spec out$default_branch <- remote_dat$default_branch return(out) } out$default_branch <- tryCatch( { gert::git_fetch(remote = remote, repo = repo, verbose = FALSE) res <- gert::git_remote_ls(remote = remote, verbose = FALSE, repo = repo) path_file(res$symref[res$ref == "HEAD"]) }, error = function(e) NA_character_ ) out } default_branch_candidates <- function() { c( "main", "master", # we use `where = "de_facto"` so that one can configure init.defaultBranch # *locally* (which is unusual, but possible) in a repo that uses an # unconventional default branch name git_cfg_get("init.defaultBranch", where = "de_facto") ) } # `prefer` is available if you want to inject external information, such as # the default branch of a remote guess_local_default_branch <- function(prefer = NULL, verbose = FALSE) { repo <- git_repo() gb <- gert::git_branch_list(local = TRUE, repo = repo)[["name"]] if (length(gb) == 0) { ui_stop(" Can't find any local branches. Do you need to make your first commit?") } candidates <- c(prefer, default_branch_candidates()) first_matched <- function(x, table) table[min(match(x, table), na.rm = TRUE)] if (length(gb) == 1) { db <- gb } else if (any(gb %in% candidates)) { db <- first_matched(gb, candidates) } else { # TODO: perhaps this should be classed, so I can catch it and distinguish # from the ui_stop() above, where there are no local branches. ui_stop(" Unable to guess which existing local branch plays the role of the default.") } if (verbose) { ui_info(" Local branch {ui_value(db)} appears to play the role of \\ the default branch.") } db } #' @export #' @rdname git-default-branch #' @param name Default name for the initial branch in new Git repositories. 
#' @examples #' \dontrun{ #' git_default_branch_configure() #' } git_default_branch_configure <- function(name = "main") { check_string(name) ui_done("Configuring {ui_field('init.defaultBranch')} as {ui_value(name)}.") ui_info("Remember: this only affects repos you create in the future.") use_git_config(scope = "user", `init.defaultBranch` = name) invisible(name) } #' @export #' @rdname git-default-branch #' @param current_local_default Name of the local branch that is currently #' functioning as the default branch. If unspecified, this can often be #' inferred. #' @examples #' \dontrun{ #' git_default_branch_rediscover() #' #' # you can always explicitly specify the local branch that's been playing the #' # role of the default #' git_default_branch_rediscover("unconventional_default_branch_name") #' } git_default_branch_rediscover <- function(current_local_default = NULL) { rediscover_default_branch(old_name = current_local_default) } #' @export #' @rdname git-default-branch #' @param from Name of the branch that is currently functioning as the default #' branch. #' @param to New name for the default branch. #' @examples #' \dontrun{ #' git_default_branch_rename() #' #' # you can always explicitly specify one or both branch names #' git_default_branch_rename(from = "this", to = "that") #' } git_default_branch_rename <- function(from = NULL, to = "main") { repo <- git_repo() maybe_string(from) check_string(to) if (!is.null(from) && !gert::git_branch_exists(from, local = TRUE, repo = repo)) { ui_stop("Can't find existing branch named {ui_value(from)}.") } cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg, ok_configs = c("ours", "fork", "no_github")) if (cfg$type == "no_github") { from <- from %||% guess_local_default_branch(verbose = TRUE) if (from == to) { ui_info("Local repo already has {ui_value(from)} as its default branch.") } else { ui_done("Moving local {ui_value(from)} branch to {ui_value(to)}.") gert::git_branch_move(branch = from, new_branch = to, repo = repo) rstudio_git_tickle() report_fishy_files(old_name = from, new_name = to) } return(invisible(to)) } # cfg is now either fork or ours tr <- target_repo(cfg, role = "source", ask = FALSE) old_source_db <- tr$default_branch if (!isTRUE(tr$can_admin)) { ui_stop(" You don't seem to have {ui_field('admin')} permissions for the source \\ repo {ui_value(tr$repo_spec)}, which is required to rename the default \\ branch.") } old_local_db <- from %||% guess_local_default_branch(old_source_db, verbose = FALSE) if (old_local_db != old_source_db) { ui_oops(" It's weird that the current default branch for your local repo and \\ the source repo are different: {ui_value(old_local_db)} (local) != {ui_value(old_source_db)} (source)") if (ui_nope( "Are you sure you want to proceed?", yes = "yes", no = "no", shuffle = FALSE)) { ui_oops("Cancelling.") return(invisible()) } } source_update <- old_source_db != to if (source_update) { gh <- gh_tr(tr) gh( "POST /repos/{owner}/{repo}/branches/{from}/rename", from = old_source_db, new_name = to ) } if (source_update) { ui_done(" Default branch of the source repo {ui_value(tr$repo_spec)} has moved: \\ {ui_value(old_source_db)} --> {ui_value(to)}") } else { ui_done(" Default branch of source repo {ui_value(tr$repo_spec)} is \\ {ui_value(to)}. 
Nothing to be done.") } report_fishy_files(old_name = old_local_db, new_name = to) rediscover_default_branch(old_name = old_local_db, report_on_source = FALSE) } rediscover_default_branch <- function(old_name = NULL, report_on_source = TRUE) { maybe_string(old_name) # GitHub's official TODOs re: manually updating local environments # after a source repo renames the default branch: # git branch -m OLD-BRANCH-NAME NEW-BRANCH-NAME # git fetch origin # git branch -u origin/NEW-BRANCH-NAME NEW-BRANCH-NAME # git remote set-head origin -a # optionally # git remote prune origin # Note: they are assuming the relevant repo is known as origin, but it could # just as easily be, e.g., upstream. repo <- git_repo() if (!is.null(old_name) && !gert::git_branch_exists(old_name, local = TRUE, repo = repo)) { ui_stop("Can't find existing local branch named {ui_value(old_name)}.") } cfg <- github_remote_config(github_get = TRUE) check_for_config(cfg, ok_configs = c("ours", "fork", "theirs")) tr <- target_repo(cfg, role = "source", ask = FALSE) db <- tr$default_branch # goal, in Git-speak: git remote set-head -a # goal, for humans: learn and record the default branch (i.e. the target of # the symbolic-ref refs/remotes//HEAD) for the named remote # https://git-scm.com/docs/git-remote#Documentation/git-remote.txt-emset-headem # for very stale repos, a fetch is a necessary pre-requisite # I provide `refspec = db` to avoid fetching all refs, which can be VERY slow # for a repo like ggplot2 (several minutes, with no progress reporting) gert::git_fetch(remote = tr$name, refspec = db, verbose = FALSE, repo = repo) gert::git_remote_ls(remote = tr$name, verbose = FALSE, repo = repo) old_name <- old_name %||% guess_local_default_branch(db, verbose = FALSE) local_update <- old_name != db if (local_update) { # goal, in Git-speak: git branch -m gert::git_branch_move(branch = old_name, new_branch = db, repo = repo) rstudio_git_tickle() } # goal, in Git-speak: git branch -u / gert::git_branch_set_upstream( branch = db, upstream = glue("{tr$name}/{db}"), repo = repo ) # goal: get rid of old remote tracking branch, e.g. 
origin/master # goal, in Git-speak: git remote prune origin # I provide a refspec to avoid fetching all refs, which can be VERY slow # for a repo like ggplot2 (several minutes, with no progress reporting) gert::git_fetch( remote = tr$name, refspec = glue("refs/heads/{old_name}:refs/remotes/{tr$name}/{old_name}"), verbose = FALSE, repo = repo, prune = TRUE ) # for "ours" and "theirs", the source repo is the only remote on our radar and # we're done ingesting the default branch from the source repo # but for "fork", we also need to update # the fork = the user's primary repo = the remote known as origin if (cfg$type == "fork") { old_name_fork <- cfg$origin$default_branch fork_update <- old_name_fork != db if (fork_update) { gh <- gh_tr(cfg$origin) gh( "POST /repos/{owner}/{repo}/branches/{from}/rename", from = old_name_fork, new_name = db ) gert::git_fetch(remote = "origin", refspec = db, verbose = FALSE, repo = repo) gert::git_remote_ls(remote = "origin", verbose = FALSE, repo = repo) gert::git_fetch( remote = "origin", refspec = glue("refs/heads/{old_name}:refs/remotes/origin/{old_name}"), verbose = FALSE, repo = repo, prune = TRUE ) } } if (report_on_source) { ui_info(" Default branch of the source repo {ui_value(tr$repo_spec)}: {ui_value(db)}") } if (local_update) { ui_done(" Default branch of local repo has moved: \\ {ui_value(old_name)} --> {ui_value(db)}") } else { ui_done(" Default branch of local repo is {ui_value(db)}. Nothing to be done.") } if (cfg$type == "fork") { if (fork_update) { ui_done(" Default branch of your fork has moved: \\ {ui_value(old_name_fork)} --> {ui_value(db)}") } else { ui_done(" Default branch of your fork is {ui_value(db)}. Nothing to be done.") } } invisible(db) } challenge_non_default_branch <- function(details = "Are you sure you want to proceed?", default_branch = NULL) { actual <- git_branch() default_branch <- default_branch %||% git_default_branch() if (nzchar(details)) { details <- paste0("\n", details) } if (actual != default_branch) { if (ui_nope(" Current branch ({ui_value(actual)}) is not repo's default \\ branch ({ui_value(default_branch)}).{details}")) { ui_stop("Cancelling. Not on desired branch.") } } invisible() } report_fishy_files <- function(old_name = "master", new_name = "main") { ui_todo(" Be sure to update files that refer to the default branch by name. 
Consider searching within your project for {ui_value(old_name)}.") # I don't want failure of a fishy file check to EVER cause # git_default_branch_rename() to fail and prevent the call to # git_default_branch_rediscover() # using a simple try() wrapper because these hints are just "nice to have" try(fishy_github_actions(new_name = new_name), silent = TRUE) try(fishy_badges(old_name = old_name), silent = TRUE) try(fishy_bookdown_config(old_name = old_name), silent = TRUE) } # good test cases: downlit, purrr, pkgbuild, zealot, glue, bench, # textshaping, scales fishy_github_actions <- function(new_name = "main") { if (!uses_github_actions()) { return(invisible(character())) } workflow_dir <- proj_path(".github", "workflows") workflows <- dir_ls(workflow_dir, regexp = "[.]ya?ml$") f <- function(pth, new_name) { x <- yaml::read_yaml(pth) x_unlisted <- unlist(x) locs <- grep("branches", re_match(names(x_unlisted), "[^//.]+$")$.match) branches <- x_unlisted[locs] length(branches) == 0 || new_name %in% branches } includes_branch_name <- map_lgl(workflows, f, new_name = new_name) paths <- proj_rel_path(workflows[!includes_branch_name]) if (length(paths) == 0) { return(invisible(character())) } paths <- sort(paths) ui_paths <- map_chr(paths, ui_path) ui_oops(c( "These GitHub Action files don't mention the new default branch {ui_value(new_name)}:", paste0("- ", ui_paths) )) invisible(paths) } fishy_badges <- function(old_name = "master") { path <- find_readme() if (is.null(path)) { return(invisible(character())) } readme_lines <- read_utf8(path) badge_lines_range <- block_find( readme_lines, block_start = badge_start, block_end = badge_end ) if (length(badge_lines_range) != 2) { return(invisible(character())) } badge_lines <- readme_lines[badge_lines_range[1]:badge_lines_range[2]] if (!any(grepl(old_name, badge_lines))) { return(invisible(character())) } ui_path <- ui_path(proj_rel_path(path)) ui_oops(c( "Some badges may refer to the old default branch {ui_value(old_name)}:", paste0("- ", ui_path) )) invisible(path) } fishy_bookdown_config <- function(old_name = "master") { # https://github.com/dncamp/shift/blob/a12a3fb0cd30ae864525f7a9f1f907a05f15f9a3/_bookdown.yml # https://github.com/Jattan08/Wonderland/blob/b9e7ddc694871d1d13a2a02abe2d3b4a944c4653/_bookdown.yml # edit: https://github.com/dncamp/shift/edit/master/%s # view: https://github.com/dncamp/shift/blob/master/%s # history: https://github.com/YOUR GITHUB USERNAME/YOUR REPO NAME/commits/master/%s bookdown_config <- dir_ls( proj_get(), regexp = "_bookdown[.]ya?ml$", recurse = TRUE ) if (length(bookdown_config) == 0) { return(invisible(character())) } # I am (very weakly) worried about more than 1 match, hence the [[1]] bookdown_config <- purrr::discard(bookdown_config, ~ grepl("revdep", .x))[[1]] bookdown_config_lines <- read_utf8(bookdown_config) linky_lines <- grep("^(edit|view|history)", bookdown_config_lines, value = TRUE) if (!any(grepl(old_name, linky_lines))) { return(invisible(character())) } ui_path <- ui_path(proj_rel_path(bookdown_config)) ui_oops(c( "The bookdown configuration file may refer to the old default branch {ui_value(old_name)}:", paste0("- ", ui_path) )) invisible(path) } usethis/R/use_github_file.R0000644000175000017500000001120414132400710015537 0ustar nileshnilesh#' Copy a file from any GitHub repo into the current project #' #' Gets the content of a file from GitHub, from any repo the user can read, and #' writes it into the active project. 
This function wraps an endpoint of the #' GitHub API which supports specifying a target reference (i.e. branch, tag, #' or commit) and which follows symlinks. #' #' @param repo_spec A string identifying the GitHub repo or, alternatively, a #' GitHub file URL. Acceptable forms: #' * Plain `OWNER/REPO` spec #' * A blob URL, such as `"https://github.com/OWNER/REPO/blob/REF/path/to/some/file"` #' * A raw URL, such as `"https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"` #' #' In the case of a URL, the `path`, `ref`, and `host` are extracted from it, in #' addition to the `repo_spec`. #' @param path Path of file to copy, relative to the GitHub repo it lives in. #' This is extracted from `repo_spec` when user provides a URL. #' @param save_as Path of file to create, relative to root of active project. #' Defaults to the last part of `path`, in the sense of `basename(path)` or #' `fs::path_file(path)`. #' @param ref The name of a branch, tag, or commit. By default, the file at #' `path` will by copied from its current state in the repo's default branch. #' This is extracted from `repo_spec` when user provides a URL. #' @inheritParams use_template #' @inheritParams use_github #' #' @return A logical indicator of whether a file was written, invisibly. #' @export #' #' @examples #' \dontrun{ #' use_github_file( #' "https://github.com/r-lib/actions/blob/v1/examples/check-standard.yaml" #' ) #' #' use_github_file( #' "r-lib/actions", #' path = "examples/check-standard.yaml", #' ref = "v1", #' save_as = ".github/workflows/R-CMD-check.yaml" #' ) #' } use_github_file <- function(repo_spec, path = NULL, save_as = NULL, ref = NULL, ignore = FALSE, open = FALSE, host = NULL) { check_string(repo_spec) dat <- parse_file_url(repo_spec) if (dat$parsed) { repo_spec <- dat$repo_spec path <- dat$path ref <- dat$ref host <- dat$host } check_string(path) save_as <- save_as %||% path_file(path) check_string(save_as) maybe_string(ref) maybe_string(host) ref_string <- if (is.null(ref)) "" else glue("@{ref}") github_string <- glue("{repo_spec}/{path}{ref_string}") ui_done("Saving {ui_path(github_string)} to {ui_path(save_as)}") # https://docs.github.com/en/rest/reference/repos#contents # https://docs.github.com/en/rest/reference/repos#if-the-content-is-a-symlink # If the requested {path} points to a symlink, and the symlink's target is a # normal file in the repository, then the API responds with the content of the # file.... 
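  # --- Added, hedged aside (editorial illustration, not original package
  # logic): the gh::gh() call below does all the work. Assuming the gh package
  # is installed and a GitHub PAT is discoverable, the same endpoint can be
  # previewed interactively with placeholder (hypothetical) arguments, e.g.:
  #   gh::gh(
  #     "/repos/{repo_spec}/contents/{path}",
  #     repo_spec = "r-lib/actions",
  #     path = "examples/README.md",
  #     .accept = "application/vnd.github.v3.raw"
  #   )
  # With that accept header the API returns the raw file content instead of
  # the default JSON description of the file.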
tf <- withr::local_tempfile( pattern = glue("use_github_file-{path_file(save_as)}-") ) res <- gh::gh( "/repos/{repo_spec}/contents/{path}", repo_spec = repo_spec, path = path, ref = ref, .destfile = tf, .accept = "application/vnd.github.v3.raw" ) tf_contents <- read_utf8(tf) new <- write_over(proj_path(save_as), tf_contents, quiet = TRUE) if (ignore) { use_build_ignore(save_as) } if (open && new) { edit_file(proj_path(save_as)) } invisible(new) } # https://github.com/OWNER/REPO/blob/REF/path/to/some/file # https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file # https://github.acme.com/OWNER/REPO/blob/REF/path/to/some/file # https://raw.github.acme.com/OWNER/REPO/REF/path/to/some/file parse_file_url <- function(x) { out <- list( parsed = FALSE, repo_spec = x, path = NULL, ref = NULL, host = NULL ) dat <- re_match(x, github_remote_regex) if (is.na(dat$.match)) { return(out) } # TODO: generalize here for GHE hosts that don't include 'github' if (!grepl("github", dat$host)) { ui_stop("URL doesn't seem to be associated with GitHub.") } if (!grepl("^(raw[.])?github", dat$host) || !nzchar(dat$fragment) || (grepl("^github", dat$host) && !grepl("^/blob/", dat$fragment))) { ui_stop("Can't parse the URL provided via {ui_code('repo_spec')}.") } out$parsed <- TRUE dat$host <- sub("^raw[.]", "", dat$host) dat$host <- sub("^githubusercontent", "github", dat$host) dat$fragment <- sub("^/(blob/)?", "", dat$fragment) dat_fragment <- re_match(dat$fragment, "^(?[^/]+)/(?.+)$") out$repo_spec <- make_spec(owner = dat$repo_owner, repo = dat$repo_name) out$path <- dat_fragment$path out$ref <- dat_fragment$ref out$host <- glue_chr("https://{dat$host}") out } usethis/R/github-actions.R0000644000175000017500000002141714153706774015357 0ustar nileshnilesh# general GHA setup ------------------------------------------------------------ #' GitHub Actions setup #' #' @description #' Sets up continuous integration (CI) for an R package that is developed on #' GitHub using [GitHub Actions](https://github.com/features/actions). CI can be #' used to trigger various operations for each push or pull request, such as: #' * Run `R CMD check` on various operating systems and R versions #' * Build and deploy a pkgdown site #' * Determine test coverage #' #' This family of functions #' * Adds the necessary configuration files and lists them in `.Rbuildignore` #' * Provides the markdown to insert a badge into your README #' #' @name github_actions #' @param name For `use_github_action()`: Name of one of the example workflows #' from , with or without a #' `.yaml` extension, e.g. "pkgdown" or "test-coverage.yaml". #' #' For `use_github_actions_badge()`: Specifies the workflow whose status the #' badge will report. Usually, this is the `name` keyword that appears in the #' workflow `.yaml` file. #' @eval param_repo_spec() #' @param url The full URL to a `.yaml` file on GitHub. #' @param save_as Name of the local workflow file. Defaults to `name` or #' `fs::path_file(url)` for `use_github_action()`. Do not specify any other #' part of the path; the parent directory will always be `.github/workflows`, #' within the active project. #' @param readme The full URL to a `README` file that provides more details #' about the workflow. Ignored when `url` is `NULL`. #' @inheritParams use_template #' #' @seealso #' * [use_github_file()] for more about `url` format and parsing. #' * [use_tidy_github_actions()] for the standard GitHub Actions used for #' tidyverse packages. 
#' @examples #' \dontrun{ #' use_github_actions() #' #' use_github_action_check_standard() #' #' use_github_action("pkgdown") #' } NULL #' @section `use_github_actions()`: #' Configures a basic `R CMD check` workflow on GitHub Actions by adding a #' standard `R-CMD-check.yaml` file to the `.github/workflows` directory of the #' active project. This is actually just an alias for #' `use_github_action_check_release()`. #' @export #' @rdname github_actions use_github_actions <- function() { use_github_action_check_release() } #' @section `use_github_actions_badge()`: #' Generates a GitHub Actions badge and that's all. It does not configure a #' workflow. This exists mostly for internal use in the other functions #' documented here. #' @export #' @rdname github_actions use_github_actions_badge <- function(name = "R-CMD-check", repo_spec = NULL) { repo_spec <- repo_spec %||% target_repo_spec() enc_name <- utils::URLencode(name) img <- glue("https://github.com/{repo_spec}/workflows/{enc_name}/badge.svg") url <- glue("https://github.com/{repo_spec}/actions") use_badge(name, url, img) } uses_github_actions <- function() { path <- proj_path(".github", "workflows") file_exists(path) } check_uses_github_actions <- function() { if (uses_github_actions()) { return(invisible()) } ui_stop(" Cannot detect that package {ui_value(project_name())} already \\ uses GitHub Actions. Do you need to run {ui_code('use_github_actions()')}?") } # individual actions ----------------------------------------------------------- #' @section `use_github_action()`: #' Configures an individual, specific [GitHub #' Actions](https://github.com/features/actions) workflow, either one of the #' examples from #' [r-lib/actions/examples](https://github.com/r-lib/actions/tree/v1/examples) #' or a custom workflow given by the `url` parameter. #' #' Used internally to power all the other GitHub Actions functions, but it can #' also be called directly by the user. #' @export #' @rdname github_actions use_github_action <- function(name, url = NULL, save_as = NULL, readme = NULL, ignore = TRUE, open = FALSE) { if (is.null(url)) { check_string(name) name <- path_ext_set(name, "yaml") url <- glue( "https://raw.githubusercontent.com/r-lib/actions/v1/examples/{name}" ) readme <- "https://github.com/r-lib/actions/blob/v1/examples/README.md" } else { check_string(url) maybe_string(readme) } withr::defer(rstudio_git_tickle()) use_dot_github(ignore = ignore) if (is.null(save_as)) { save_as <- path_file(url) } check_string(save_as) save_as <- path(".github", "workflows", save_as) create_directory(path_dir(proj_path(save_as))) # `ignore = FALSE` because we took care of this at directory level, above new <- use_github_file(url, save_as = save_as, ignore = FALSE, open = open) if (!is.null(readme)) { ui_todo("Learn more at <{readme}>.") } invisible(new) } #' @section `use_github_action_check_release()`: #' This entry-level, bare-minimum workflow installs the latest release of R (on #' a current distribution of Linux) and runs `R CMD check` via the #' [rcmdcheck](https://github.com/r-lib/rcmdcheck) package. 
#' @export #' @rdname github_actions use_github_action_check_release <- function(save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE) { use_github_action( "check-release.yaml", save_as = save_as, ignore = ignore, open = open ) use_github_actions_badge("R-CMD-check") } #' @section `use_github_action_check_standard()`: #' This workflow runs `R CMD check` via the #' [rcmdcheck](https://github.com/r-lib/rcmdcheck) package on the three major #' operating systems (Linux, macOS, and Windows) on the latest release of R and #' on R-devel. This workflow is appropriate for a package that is (or aspires to #' be) on CRAN or Bioconductor. #' @export #' @rdname github_actions use_github_action_check_standard <- function(save_as = "R-CMD-check.yaml", ignore = TRUE, open = FALSE) { use_github_action( "check-standard.yaml", save_as = save_as, ignore = ignore, open = open ) use_github_actions_badge("R-CMD-check") } #' @section `use_github_action_pr_commands()`: #' This workflow enables the use of two R-specific commands in pull request #' issue comments: #' * `/document` to run `roxygen2::roxygenise()` and update the PR #' * `/style` to run `styler::style_pkg()` and update the PR #' @export #' @rdname github_actions use_github_action_pr_commands <- function(save_as = "pr-commands.yaml", ignore = TRUE, open = FALSE) { use_github_action( "pr-commands.yaml", save_as = save_as, ignore = ignore, open = open ) } # tidyverse GHA setup ---------------------------------------------------------- #' @details #' * `use_tidy_github_actions()`: Sets up the following workflows using [GitHub #' Actions](https://github.com/features/actions): #' - Run `R CMD check` on the current release, devel, and four previous #' versions of R. The build matrix also ensures `R CMD check` is run at #' least once on each of the three major operating systems (Linux, macOS, #' and Windows). #' - Report test coverage. #' - Build and deploy a pkgdown site. #' - Provide two commands to be used in pull requests: `/document` to run #' `roxygen2::roxygenise()` and update the PR, and `/style` to run #' `styler::style_pkg()` and update the PR. #' #' This is how the tidyverse team checks its packages, but it is overkill #' for less widely used packages. Consider using the more streamlined #' workflows set up by [use_github_actions()] or #' [use_github_action_check_standard()]. #' @export #' @rdname tidyverse use_tidy_github_actions <- function() { repo_spec <- target_repo_spec() use_coverage(repo_spec = repo_spec) # we killed use_github_action_check_full() because too many people were using # it who are better served by something less over-the-top # now we inline it here full_status <- use_github_action("check-full.yaml", save_as = "R-CMD-check.yaml") use_github_actions_badge("R-CMD-check", repo_spec = repo_spec) pr_status <- use_github_action_pr_commands() pkgdown_status <- use_github_action("pkgdown") test_coverage_status <- use_github_action("test-coverage") old_configs <- proj_path(c(".travis.yml", "appveyor.yml")) has_appveyor_travis <- file_exists(old_configs) if (any(has_appveyor_travis)) { if (ui_yeah( "Remove existing {ui_path('.travis.yml')} and {ui_path('appveyor.yml')}?" )) { file_delete(old_configs[has_appveyor_travis]) ui_todo("Remove old badges from README") } } invisible(full_status && pr_status && pkgdown_status && test_coverage_status) } usethis/R/utils-glue.R0000644000175000017500000000036414132400710014501 0ustar nileshnilesh# wrappers that apply as.character() to glue functions glue_chr <- function(...) 
{ as.character(glue(..., .envir = parent.frame(1))) } glue_data_chr <- function(.x, ...) { as.character(glue_data(.x = .x, ..., .envir = parent.frame(1))) } usethis/R/line-ending.R0000644000175000017500000000212314117743363014614 0ustar nileshnileshproj_line_ending <- function() { # First look in .Rproj file proj_path <- proj_path(paste0(project_name(), ".Rproj")) if (file_exists(proj_path)) { config <- read_utf8(proj_path) if (any(grepl("^LineEndingConversion: Posix", config))) { return("\n") } else if (any(grepl("^LineEndingConversion: Windows", config))) { return("\r\n") } } # Then try DESCRIPTION desc_path <- proj_path("DESCRIPTION") if (file_exists(desc_path)) { return(detect_line_ending(desc_path)) } # Then try any .R file r_path <- proj_path("R") if (dir_exists(r_path)) { r_files <- dir_ls(r_path, regexp = "[.][rR]$") if (length(r_files) > 0) { return(detect_line_ending(r_files[[1]])) } } # Then give up - this is used (for example), when writing the # first file into the package platform_line_ending() } platform_line_ending <- function() { if (.Platform$OS.type == "windows") "\r\n" else "\n" } detect_line_ending <- function(path) { samp <- suppressWarnings(readChar(path, nchars = 500)) if (isTRUE(grepl("\r\n", samp))) "\r\n" else "\n" } usethis/R/data-table.R0000644000175000017500000000247014153502006014411 0ustar nileshnilesh#' Prepare for importing data.table #' #' @description #' #' `use_data_table()` imports the `data.table()` function from the data.table #' package, as well as several important symbols: `:=`, `.SD`, `.BY`, `.N`, #' `.I`, `.GRP`, `.NGRP`, `.EACHI`. This is a minimal setup to get `data.table`s #' working with your package. See the [importing #' data.table](https://rdatatable.gitlab.io/data.table/articles/datatable-importing.html) #' vignette for other strategies. In addition to importing these function, #' `use_data_table()` also blocks the usage of data.table in the `Depends` field #' of the `DESCRIPTION` file; `data.table` should be used as an _imported_ or #' _suggested_ package only. See this #' [discussion](https://github.com/Rdatatable/data.table/issues/3076). #' @export use_data_table <- function() { check_is_package("use_data_table()") check_uses_roxygen("use_data_table()") deps <- desc::desc_get_deps(proj_get()) if (any(deps$type == "Depends" & deps$package == "data.table")) { ui_warn("data.table should be in Imports or Suggests, not Depends") ui_done("Deleting data.table from {ui_field('Depends')}") desc::desc_del_dep("data.table", "Depends", file = proj_get()) } use_import_from( "data.table", c("data.table", ":=", ".SD", ".BY", ".N", ".I", ".GRP", ".NGRP", ".EACHI") ) } usethis/R/rstudio.R0000644000175000017500000001305414131645451014114 0ustar nileshnilesh#' Add RStudio Project infrastructure #' #' It is likely that you want to use [create_project()] or [create_package()] #' instead of `use_rstudio()`! Both `create_*()` functions can add RStudio #' Project infrastructure to a pre-existing project or package. `use_rstudio()` #' is mostly for internal use or for those creating a usethis-like package for #' their organization. 
It does the following in the current project, often after #' executing `proj_set(..., force = TRUE)`: #' * Creates an `.Rproj` file #' * Adds RStudio files to `.gitignore` #' * Adds RStudio files to `.Rbuildignore`, if project is a package #' #' @param line_ending Line ending #' @export use_rstudio <- function(line_ending = c("posix", "windows")) { line_ending <- arg_match(line_ending) line_ending <- c("posix" = "Posix", "windows" = "Windows")[[line_ending]] rproj_file <- paste0(project_name(), ".Rproj") new <- use_template( "template.Rproj", save_as = rproj_file, data = list(line_ending = line_ending, is_pkg = is_package()), ignore = is_package() ) use_git_ignore(".Rproj.user") if (is_package()) { use_build_ignore(".Rproj.user") } invisible(new) } #' Don't save/load user workspace between sessions #' #' R can save and reload the user's workspace between sessions via an `.RData` #' file in the current directory. However, long-term reproducibility is enhanced #' when you turn this feature off and clear R's memory at every restart. #' Starting with a blank slate provides timely feedback that encourages the #' development of scripts that are complete and self-contained. More detail can #' be found in the blog post [Project-oriented #' workflow](https://www.tidyverse.org/blog/2017/12/workflow-vs-script/). #' #' @inheritParams edit #' #' @export use_blank_slate <- function(scope = c("user", "project")) { scope <- match.arg(scope) if (scope == "user") { use_rstudio_config(list( save_workspace = "never", load_workspace = FALSE )) return(invisible()) } if (!is_rstudio_project()) { ui_stop("{ui_value(project_name())} is not an RStudio Project.") } rproj_fields <- modify_rproj( rproj_path(), list(RestoreWorkspace = "No", SaveWorkspace = "No") ) write_utf8(rproj_path(), serialize_rproj(rproj_fields)) restart_rstudio("Restart RStudio with a blank slate?") invisible() } # Is base_path an RStudio Project or inside an RStudio Project? is_rstudio_project <- function(base_path = proj_get()) { res <- tryCatch( rprojroot::find_rstudio_root_file(path = base_path), error = function(e) NA ) !is.na(res) } rproj_path <- function(base_path = proj_get()) { rproj_path <- dir_ls(base_path, regexp = "[.]Rproj$") if (length(rproj_path) > 1) { ui_stop("Multiple .Rproj files found.") } if (length(rproj_path) == 1) rproj_path else NA_character_ } # Is base_path open in RStudio? 
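# (Added, hedged illustration -- not original package code. A minimal sketch of
# how this predicate is meant to be used as a guard, assuming an interactive
# RStudio session:
#    if (in_rstudio(proj_get())) rstudioapi::openProject(proj_get())
# restart_rstudio(), later in this file, implements the real, more careful
# version of this pattern.)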
in_rstudio <- function(base_path = proj_get()) { if (!rstudio_available()) { return(FALSE) } if (!rstudioapi::hasFun("getActiveProject")) { return(FALSE) } proj <- rstudioapi::getActiveProject() if (is.null(proj)) { return(FALSE) } path_real(proj) == path_real(base_path) } # So we can override the default with a mock rstudio_available <- function() { rstudioapi::isAvailable() } in_rstudio_server <- function() { if (!rstudio_available()) { return(FALSE) } identical(rstudioapi::versionInfo()$mode, "server") } parse_rproj <- function(file) { lines <- as.list(read_utf8(file)) has_colon <- grepl(":", lines) fields <- lapply(lines[has_colon], function(x) strsplit(x, split = ": ")[[1]]) lines[has_colon] <- vapply(fields, `[[`, "character", 2) names(lines)[has_colon] <- vapply(fields, `[[`, "character", 1) names(lines)[!has_colon] <- "" lines } modify_rproj <- function(file, update) { utils::modifyList(parse_rproj(file), update) } serialize_rproj <- function(fields) { named <- nzchar(names(fields)) as.character(ifelse(named, paste0(names(fields), ": ", fields), fields)) } # Must be last command run restart_rstudio <- function(message = NULL) { if (!in_rstudio(proj_get())) { return(FALSE) } if (!is_interactive()) { return(FALSE) } if (!is.null(message)) { ui_todo(message) } if (!rstudioapi::hasFun("openProject")) { return(FALSE) } if (ui_nope("Restart now?")) { return(FALSE) } rstudioapi::openProject(proj_get()) } rstudio_git_tickle <- function() { if (rstudioapi::hasFun("executeCommand")) { rstudioapi::executeCommand("vcsRefresh") } invisible() } rstudio_config_path <- function(...) { if (is_windows()) { # https://github.com/r-lib/usethis/issues/1293 base <- rappdirs::user_config_dir("RStudio", appauthor = NULL) } else { # RStudio only uses windows/unix conventions, not mac base <- rappdirs::user_config_dir("rstudio", os = "unix") } path(base, ...) } rstudio_prefs_read <- function() { path <- rstudio_config_path("rstudio-prefs.json") if (file_exists(path)) { jsonlite::read_json(path) } else { list() } } rstudio_prefs_write <- function(json) { path <- rstudio_config_path("rstudio-prefs.json") create_directory(path_dir(path)) jsonlite::write_json(json, path, auto_unbox = TRUE, pretty = TRUE) } use_rstudio_config <- function(values) { stopifnot(is.list(values), is_named(values)) json <- rstudio_prefs_read() for (name in names(values)) { val <- values[[name]] if (identical(json[[name]], val)) { next } ui_done("Setting RStudio preference {ui_field(name)} to {ui_value(val)}") json[[name]] <- val } rstudio_prefs_write(json) } usethis/R/ignore.R0000644000175000017500000000211713676400475013714 0ustar nileshnilesh#' Add files to `.Rbuildignore` #' #' @description #' `.Rbuildignore` has a regular expression on each line, but it's #' usually easier to work with specific file names. By default, #' `use_build_ignore()` will (crudely) turn a filename into a regular #' expression that will only match that path. Repeated entries will be #' silently removed. #' #' `use_build_ignore()` is designed to ignore *individual* files. If you #' want to ignore *all* files with a given extension, consider providing #' an "as-is" regular expression, using `escape = FALSE`; see examples. #' #' @param files Character vector of path names. #' @param escape If `TRUE`, the default, will escape `.` to #' `\\.` and surround with `^` and `$`. 
#' @export #' @examples #' \dontrun{ #' # ignore all Excel files #' use_build_ignore("[.]xlsx$", escape = FALSE) #' } use_build_ignore <- function(files, escape = TRUE) { if (escape) { files <- escape_path(files) } write_union(proj_path(".Rbuildignore"), files) } escape_path <- function(x) { x <- gsub("\\.", "\\\\.", x) x <- gsub("/$", "", x) paste0("^", x, "$") } usethis/R/github-pages.R0000644000175000017500000001347714131645451015013 0ustar nileshnilesh#' Configure a GitHub Pages site #' #' Activates or reconfigures a GitHub Pages site for a project hosted on GitHub. #' This function anticipates two specific usage modes: #' * Publish from the root directory of a `gh-pages` branch, which is assumed to #' be only (or at least primarily) a remote branch. Typically the `gh-pages` #' branch is managed by an automatic "build and deploy" job, such as the one #' configured by [`use_github_action("pkgdown")`][use_github_action()]. #' * Publish from the `"/docs"` directory of a "regular" branch, probably the #' repo's default branch. The user is assumed to have a plan for how they will #' manage the content below `"/docs"`. #' #' @param branch,path Branch and path for the site source. The default of #' `branch = "gh-pages"` and `path = "/"` reflects strong GitHub support for #' this configuration: when a `gh-pages` branch is first created, it is #' *automatically* published to Pages, using the source found in `"/"`. If a #' `gh-pages` branch does not yet exist on the host, `use_github_pages()` #' creates an empty, orphan remote branch. #' #' The most common alternative is to use the repo's default branch, coupled #' with `path = "/docs"`. It is the user's responsibility to ensure that this #' `branch` pre-exists on the host. #' #' Note that GitHub does not support an arbitrary `path` and, at the time of #' writing, only `"/"` or `"/docs"` are accepted. #' @param cname Optional, custom domain name. The `NA` default means "don't set #' or change this", whereas a value of `NULL` removes any previously #' configured custom domain. #' #' Note that this *can* add or modify a CNAME file in your repository. If you #' are using Pages to host a pkgdown site, it is better to specify its URL in #' the pkgdown config file and let pkgdown manage CNAME. #' #' @seealso #' * [use_pkgdown_github_pages()] combines `use_github_pages()` with other functions to #' fully configure a pkgdown site #' * #' * #' @return Site metadata returned by the GitHub API, invisibly #' @export #' #' @examples #' \dontrun{ #' use_github_pages() #' use_github_pages(branch = git_default_branch(), path = "/docs") #' } use_github_pages <- function(branch = "gh-pages", path = "/", cname = NA) { stopifnot(is_string(branch), is_string(path)) stopifnot(is.na(cname) || is.null(cname) || is_string(cname)) tr <- target_repo(github_get = TRUE) if (!isTRUE(tr$can_push)) { ui_stop(" You don't seem to have push access for {ui_value(tr$repo_spec)}, which \\ is required to turn on GitHub Pages.") } gh <- gh_tr(tr) safe_gh <- purrr::safely(gh) if (branch == "gh-pages") { new_branch <- create_gh_pages_branch(tr, branch = "gh-pages") if (new_branch) { # merely creating gh-pages branch automatically activates publishing # BUT we need to give the servers time to sync up before a new GET # retrieves accurate info... 
ask me how I know Sys.sleep(2) } } site <- safe_gh("GET /repos/{owner}/{repo}/pages")[["result"]] if (is.null(site)) { ui_done("Activating GitHub Pages for {ui_value(tr$repo_spec)}") site <- gh( "POST /repos/{owner}/{repo}/pages", source = list(branch = branch, path = path), .accept = "application/vnd.github.switcheroo-preview+json" ) } need_update <- site$source$branch != branch || site$source$path != path || (is.null(cname) && !is.null(site$cname)) || (is_string(cname) && (is.null(site$cname) || cname != site$cname)) if (need_update) { args <- list( endpoint = "PUT /repos/{owner}/{repo}/pages", source = list(branch = branch, path = path) ) if (is.null(cname) && !is.null(site$cname)) { # this goes out as a JSON `null`, which is necessary to clear cname args$cname <- NA } if (is_string(cname) && (is.null(site$cname) || cname != site$cname)) { args$cname <- cname } Sys.sleep(2) exec(gh, !!!args) Sys.sleep(2) site <- safe_gh("GET /repos/{owner}/{repo}/pages")[["result"]] } ui_done("GitHub Pages is publishing from:") if (!is.null(site$cname)) { kv_line("Custom domain", site$cname) } kv_line("URL", site$html_url) kv_line("Branch", site$source$branch) kv_line("Path", site$source$path) invisible(site) } # returns FALSE if it does NOT create the branch (because it already exists) # returns TRUE if it does create the branch create_gh_pages_branch <- function(tr, branch = "gh-pages") { gh <- gh_tr(tr) safe_gh <- purrr::safely(gh) branch_GET <- safe_gh( "GET /repos/{owner}/{repo}/branches/{branch}", branch = branch ) if (!inherits(branch_GET$error, "http_error_404")) { return(FALSE) } ui_done(" Initializing empty, orphan {ui_value(branch)} branch in GitHub repo \\ {ui_value(tr$repo_spec)}") # GitHub no longer allows you to directly create an empty tree # hence this roundabout method of getting an orphan branch with no files tree <- gh( "POST /repos/{owner}/{repo}/git/trees", tree = list(list( path = "_temp_file_ok_to_delete", mode = "100644", type = "blob", content = "" )) ) commit <- gh( "POST /repos/{owner}/{repo}/git/commits", message = "Init orphan branch", tree = tree$sha ) ref <- gh( "POST /repos/{owner}/{repo}/git/refs", ref = glue("refs/heads/{branch}"), sha = commit$sha ) # this should succeed, but if somehow it does not, it's not worth failing and # leaving pkgdown + GitHub Pages setup half-done --> why I use safe_gh() safe_gh( "DELETE /repos/{owner}/{repo}/contents/_temp_file_ok_to_delete", message = "Remove temp file", sha = purrr::pluck(tree, "tree", 1, "sha"), branch = branch ) TRUE } usethis/R/tibble.R0000644000175000017500000000274214131622147013663 0ustar nileshnilesh#' Prepare to return a tibble #' #' @description #' #' `r lifecycle::badge("questioning")` #' #' Does minimum setup such that a tibble returned by your package #' is handled using the tibble method for generics like `print()` or \code{[}. #' Presumably you care about this if you've chosen to store and expose an #' object with class `tbl_df`. Specifically: #' * Check that the active package uses roxygen2 #' * Add the tibble package to "Imports" in `DESCRIPTION` #' * Prepare the roxygen directive necessary to import at least one function #' from tibble: #' - If possible, the directive is inserted into existing package-level #' documentation, i.e. 
the roxygen snippet created by [use_package_doc()] #' - Otherwise, we issue advice on where the user should add the directive #' #' This is necessary when your package returns a stored data object that has #' class `tbl_df`, but the package code does not make direct use of functions #' from the tibble package. If you do nothing, the tibble namespace is not #' necessarily loaded and your tibble may therefore be printed and subsetted #' like a base `data.frame`. #' #' @export #' @examples #' \dontrun{ #' use_tibble() #' } use_tibble <- function() { check_is_package("use_tibble()") check_uses_roxygen("use_tibble()") created <- use_import_from("tibble", "tibble") ui_todo("Document a returned tibble like so:") ui_code_block("#' @return a [tibble][tibble::tibble-package]", copy = FALSE) invisible(created) } usethis/R/latest-dependencies.R0000644000175000017500000000235314117743363016350 0ustar nileshnilesh#' Use "latest" versions of all dependencies #' #' Pins minimum versions of dependencies to latest ones (as determined by `source`). #' Useful for the tidyverse package, but should otherwise be used with extreme care. #' #' @keywords internal #' @export #' @param overwrite By default (`FALSE`), only dependencies without version #' specifications will be modified. Set to `TRUE` to modify all dependencies. #' @param source Use "local" or "CRAN" package versions. use_latest_dependencies <- function(overwrite = FALSE, source = c("local", "CRAN")) { deps <- desc::desc_get_deps(proj_get()) deps <- update_versions(deps, overwrite = overwrite, source = source) desc::desc_set_deps(deps, file = proj_get()) invisible(TRUE) } update_versions <- function(deps, overwrite = FALSE, source = c("local", "CRAN")) { baserec <- base_and_recommended() to_change <- !deps$package %in% c("R", baserec) if (!overwrite) { to_change <- to_change & deps$version == "*" } packages <- deps$package[to_change] versions <- switch(match.arg(source), local = map_chr(packages, ~ as.character(utils::packageVersion(.x))), CRAN = utils::available.packages()[packages, "Version"] ) deps$version[to_change] <- paste0(">= ", versions) deps } usethis/LICENSE0000644000175000017500000000005514117743363013106 0ustar nileshnileshYEAR: 2020 COPYRIGHT HOLDER: usethis authors usethis/inst/0000755000175000017500000000000014153502006013041 5ustar nileshnileshusethis/inst/templates/0000755000175000017500000000000014154446735015060 5ustar nileshnileshusethis/inst/templates/code-cpp11.cpp0000644000175000017500000000011714117743363017413 0ustar nileshnilesh#include <cpp11.hpp> using namespace cpp11; [[cpp11::register]] void fun() {} usethis/inst/templates/addins.dcf0000644000175000017500000000013613676400413016767 0ustar nileshnileshName: New Addin Name Description: New Addin Description Binding: {{addin}} Interactive: false usethis/inst/templates/license-GPL-2.md0000644000175000017500000004302514117743363017603 0ustar nileshnileshGNU General Public License ========================== _Version 2, June 1991_ _Copyright © 1989, 1991 Free Software Foundation, Inc.,_ _51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. ### Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.
This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: **(1)** copyright the software, and **(2)** offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. ### TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION **0.** This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The “Program”, below, refers to any such program or work, and a “work based on the Program” means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term “modification”.) Each licensee is addressed as “you”. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 
**1.** You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. **2.** You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: * **a)** You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. * **b)** You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. * **c)** If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 
**3.** You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: * **a)** Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, * **b)** Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, * **c)** Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. **4.** You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. **5.** You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. **6.** Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 
**7.** If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. **8.** If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. **9.** The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and “any later version”, you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. **10.** If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. 
### NO WARRANTY **11.** BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. **12.** IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS ### How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w` and `show c`; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a “copyright disclaimer” for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. 
, 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. usethis/inst/templates/tidy-eval.R0000644000175000017500000001002714131645451017070 0ustar nileshnilesh{{=<< >>=}} #' Tidy eval helpers #' #' @description #' This page lists the tidy eval tools reexported in this package from #' rlang. To learn about using tidy eval in scripts and packages at a #' high level, see the [dplyr programming #' vignette](https://dplyr.tidyverse.org/articles/programming.html) #' and the [ggplot2 in packages #' vignette](https://ggplot2.tidyverse.org/articles/ggplot2-in-packages.html). #' The [Metaprogramming #' section](https://adv-r.hadley.nz/metaprogramming.html) of [Advanced #' R](https://adv-r.hadley.nz) may also be useful for a deeper dive. #' #' * The tidy eval operators `{{`, `!!`, and `!!!` are syntactic #' constructs which are specially interpreted by tidy eval functions. #' You will mostly need `{{`, as `!!` and `!!!` are more advanced #' operators which you should not have to use in simple cases. #' #' The curly-curly operator `{{` allows you to tunnel data-variables #' passed from function arguments inside other tidy eval functions. #' `{{` is designed for individual arguments. To pass multiple #' arguments contained in dots, use `...` in the normal way. #' #' ``` #' my_function <- function(data, var, ...) { #' data %>% #' group_by(...) %>% #' summarise(mean = mean({{ var }})) #' } #' ``` #' #' * [enquo()] and [enquos()] delay the execution of one or several #' function arguments. The former returns a single expression, the #' latter returns a list of expressions. Once defused, expressions #' will no longer evaluate on their own. They must be injected back #' into an evaluation context with `!!` (for a single expression) and #' `!!!` (for a list of expressions). #' #' ``` #' my_function <- function(data, var, ...) { #' # Defuse #' var <- enquo(var) #' dots <- enquos(...) #' #' # Inject #' data %>% #' group_by(!!!dots) %>% #' summarise(mean = mean(!!var)) #' } #' ``` #' #' In this simple case, the code is equivalent to the usage of `{{` #' and `...` above. Defusing with `enquo()` or `enquos()` is only #' needed in more complex cases, for instance if you need to inspect #' or modify the expressions in some way. #' #' * The `.data` pronoun is an object that represents the current #' slice of data. If you have a variable name in a string, use the #' `.data` pronoun to subset that variable with `[[`. #' #' ``` #' my_var <- "disp" #' mtcars %>% summarise(mean = mean(.data[[my_var]])) #' ``` #' #' * Another tidy eval operator is `:=`. It makes it possible to use #' glue and curly-curly syntax on the LHS of `=`. For technical #' reasons, the R language doesn't support complex expressions on #' the left of `=`, so we use `:=` as a workaround. #' #' ``` #' my_function <- function(data, var, suffix = "foo") { #' # Use `{{` to tunnel function arguments and the usual glue #' # operator `{` to interpolate plain strings. #' data %>% #' summarise("{{ var }}_mean_{suffix}" := mean({{ var }})) #' } #' ``` #' #' * Many tidy eval functions like `dplyr::mutate()` or #' `dplyr::summarise()` give an automatic name to unnamed inputs. 
If #' you need to create the same sort of automatic names by yourself, #' use `as_label()`. For instance, the glue-tunnelling syntax above #' can be reproduced manually with: #' #' ``` #' my_function <- function(data, var, suffix = "foo") { #' var <- enquo(var) #' prefix <- as_label(var) #' data %>% #' summarise("{prefix}_mean_{suffix}" := mean(!!var)) #' } #' ``` #' #' Expressions defused with `enquo()` (or tunnelled with `{{`) need #' not be simple column names, they can be arbitrarily complex. #' `as_label()` handles those cases gracefully. If your code assumes #' a simple column name, use `as_name()` instead. This is safer #' because it throws an error if the input is not a name as expected. #' #' @md #' @name tidyeval #' @keywords internal #' @importFrom rlang enquo enquos .data := as_name as_label #' @aliases enquo enquos .data := as_name as_label #' @export enquo enquos .data := as_name as_label NULL <<={{ }}=>> usethis/inst/templates/license-GPL-3.md0000644000175000017500000010413014117743363017577 0ustar nileshnileshGNU General Public License ========================== _Version 3, 29 June 2007_ _Copyright © 2007 Free Software Foundation, Inc. <>_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. ## Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: **(1)** assert copyright on the software, and **(2)** offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. 
Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. ## TERMS AND CONDITIONS ### 0. Definitions “This License” refers to version 3 of the GNU General Public License. “Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. “The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work. A “covered work” means either the unmodified Program or a work based on the Program. To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that **(1)** displays an appropriate copyright notice, and **(2)** tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. ### 1. Source Code The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work. A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The “System Libraries” of an executable work include anything, other than the work as a whole, that **(a)** is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and **(b)** serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. ### 2. Basic Permissions All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. ### 3. Protecting Users' Legal Rights From Anti-Circumvention Law No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. ### 4. Conveying Verbatim Copies You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. ### 5. Conveying Modified Source Versions You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: * **a)** The work must carry prominent notices stating that you modified it, and giving a relevant date. * **b)** The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”. * **c)** You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. * **d)** If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. ### 6. Conveying Non-Source Forms You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: * **a)** Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
* **b)** Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either **(1)** a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or **(2)** access to copy the Corresponding Source from a network server at no charge. * **c)** Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. * **d)** Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. * **e)** Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A “User Product” is either **(1)** a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or **(2)** anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. “Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. ### 7. Additional Terms “Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: * **a)** Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or * **b)** Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or * **c)** Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or * **d)** Limiting the use for publicity purposes of names of licensors or authors of the material; or * **e)** Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or * **f)** Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. ### 8. Termination You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated **(a)** provisionally, unless and until the copyright holder explicitly and finally terminates your license, and **(b)** permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. ### 9. Acceptance Not Required for Having Copies You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. ### 10. Automatic Licensing of Downstream Recipients Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. ### 11. Patents A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”. A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either **(1)** cause the Corresponding Source to be so available, or **(2)** arrange to deprive yourself of the benefit of the patent license for this particular work, or **(3)** arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license **(a)** in connection with copies of the covered work conveyed by you (or copies made from those copies), or **(b)** primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. ### 12. No Surrender of Others' Freedom If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. ### 13. Use with the GNU Affero General Public License Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. ### 14. Revised Versions of this License The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. ### 15. Disclaimer of Warranty THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. ### 16. Limitation of Liability IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. ### 17. Interpretation of Sections 15 and 16 If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. _END OF TERMS AND CONDITIONS_ ## How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. This is free software, and you are welcome to redistribute it under certain conditions; type 'show c' for details. The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”. You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <>. usethis/inst/templates/lifecycle-defunct.svg0000644000175000017500000000170413737204645021166 0ustar nileshnileshlifecyclelifecycledefunctdefunct usethis/inst/templates/article.Rmd0000644000175000017500000000025113676400413017134 0ustar nileshnilesh--- title: "{{{ vignette_title }}}" --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup} library({{Package}}) ``` usethis/inst/templates/NEWS.md0000644000175000017500000000013513676400413016144 0ustar nileshnilesh# {{{ Package }}} {{{ Version }}} * Added a `NEWS.md` file to track changes to the package. 
usethis/inst/templates/gitlab-ci.yml0000644000175000017500000000176313676400475017444 0ustar nileshnileshimage: rocker/tidyverse stages: - build - test - deploy building: stage: build script: - R -e "remotes::install_deps(dependencies = TRUE)" - R -e 'devtools::check()' # To have the coverage percentage appear as a gitlab badge follow these # instructions: # https://docs.gitlab.com/ee/user/project/pipelines/settings.html#test-coverage-parsing # The coverage parsing string is # Coverage: \d+\.\d+ testing: stage: test allow_failure: true when: on_success only: - master script: - Rscript -e 'install.packages("DT")' - Rscript -e 'covr::gitlab(quiet = FALSE)' artifacts: paths: - public # To produce a code coverage report as a GitLab page see # https://about.gitlab.com/2016/11/03/publish-code-coverage-report-with-gitlab-pages/ pages: stage: deploy dependencies: - testing script: - ls artifacts: paths: - public expire_in: 30 days only: - master usethis/inst/templates/cran-comments.md0000644000175000017500000000012114117743363020136 0ustar nileshnilesh## R CMD check results 0 errors | 0 warnings | 1 note * This is a new release. usethis/inst/templates/lifecycle-archived.svg0000644000175000017500000000170713737204645021326 0ustar nileshnilesh lifecyclelifecyclearchivedarchived usethis/inst/templates/code.c0000644000175000017500000000007213676400413016124 0ustar nileshnilesh#define R_NO_REMAP #include #include usethis/inst/templates/template.Rproj0000644000175000017500000000056614117743363017714 0ustar nileshnileshVersion: 1.0 RestoreWorkspace: No SaveWorkspace: No AlwaysSaveHistory: Default EnableCodeIndexing: Yes Encoding: UTF-8 AutoAppendNewline: Yes StripTrailingWhitespace: Yes LineEndingConversion: {{line_ending}} {{#is_pkg}} BuildType: Package PackageUseDevtools: Yes PackageInstallArgs: --no-multiarch --with-keep.source PackageRoxygenize: rd,collate,namespace {{/is_pkg}} usethis/inst/templates/rmarkdown-template.yml0000644000175000017500000000016313676400413021407 0ustar nileshnileshname: {{{ template_name }}} description: > {{{ template_description }}} create_dir: {{{ template_create_dir }}} usethis/inst/templates/lifecycle-stable.svg0000644000175000017500000000167413737204645021016 0ustar nileshnileshlifecyclelifecyclestablestable usethis/inst/templates/packagename-data-prep.R0000644000175000017500000000014313676400475021307 0ustar nileshnilesh## code to prepare `{{{name}}}` dataset goes here usethis::use_data({{{name}}}, overwrite = TRUE) usethis/inst/templates/circleci-config.yml0000644000175000017500000000215713737204645020626 0ustar nileshnilesh{{=<% %>=}} version: 2 jobs: build: docker: - image: <% image %> environment: R_LIBS: ~/R/Library steps: - restore_cache: keys: - r-pkg-cache-{{ arch }}-{{ .Branch }} - r-pkg-cache-{{ arch }}- - checkout - run: name: Install package dependencies command: | mkdir -p ~/R/Library Rscript -e 'install.packages("remotes")' Rscript -e 'remotes::install_deps(dependencies = TRUE)' - run: name: Session information and installed package versions command: | Rscript -e 'sessionInfo()' Rscript -e 'installed.packages()[, c("Package", "Version")]' Rscript -e 'rmarkdown::pandoc_version()' - run: name: Build package command: R CMD build . 
- run: name: Check package command: R CMD check --as-cran --no-manual *tar.gz - store_artifacts: path: <% package %>.Rcheck/ - save_cache: key: r-pkg-cache-{{ arch }}-{{ .Branch }} paths: - "~/R/Library" usethis/inst/templates/test-example-2.1.R0000644000175000017500000000010013676400413020067 0ustar nileshnileshtest_that("multiplication works", { expect_equal(2 * 2, 4) }) usethis/inst/templates/package-README0000644000175000017500000000313014131645451017315 0ustar nileshnilesh{{#Rmd}} --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>", fig.path = "man/figures/README-", out.width = "100%" ) ``` {{/Rmd}} # {{{ Package }}} The goal of {{{ Package }}} is to ... ## Installation {{#on_github}} You can install the development version of {{{ Package }}} from [GitHub](https://github.com/) with: ``` r # install.packages("devtools") devtools::install_github("{{{ github_spec }}}") ``` {{/on_github}} {{^on_github}} You can install the development version of {{{ Package }}} like so: ``` r # FILL THIS IN! HOW CAN PEOPLE INSTALL YOUR DEV PACKAGE? ``` {{/on_github}} ## Example This is a basic example which shows you how to solve a common problem: {{#Rmd}} ```{r example} {{/Rmd}} {{^Rmd}}``` r {{/Rmd}} library({{Package}}) ## basic example code ``` {{#Rmd}} What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. You could also use GitHub Actions to re-render `README.Rmd` every time you push. An example workflow can be found here: . You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub and CRAN. {{/Rmd}} usethis/inst/templates/tidy-travis.yml0000644000175000017500000000040313676400475020056 0ustar nileshnilesh# R for travis: see documentation at https://docs.travis-ci.com/user/languages/r language: R cache: packages matrix: include: - r: devel - r: release after_success: - Rscript -e 'covr::codecov()' - r: oldrel - r: 3.4 - r: 3.3 - r: 3.2 usethis/inst/templates/rmarkdown-template.Rmd0000644000175000017500000000120613676400413021327 0ustar nileshnilesh--- title: "Template Title" author: "Your Name" date: "The Date" output: output_format --- ```{r setup, include=FALSE} knitr::opts_chunk$set(echo = TRUE) ``` ## Adding an RMarkdown Template This file is what a user will see when they select your template. Make sure that you update the fields in the yaml header. In particular you will want to update the `output` field to whatever format your template requires. This is a good place to demonstrate special features that your template provides. Ideally it should knit out-of-the-box, or at least contain clear instructions as to what needs changing. Finally, be sure to remove this message! usethis/inst/templates/license-AGPL-3.md0000644000175000017500000010277713737204645017721 0ustar nileshnileshGNU Affero General Public License ================================= _Version 3, 19 November 2007_ _Copyright (C) 2007 Free Software Foundation, Inc. <>_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. 
## Preamble The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. The precise terms and conditions for copying, distribution and modification follow. ## TERMS AND CONDITIONS ### 0. Definitions. "This License" refers to version 3 of the GNU Affero General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. 
Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. ### 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. ### 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. 
You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. ### 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. ### 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. ### 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: - a) The work must carry prominent notices stating that you modified it, and giving a relevant date. - b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". - c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. - d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. ### 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: - a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. - b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. - c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. - d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. - e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. 
For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. ### 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: - a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or - b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or - c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or - d) Limiting the use for publicity purposes of names of licensors or authors of the material; or - e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or - f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. ### 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 
### 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. ### 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. ### 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. ### 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. ### 13. Remote Network Interaction; Use with the GNU General Public License. 
Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. ### 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. ### 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. ### 16. Limitation of Liability. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. ### 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS ## How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see . 
usethis/inst/templates/tutorial-template.Rmd0000644000175000017500000000352013676400413021167 0ustar nileshnilesh--- title: "{{{ tutorial_title }}}" output: learnr::tutorial runtime: shiny_prerendered --- ```{r setup, include=FALSE} library(learnr) knitr::opts_chunk$set(echo = FALSE) ``` ## Topic 1 ### Exercise *Here's a simple exercise with an empty code chunk provided for entering the answer.* Write the R code required to add two plus two: ```{r two-plus-two, exercise=TRUE} ``` ### Exercise with Code *Here's an exercise with some prepopulated code as well as `exercise.lines = 5` to provide a bit more initial room to work.* Now write a function that adds any two numbers and then call it: ```{r add-function, exercise=TRUE, exercise.lines = 5} add <- function() { } ``` ## Topic 2 ### Exercise with Hint *Here's an exercise where the chunk is pre-evaulated via the `exercise.eval` option (so the user can see the default output we'd like them to customize). We also add a "hint" to the correct solution via the chunk immediate below labeled `print-limit-hint`.* Modify the following code to limit the number of rows printed to 5: ```{r print-limit, exercise=TRUE, exercise.eval=TRUE} mtcars ``` ```{r print-limit-hint} head(mtcars) ``` ### Quiz *You can include any number of single or multiple choice questions as a quiz. Use the `question` function to define a question and the `quiz` function for grouping multiple questions together.* Some questions to verify that you understand the purposes of various base and recommended R packages: ```{r quiz} quiz( question("Which package contains functions for installing other R packages?", answer("base"), answer("tools"), answer("utils", correct = TRUE), answer("codetools") ), question("Which of the R packages listed below are used to create plots?", answer("lattice", correct = TRUE), answer("tools"), answer("stats"), answer("grid", correct = TRUE) ) ) ``` usethis/inst/templates/license-cc0.md0000644000175000017500000001546613676400413017472 0ustar nileshnilesh## creative commons # CC0 1.0 Universal CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER. ### Statement of Purpose The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. 
For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 1. __Copyright and Related Rights.__ A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; ii. moral rights retained by the original author(s) and/or performer(s); iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; v. rights protecting the extraction, dissemination, use and reuse of data in a Work; vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and vii. other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 2. __Waiver.__ To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 3. __Public License Fallback.__ Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. 
In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 4. __Limitations and Disclaimers.__ a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. c. Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. usethis/inst/templates/pipe.R0000644000175000017500000000055314117743363016137 0ustar nileshnilesh#' Pipe operator #' #' See \code{magrittr::\link[magrittr:pipe]{\%>\%}} for details. #' #' @name %>% #' @rdname pipe #' @keywords internal #' @export #' @importFrom magrittr %>% #' @usage lhs \%>\% rhs #' @param lhs A value or the magrittr placeholder. #' @param rhs A function call using the magrittr semantics. #' @return The result of calling `rhs(lhs)`. NULL usethis/inst/templates/lifecycle-questioning.svg0000644000175000017500000000171413737204645022104 0ustar nileshnileshlifecyclelifecyclequestioningquestioning usethis/inst/templates/project-README0000644000175000017500000000170514131645451017376 0ustar nileshnilesh{{#Rmd}} --- output: github_document --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` {{/Rmd}} # {{{ Project }}} The goal of {{{ Project }}} is to ... {{#Rmd}} What is special about using `README.Rmd` instead of just `README.md`? You can include R chunks like so: ```{r cars} summary(cars) ``` You'll still need to render `README.Rmd` regularly, to keep `README.md` up-to-date. `devtools::build_readme()` is handy for this. 
You could also use GitHub Actions to re-render `README.Rmd` every time you push. An example workflow can be found here: . You can also embed plots, for example: ```{r pressure, echo = FALSE} plot(pressure) ``` In that case, don't forget to commit and push the resulting figure files, so they display on GitHub. {{/Rmd}} usethis/inst/templates/packagename-package.R0000644000175000017500000000013514117743363021023 0ustar nileshnilesh#' @keywords internal "_PACKAGE" ## usethis namespace: start ## usethis namespace: end NULL usethis/inst/templates/lifecycle-experimental.svg0000644000175000017500000000171613737204645022236 0ustar nileshnileshlifecyclelifecycleexperimentalexperimental usethis/inst/templates/Makefile0000644000175000017500000000144014154446740016513 0ustar nileshnilesh# h/t to @jimhester and @yihui for this parse block: # https://github.com/yihui/knitr/blob/dc5ead7bcfc0ebd2789fe99c527c7d91afb3de4a/Makefile#L1-L4 # Note the portability change as suggested in the manual: # https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Writing-portable-packages PKGNAME = `sed -n "s/Package: *\([^ ]*\)/\1/p" DESCRIPTION` PKGVERS = `sed -n "s/Version: *\([^ ]*\)/\1/p" DESCRIPTION` all: check build: install_deps R CMD build . check: build R CMD check --no-manual $(PKGNAME)_$(PKGVERS).tar.gz install_deps: Rscript \ -e 'if (!requireNamespace("remotes")) install.packages("remotes")' \ -e 'remotes::install_deps(dependencies = TRUE)' install: build R CMD INSTALL $(PKGNAME)_$(PKGVERS).tar.gz clean: @rm -rf $(PKGNAME)_$(PKGVERS).tar.gz $(PKGNAME).Rcheck usethis/inst/templates/testthat.R0000644000175000017500000000010413676400413017025 0ustar nileshnileshlibrary(testthat) library({{{ name }}}) test_check("{{{ name }}}") usethis/inst/templates/junit-testthat.R0000644000175000017500000000040113737204645020163 0ustar nileshnileshlibrary(testthat) library({{{ name }}}) if (requireNamespace("xml2")) { test_check("{{{ name }}}", reporter = MultiReporter$new(reporters = list(JunitReporter$new(file = "test-results.xml"), CheckReporter$new()))) } else { test_check("{{{ name }}}") } usethis/inst/templates/lifecycle-maturing.svg0000644000175000017500000000170613737204645021366 0ustar nileshnileshlifecyclelifecyclematuringmaturing usethis/inst/templates/license-LGPL-3.md0000644000175000017500000001661014117743363017720 0ustar nileshnileshGNU Lesser General Public License ================================= _Version 3, 29 June 2007_ _Copyright © 2007 Free Software Foundation, Inc. <>_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. ### 0. Additional Definitions As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License. “The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. A “Combined Work” is a work produced by combining or linking an Application with the Library. 
The particular version of the Library with which the Combined Work was made is also called the “Linked Version”. The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. ### 1. Exception to Section 3 of the GNU GPL You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. ### 2. Conveying Modified Versions If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: * **a)** under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or * **b)** under the GNU GPL, with none of the additional permissions of this License applicable to that copy. ### 3. Object Code Incorporating Material from Library Header Files The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: * **a)** Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. * **b)** Accompany the object code with a copy of the GNU GPL and this license document. ### 4. Combined Works You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: * **a)** Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. * **b)** Accompany the Combined Work with a copy of the GNU GPL and this license document. * **c)** For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. * **d)** Do one of the following: - **0)** Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. - **1)** Use a suitable shared library mechanism for linking with the Library. 
A suitable mechanism is one that **(a)** uses at run time a copy of the Library already present on the user's computer system, and **(b)** will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. * **e)** Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option **4d0**, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option **4d1**, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) ### 5. Combined Libraries You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: * **a)** Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. * **b)** Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. ### 6. Revised Versions of the GNU Lesser General Public License The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. usethis/inst/templates/tidy-contributing.md0000644000175000017500000000546214131622147021053 0ustar nileshnilesh# Contributing to {{{ Package }}} This outlines how to propose a change to {{{ Package }}}. For more detailed info about contributing to this, and other tidyverse packages, please see the [**development contributing guide**](https://rstd.io/tidy-contrib). ## Fixing typos You can fix typos, spelling mistakes, or grammatical errors in the documentation directly using the GitHub web interface, as long as the changes are made in the _source_ file. This generally means you'll need to edit [roxygen2 comments](https://roxygen2.r-lib.org/articles/roxygen2.html) in an `.R`, not a `.Rd` file. 
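For orientation, here is a small, hypothetical sketch (not a file from this package) of the roxygen2 comments you would edit in an `.R` source file; the `.Rd` help page is generated from them:

```r
# Hypothetical example file R/add.R -- the man/add.Rd help page is
# generated from these roxygen2 comments, so typo fixes belong here.

#' Add two numbers
#'
#' @param x,y Numbers to add together.
#' @return The sum of `x` and `y`.
#' @export
add <- function(x, y) {
  x + y
}
```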
You can find the `.R` file that generates the `.Rd` by reading the comment in the first line. ## Bigger changes If you want to make a bigger change, it's a good idea to first file an issue and make sure someone from the team agrees that it’s needed. If you’ve found a bug, please file an issue that illustrates the bug with a minimal [reprex](https://www.tidyverse.org/help/#reprex) (this will also help you write a unit test, if needed). ### Pull request process * Fork the package and clone onto your computer. If you haven't done this before, we recommend using `usethis::create_from_github("{{github_spec}}", fork = TRUE)`. * Install all development dependencies with `devtools::install_dev_deps()`, and then make sure the package passes R CMD check by running `devtools::check()`. If R CMD check doesn't pass cleanly, it's a good idea to ask for help before continuing. * Create a Git branch for your pull request (PR). We recommend using `usethis::pr_init("brief-description-of-change")`. * Make your changes, commit to git, and then create a PR by running `usethis::pr_push()`, and following the prompts in your browser. The title of your PR should briefly describe the change. The body of your PR should contain `Fixes #issue-number`. * For user-facing changes, add a bullet to the top of `NEWS.md` (i.e. just below the first header). Follow the style described in . ### Code style * New code should follow the tidyverse [style guide](https://style.tidyverse.org). You can use the [styler](https://CRAN.R-project.org/package=styler) package to apply these styles, but please don't restyle code that has nothing to do with your PR. * We use [roxygen2](https://cran.r-project.org/package=roxygen2), with [Markdown syntax](https://cran.r-project.org/web/packages/roxygen2/vignettes/rd-formatting.html), for documentation. * We use [testthat](https://cran.r-project.org/package=testthat) for unit tests. Contributions with test cases included are easier to accept. ## Code of Conduct Please note that the {{{ Package }}} project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By contributing to this project you agree to abide by its terms.
usethis/inst/templates/year-copyright.txt0000644000175000017500000000007214117743363020562 0ustar nileshnileshYEAR: {{{year}}} COPYRIGHT HOLDER: {{{copyright_holder}}}
usethis/inst/templates/lifecycle-superseded.svg0000644000175000017500000000171313737204645021701 0ustar nileshnilesh [lifecycle badge: superseded]
usethis/inst/templates/tidy-issue.md0000644000175000017500000000107413737204645017501 0ustar nileshnilesh--- name: Bug report or feature request about: Describe a bug you've seen or make a case for a new feature --- Please briefly describe your problem and what output you expect. If you have a question, please don't use this form. Instead, ask on or . Please include a minimal reproducible example (AKA a reprex). If you've never heard of a [reprex](http://reprex.tidyverse.org/) before, start by reading . Brief description of the problem ```r # insert reprex here ```
usethis/inst/templates/tidy-support.md0000644000175000017500000000467313737204645020065 0ustar nileshnilesh# Getting help with {{{Package}}} Thanks for using {{{Package}}}! Before filing an issue, there are a few places to explore and pieces to put together to make the process as smooth as possible. ## Make a reprex Start by making a minimal **repr**oducible **ex**ample using the [reprex](https://reprex.tidyverse.org/) package.
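As a minimal sketch (assuming the reprex package and its dependencies are installed), the workflow looks roughly like this:

```r
# Sketch only: wrap a small, self-contained snippet in reprex::reprex().
library(reprex)

reprex({
  x <- c(1, 5, 9)
  mean(x)
})
# The rendered code plus output is placed on the clipboard (when one is
# available), ready to paste into a GitHub issue or forum post.
```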
If you haven't heard of or used reprex before, you're in for a treat! Seriously, reprex will make all of your R-question-asking endeavors easier (which is a pretty insane ROI for the five to ten minutes it'll take you to learn what it's all about). For additional reprex pointers, check out the [Get help!](https://www.tidyverse.org/help/) section of the tidyverse site. ## Where to ask? Armed with your reprex, the next step is to figure out [where to ask](https://www.tidyverse.org/help/#where-to-ask). * If it's a question: start with [community.rstudio.com](https://community.rstudio.com/), and/or StackOverflow. There are more people there to answer questions. * If it's a bug: you're in the right place, [file an issue](https://github.com/{{github_spec}}/issues/new). * If you're not sure: let the community help you figure it out! If your problem _is_ a bug or a feature request, you can easily return here and report it. Before opening a new issue, be sure to [search issues and pull requests](https://github.com/{{github_spec}}/issues) to make sure the bug hasn't been reported and/or already fixed in the development version. By default, the search will be pre-populated with `is:issue is:open`. You can [edit the qualifiers](https://help.github.com/articles/searching-issues-and-pull-requests/) (e.g. `is:pr`, `is:closed`) as needed. For example, you'd simply remove `is:open` to search _all_ issues in the repo, open or closed. ## What happens next? To be as efficient as possible, development of tidyverse packages tends to be very bursty, so you shouldn't worry if you don't get an immediate response. Typically we don't look at a repo until a sufficient quantity of issues accumulates, then there’s a burst of intense activity as we focus our efforts. That makes development more efficient because it avoids expensive context switching between problems, at the cost of taking longer to get back to you. This process makes a good reprex particularly important because it might be multiple months between your initial report and when we start working on it. If we can’t reproduce the bug, we can’t fix it! usethis/inst/templates/vignette.Rmd0000644000175000017500000000051413676400413017340 0ustar nileshnilesh--- title: "{{{ vignette_title }}}" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{{{ braced_vignette_title }}} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup} library({{Package}}) ``` usethis/inst/templates/license-proprietary.txt0000644000175000017500000000010214117743363021606 0ustar nileshnileshCopyright {{{year}}} {{{copyright_holder}}}. All rights reserved. usethis/inst/templates/license-LGPL-2.1.md0000644000175000017500000006331214117743363020057 0ustar nileshnileshGNU Lesser General Public License ================================= _Version 2.1, February 1999_ _Copyright © 1991, 1999 Free Software Foundation, Inc._ _51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA_ Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. _This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1._ ### Preamble The licenses for most software are designed to take away your freedom to share and change it. 
By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: **(1)** we copyright the library, and **(2)** we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. 
We call this license the “Lesser” General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a “work based on the library” and a “work that uses the library”. The former contains code derived from the library, whereas the latter must be combined with the library in order to run. ### TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION **0.** This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called “this License”). Each licensee is addressed as “you”. A “library” means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The “Library”, below, refers to any such software library or work which has been distributed under these terms. A “work based on the Library” means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term “modification”.) “Source code” for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). 
Whether that is true depends on what the Library does and what the program that uses the Library does. **1.** You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. **2.** You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: * **a)** The modified work must itself be a software library. * **b)** You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. * **c)** You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. * **d)** If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. **3.** You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. 
(If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. **4.** You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. **5.** A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a “work that uses the Library”. Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a “work that uses the Library” with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a “work that uses the library”. The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a “work that uses the Library” uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. **6.** As an exception to the Sections above, you may also combine or link a “work that uses the Library” with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. 
If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: * **a)** Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable “work that uses the Library”, as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) * **b)** Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. * **c)** Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. * **d)** If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. * **e)** Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the “work that uses the Library” must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. **7.** You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: * **a)** Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. * **b)** Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 
**8.** You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. **9.** You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. **10.** Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. **11.** If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. **12.** If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. 
In such case, this License incorporates the limitation as if written in the body of this License. **13.** The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and “any later version”, you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. **14.** If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. ### NO WARRANTY **15.** BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. **16.** IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. _END OF TERMS AND CONDITIONS_ ### How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. 
This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a “copyright disclaimer” for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! usethis/inst/templates/code.cpp0000644000175000017500000000005013676400413016460 0ustar nileshnilesh#include using namespace Rcpp; usethis/inst/templates/license-apache-2.md0000644000175000017500000002436014117743363020403 0ustar nileshnileshApache License ============== _Version 2.0, January 2004_ _<>_ ### Terms and Conditions for use, reproduction, and distribution #### 1. Definitions “License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. “Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. “Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means **(i)** the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the outstanding shares, or **(iii)** beneficial ownership of such entity. “You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. “Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. “Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. “Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). “Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
“Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.” “Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. #### 2. Grant of Copyright License Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. #### 3. Grant of Patent License Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. #### 4. 
Redistribution You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: * **(a)** You must give any other recipients of the Work or Derivative Works a copy of this License; and * **(b)** You must cause any modified files to carry prominent notices stating that You changed the files; and * **(c)** You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and * **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. #### 5. Submission of Contributions Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. #### 6. Trademarks This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. #### 7. Disclaimer of Warranty Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. #### 8. 
Limitation of Liability In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. #### 9. Accepting Warranty or Additional Liability While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. _END OF TERMS AND CONDITIONS_ ### APPENDIX: How to apply the Apache License to your work To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets `[]` replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same “printed page” as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
usethis/inst/templates/appveyor.yml0000644000175000017500000000207513676400413017443 0ustar nileshnilesh# DO NOT CHANGE the "init" and "install" sections below # Download script file from GitHub init: ps: | $ErrorActionPreference = "Stop" Invoke-WebRequest http://raw.github.com/krlmlr/r-appveyor/master/scripts/appveyor-tool.ps1 -OutFile "..\appveyor-tool.ps1" Import-Module '..\appveyor-tool.ps1' install: ps: Bootstrap cache: - C:\RLibrary environment: NOT_CRAN: true # env vars that may need to be set, at least temporarily, from time to time # see https://github.com/krlmlr/r-appveyor#readme for details # USE_RTOOLS: true # R_REMOTES_STANDALONE: true # Adapt as necessary starting from here build_script: - travis-tool.sh install_deps test_script: - travis-tool.sh run_tests on_failure: - 7z a failure.zip *.Rcheck\* - appveyor PushArtifact failure.zip artifacts: - path: '*.Rcheck\**\*.log' name: Logs - path: '*.Rcheck\**\*.out' name: Logs - path: '*.Rcheck\**\*.fail' name: Logs - path: '*.Rcheck\**\*.Rout' name: Logs - path: '\*_*.tar.gz' name: Bits - path: '\*_*.zip' name: Bits usethis/inst/templates/citation-template.R0000644000175000017500000000035713676400413020622 0ustar nileshnileshcitHeader("To cite {{Package}} in publications use:") citEntry( entry = "Article", title = , author = , journal = , year = , volume = , number = , pages = , url = , textVersion = paste( ) ) usethis/inst/templates/travis.yml0000644000175000017500000000015613676400413017104 0ustar nileshnilesh# R for travis: see documentation at https://docs.travis-ci.com/user/languages/r language: R cache: packages usethis/inst/templates/Jenkinsfile0000644000175000017500000000043413676400475017244 0ustar nileshnileshpipeline { agent any { stages { stage('Build') { steps { make build } } stage('Check') { steps { make check } } stage('Clean') { steps { make clean } } } } } usethis/inst/templates/revdep-email.yml0000644000175000017500000000014313676400413020142 0ustar nileshnileshrelease_date: ??? rel_release_date: ??? my_news_url: ??? release_version: ??? release_details: ??? usethis/inst/templates/license-mit.md0000644000175000017500000000207714117743363017615 0ustar nileshnilesh# MIT License Copyright (c) {{{year}}} {{{copyright_holder}}} Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
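The license templates above are typically not edited by hand; usethis helpers fill in the `{{{year}}}` and `{{{copyright_holder}}}` fields when a license is applied to a package. A brief sketch (assuming usethis is installed and run inside an active package project):

```r
# Sketch: apply the MIT license template to the active package.
# use_mit_license() fills in the year and copyright holder, writes
# LICENSE and LICENSE.md, and sets the License field in DESCRIPTION.
library(usethis)

use_mit_license(copyright_holder = "Jane Doe")
```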
usethis/inst/templates/lifecycle-deprecated.svg0000644000175000017500000000171213737204645021635 0ustar nileshnilesh [lifecycle badge: deprecated]
usethis/inst/templates/readme-rmd-pre-commit.sh0000644000175000017500000000064713676400413021501 0ustar nileshnilesh#!/bin/bash README=($(git diff --cached --name-only | grep -Ei '^README\.[R]?md$')) MSG="use 'git commit --no-verify' to override this check" if [[ ${#README[@]} == 0 ]]; then exit 0 fi if [[ README.Rmd -nt README.md ]]; then echo -e "README.md is out of date; please re-knit README.Rmd\n$MSG" exit 1 elif [[ ${#README[@]} -lt 2 ]]; then echo -e "README.Rmd and README.md should be both staged\n$MSG" exit 1 fi
usethis/inst/templates/CODE_OF_CONDUCT.md0000644000175000017500000001213614117743363017656 0ustar nileshnilesh# Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at {{{ contact }}}. All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at . Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see the FAQ at . Translations are available at . usethis/inst/templates/license-ccby-4.md0000644000175000017500000004433413676400413020102 0ustar nileshnileshAttribution 4.0 International ======================================================================= Creative Commons Corporation ("Creative Commons") is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an "as-is" basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible. Using Creative Commons Public Licenses Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. 
The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses. Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC- licensed material, or material used under an exception or limitation to copyright. More considerations for licensors: wiki.creativecommons.org/Considerations_for_licensors Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor's permission is not necessary for any reason--for example, because of any applicable exception or limitation to copyright--then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public: wiki.creativecommons.org/Considerations_for_licensees ======================================================================= Creative Commons Attribution 4.0 International Public License By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. Section 1 -- Definitions. a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. c. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. 
For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. d. Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. e. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. f. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. g. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. h. Licensor means the individual(s) or entity(ies) granting rights under this Public License. i. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. j. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. k. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. Section 2 -- Scope. a. License grant. 1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: a. reproduce and Share the Licensed Material, in whole or in part; and b. produce, reproduce, and Share Adapted Material. 2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. 3. Term. The term of this Public License is specified in Section 6(a). 4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a) (4) never produces Adapted Material. 5. Downstream recipients. a. Offer from the Licensor -- Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. b. No downstream restrictions. 
You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. 6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). b. Other rights. 1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. 2. Patent and trademark rights are not licensed under this Public License. 3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties. Section 3 -- License Conditions. Your exercise of the Licensed Rights is expressly made subject to the following conditions. a. Attribution. 1. If You Share the Licensed Material (including in modified form), You must: a. retain the following if it is supplied by the Licensor with the Licensed Material: i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); ii. a copyright notice; iii. a notice that refers to this Public License; iv. a notice that refers to the disclaimer of warranties; v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable; b. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and c. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. 2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. 3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. 4. If You Share Adapted Material You produce, the Adapter's License You apply must not prevent recipients of the Adapted Material from complying with this Public License. Section 4 -- Sui Generis Database Rights. Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; b. 
if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. Section 5 -- Disclaimer of Warranties and Limitation of Liability. a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. Section 6 -- Term and Termination. a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: 1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or 2. upon express reinstatement by the Licensor. For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. c. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. d. Sections 1, 5, 6, 7, and 8 survive termination of this Public License. Section 7 -- Other Terms and Conditions. a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. Section 8 -- Interpretation. a. 
For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. ======================================================================= Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the “Licensor.” The text of the Creative Commons public licenses is dedicated to the public domain under the CC0 Public Domain Dedication. Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark "Creative Commons" or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses. Creative Commons may be contacted at creativecommons.org. 
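The license text above, like the codecov.yml template that follows, lives in inst/templates/ and is meant to be copied into a user's package by the corresponding usethis helpers rather than edited in place. A minimal sketch of how these particular templates are typically applied, using functions exported in the NAMESPACE later in this archive and run from an active package project (exact messages and prompts vary by usethis version):

```r
library(usethis)

# Renders inst/templates/license-ccby-4.md into LICENSE.md in the active
# package and sets `License: CC BY 4.0` in DESCRIPTION.
use_ccby_license()

# Copies inst/templates/codecov.yml into the package root and adds it to
# .Rbuildignore; a separate CI step still has to run covr and upload results.
use_coverage("codecov")
```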
usethis/inst/templates/codecov.yml0000644000175000017500000000035013737204645017221 0ustar nileshnileshcomment: false coverage: status: project: default: target: auto threshold: 1% informational: true patch: default: target: auto threshold: 1% informational: true usethis/inst/WORDLIST0000644000175000017500000000270014153502006014232 0ustar nileshnileshAGPL Addin AppVeyor BioConductor BugReports CCBY CLA CMD CNAME CRAN's CircleCI CoC Codecov Colours Depsy DockerHub DropBox GHA GHE Gert Gert's Git's GitLab Gitignores HTTPS Homebrew Initialise Jenkinsfile Jupyter Keychain LF LGL LastPass Lifecycle LinkingTo Makefile ORCID PATs PRs README REPO RMarkdown RProfile RStudio's Rcpp RcppArmadillo RcppEigen Repo Rmd Rmds Roxygen Rtools SHA Sitrep TLS TeXLive Templated Tidyverse UI Ushey's Xcode YAML addin addins alphabetises api applypatch artefacts aspirationally auth authenticator backports behaviour bzip ci cli clipr clisymbols codecov colour colours config convergently cpp cran customised dev devtools discoverable else's emacs eval favour fiascos filenaming formidabel frontmatter fs funder gc gert gh gh's gitcreds github gitignore gzip hexb href http https httr ing initialisation initialises initialising io jsonlite labelled labelling learnr libgit lifecycle macOS macbook magrittr magrittr's md mergeable minimise msg nano noninteractive organisation organisations ort pak pandoc pkgdown pre programmatically purrr r's rOpenSci rappdirs rcmdcheck rebase reconfigures redirections repo repo's repos reprex reproducibility revdep revdepcheck revdepchecks rladies rlang rmarkdown ropensci roxygen rprojroot shortlink shortlinks signalling sitrep src styler symlink symlinks templated templating testthat tibble tidymodels tidyverse todo travis triaged uation un unpushed useR usethis's ver vm withr www xyz xz yaml usethis/NAMESPACE0000644000175000017500000001166314154446667013337 0ustar nileshnilesh# Generated by roxygen2: do not edit by hand S3method(format,github_remote_config) S3method(print,github_remote_config) S3method(print,sitrep) export(browse_circleci) export(browse_cran) export(browse_github) export(browse_github_actions) export(browse_github_issues) export(browse_github_pat) export(browse_github_pulls) export(browse_github_token) export(browse_package) export(browse_project) export(browse_travis) export(create_download_url) export(create_from_github) export(create_github_token) export(create_package) export(create_project) export(create_tidy_package) export(edit_file) export(edit_git_config) export(edit_git_ignore) export(edit_pkgdown_config) export(edit_r_buildignore) export(edit_r_environ) export(edit_r_makevars) export(edit_r_profile) export(edit_rstudio_prefs) export(edit_rstudio_snippets) export(edit_template) export(gh_token_help) export(git_branch_default) export(git_credentials) export(git_default_branch) export(git_default_branch_configure) export(git_default_branch_rediscover) export(git_default_branch_rename) export(git_protocol) export(git_remotes) export(git_sitrep) export(git_vaccinate) export(github_token) export(issue_close_community) export(issue_reprex_needed) export(local_project) export(pr_fetch) export(pr_finish) export(pr_forget) export(pr_init) export(pr_merge_main) export(pr_pause) export(pr_pull) export(pr_pull_upstream) export(pr_push) export(pr_resume) export(pr_sync) export(pr_view) export(proj_activate) export(proj_get) export(proj_path) export(proj_set) export(proj_sitrep) export(rename_files) export(tidy_label_colours) export(tidy_label_descriptions) 
export(tidy_labels) export(tidy_labels_rename) export(ui_code) export(ui_code_block) export(ui_done) export(ui_field) export(ui_info) export(ui_line) export(ui_nope) export(ui_oops) export(ui_path) export(ui_silence) export(ui_stop) export(ui_todo) export(ui_unset) export(ui_value) export(ui_warn) export(ui_yeah) export(use_addin) export(use_agpl3_license) export(use_agpl_license) export(use_apache_license) export(use_apl2_license) export(use_appveyor) export(use_appveyor_badge) export(use_article) export(use_badge) export(use_binder_badge) export(use_bioc_badge) export(use_blank_slate) export(use_build_ignore) export(use_c) export(use_cc0_license) export(use_ccby_license) export(use_circleci) export(use_circleci_badge) export(use_citation) export(use_code_of_conduct) export(use_conflicted) export(use_course) export(use_coverage) export(use_covr_ignore) export(use_cpp11) export(use_cran_badge) export(use_cran_comments) export(use_data) export(use_data_raw) export(use_data_table) export(use_description) export(use_description_defaults) export(use_dev_package) export(use_dev_version) export(use_devtools) export(use_directory) export(use_git) export(use_git_config) export(use_git_credentials) export(use_git_hook) export(use_git_ignore) export(use_git_protocol) export(use_git_remote) export(use_github) export(use_github_action) export(use_github_action_check_full) export(use_github_action_check_release) export(use_github_action_check_standard) export(use_github_action_pr_commands) export(use_github_actions) export(use_github_actions_badge) export(use_github_file) export(use_github_labels) export(use_github_links) export(use_github_pages) export(use_github_release) export(use_gitlab_ci) export(use_gpl3_license) export(use_gpl_license) export(use_import_from) export(use_jenkins) export(use_latest_dependencies) export(use_lgpl_license) export(use_lifecycle) export(use_lifecycle_badge) export(use_logo) export(use_make) export(use_mit_license) export(use_namespace) export(use_news_md) export(use_package) export(use_package_doc) export(use_partial_warnings) export(use_pipe) export(use_pkgdown) export(use_pkgdown_github_pages) export(use_pkgdown_travis) export(use_proprietary_license) export(use_r) export(use_rcpp) export(use_rcpp_armadillo) export(use_rcpp_eigen) export(use_readme_md) export(use_readme_rmd) export(use_release_issue) export(use_reprex) export(use_revdep) export(use_rmarkdown_template) export(use_roxygen_md) export(use_rstudio) export(use_spell_check) export(use_template) export(use_test) export(use_testthat) export(use_tibble) export(use_tidy_ci) export(use_tidy_coc) export(use_tidy_contributing) export(use_tidy_dependencies) export(use_tidy_description) export(use_tidy_eval) export(use_tidy_github) export(use_tidy_github_actions) export(use_tidy_github_labels) export(use_tidy_issue_template) export(use_tidy_labels) export(use_tidy_style) export(use_tidy_support) export(use_tidy_thanks) export(use_tidy_upkeep_issue) export(use_travis) export(use_travis_badge) export(use_tutorial) export(use_usethis) export(use_version) export(use_vignette) export(use_zip) export(with_project) export(write_over) export(write_union) import(fs) import(rlang) importFrom(glue,glue) importFrom(glue,glue_collapse) importFrom(glue,glue_data) importFrom(lifecycle,deprecated) importFrom(purrr,map) importFrom(purrr,map_chr) importFrom(purrr,map_int) importFrom(purrr,map_lgl)